From 02a7b4ebdf98983f97b3a2007798356b0c39c1c4 Mon Sep 17 00:00:00 2001
From: Ricky Stewart
Date: Mon, 26 Oct 2020 18:34:53 +0000
Subject: [PATCH] Bug 1654103: Standardize on Black for Python code in
 `mozilla-central`.

Allow-list all Python code in tree for use with the black linter, and
re-format all code in-tree accordingly.

To produce this patch I did all of the following:

1. Make changes to tools/lint/black.yml to remove include: stanza and update
   list of source extensions.

2. Run ./mach lint --linter black --fix

3. Make some ad-hoc manual updates to
   python/mozbuild/mozbuild/test/configure/test_configure.py -- it has some
   hard-coded line numbers that the reformat breaks.

4. Make some ad-hoc manual updates to `testing/marionette/client/setup.py`,
   `testing/marionette/harness/setup.py`, and
   `testing/firefox-ui/harness/setup.py`, which have hard-coded regexes that
   break after the reformat.

5. Add a set of exclusions to black.yml. These will be deleted in a follow-up
   bug (1672023).

# ignore-this-changeset

Differential Revision: https://phabricator.services.mozilla.com/D94045
---
 .ycm_extra_conf.py | 33 +-
 accessible/android/moz.build | 51 +-
 accessible/aom/moz.build | 32 +-
 accessible/atk/moz.build | 84 +-
 accessible/base/moz.build | 147 +-
 accessible/generic/moz.build | 84 +-
 accessible/html/moz.build | 58 +-
 accessible/interfaces/gecko/moz.build | 38 +-
 accessible/interfaces/ia2/moz.build | 162 +-
 accessible/interfaces/moz.build | 68 +-
 accessible/interfaces/msaa/moz.build | 54 +-
 accessible/ipc/extension/android/moz.build | 24 +-
 accessible/ipc/extension/mac/moz.build | 22 +-
 accessible/ipc/extension/moz.build | 12 +-
 accessible/ipc/extension/other/moz.build | 12 +-
 accessible/ipc/moz.build | 62 +-
 accessible/ipc/other/moz.build | 42 +-
 accessible/ipc/win/handler/moz.build | 124 +-
 accessible/ipc/win/moz.build | 70 +-
 accessible/ipc/win/typelib/moz.build | 11 +-
 accessible/mac/SelectorMapGen.py | 11 +-
 accessible/mac/moz.build | 94 +-
 accessible/moz.build | 59 +-
 accessible/other/moz.build | 22 +-
 accessible/tests/mochitest/moz.build | 58 +-
 accessible/windows/ia2/moz.build | 62 +-
 accessible/windows/moz.build | 4 +-
 accessible/windows/msaa/moz.build | 98 +-
 accessible/windows/sdn/moz.build | 22 +-
 accessible/windows/uia/moz.build | 18 +-
 accessible/xpcom/AccEventGen.py | 89 +-
 accessible/xpcom/moz.build | 85 +-
 accessible/xul/moz.build | 66 +-
 browser/actors/moz.build | 114 +-
 browser/app/macbuild/Contents/moz.build | 22 +-
 browser/app/macversion.py | 21 +-
 browser/app/moz.build | 110 +-
 browser/app/no-pie/moz.build | 16 +-
 .../gen_ntdll_freestanding_lib.py | 4 +-
 .../app/winlauncher/freestanding/moz.build | 44 +-
 browser/app/winlauncher/moz.build | 42 +-
 browser/app/winlauncher/test/moz.build | 14 +-
 browser/base/gen_aboutneterror_codes.py | 12 +-
 browser/base/moz.build | 126 +-
 browser/branding/aurora/content/moz.build | 2 +-
 browser/branding/aurora/locales/moz.build | 4 +-
 browser/branding/aurora/moz.build | 8 +-
 browser/branding/nightly/content/moz.build | 2 +-
 browser/branding/nightly/locales/moz.build | 4 +-
 browser/branding/nightly/moz.build | 8 +-
 browser/branding/official/content/moz.build | 2 +-
 browser/branding/official/locales/moz.build | 2 +-
 browser/branding/official/moz.build | 8 +-
 browser/branding/unofficial/content/moz.build | 2 +-
 browser/branding/unofficial/locales/moz.build | 4 +-
 browser/branding/unofficial/moz.build | 8 +-
 browser/components/about/moz.build | 18 +-
 browser/components/aboutconfig/moz.build | 4 +-
 browser/components/aboutlogins/moz.build | 18 +-
 browser/components/attribution/moz.build | 20 +-
 browser/components/build/moz.build | 14 +-
 .../components/contextualidentity/moz.build | 8 +-
 .../customizableui/content/moz.build | 2 +-
 browser/components/customizableui/moz.build | 22 +-
 browser/components/doh/moz.build | 16 +-
 browser/components/downloads/moz.build | 30 +-
 .../enterprisepolicies/helpers/moz.build | 6 +-
 .../components/enterprisepolicies/moz.build | 16 +-
 .../enterprisepolicies/schemas/moz.build | 2 +-
 .../enterprisepolicies/tests/moz.build | 18 +-
 browser/components/extensions/moz.build | 18 +-
 .../components/extensions/schemas/moz.build | 2 +-
 browser/components/fxmonitor/moz.build | 11 +-
 browser/components/installerprefs/moz.build | 10 +-
 browser/components/ion/moz.build | 12 +-
 browser/components/migration/moz.build | 60 +-
 .../tests/unit/insertIEHistory/moz.build | 8 +-
 browser/components/moz.build | 110 +-
 browser/components/newtab/moz.build | 28 +-
 browser/components/originattributes/moz.build | 10 +-
 browser/components/payments/moz.build | 26 +-
 browser/components/payments/server.py | 7 +-
 browser/components/places/moz.build | 14 +-
 browser/components/pocket/moz.build | 4 +-
 .../components/preferences/dialogs/moz.build | 8 +-
 browser/components/preferences/moz.build | 21 +-
 browser/components/privatebrowsing/moz.build | 8 +-
 browser/components/prompts/moz.build | 4 +-
 browser/components/protections/moz.build | 8 +-
 browser/components/protocolhandler/moz.build | 10 +-
 .../components/resistfingerprinting/moz.build | 8 +-
 browser/components/search/moz.build | 20 +-
 .../marionette/test_engines_on_restart.py | 9 +-
 browser/components/sessionstore/moz.build | 46 +-
 browser/components/shell/moz.build | 68 +-
 .../shell/test/mac_desktop_image.py | 63 +-
 browser/components/ssb/moz.build | 20 +-
 browser/components/syncedtabs/moz.build | 27 +-
 browser/components/touchbar/moz.build | 12 +-
 .../components/translation/content/moz.build | 2 +-
 browser/components/translation/moz.build | 28 +-
 browser/components/uitour/moz.build | 14 +-
 browser/components/urlbar/moz.build | 64 +-
 browser/extensions/doh-rollout/moz.build | 12 +-
 .../extensions/formautofill/locales/moz.build | 2 +-
 browser/extensions/formautofill/moz.build | 60 +-
 browser/extensions/moz.build | 8 +-
 .../report-site-issue/locales/moz.build | 2 +-
 .../extensions/report-site-issue/moz.build | 50 +-
 browser/extensions/screenshots/moz.build | 117 +-
 browser/extensions/webcompat/moz.build | 192 +-
 browser/fonts/moz.build | 8 +-
 browser/fxr/moz.build | 2 +-
 browser/installer/windows/moz.build | 14 +-
 browser/locales/filter.py | 42 +-
 browser/locales/generate_bookmarks.py | 18 +-
 browser/locales/generate_ini.py | 15 +-
 browser/locales/moz.build | 32 +-
 browser/modules/moz.build | 86 +-
 browser/moz.build | 60 +-
 browser/moz.configure | 20 +-
 browser/themes/addons/moz.build | 2 +-
 browser/themes/linux/moz.build | 5 +-
 browser/themes/moz.build | 26 +-
 browser/themes/osx/moz.build | 3 +-
 browser/themes/windows/moz.build | 5 +-
 browser/tools/mozscreenshots/moz.build | 16 +-
 .../mozscreenshots/extension/moz.build | 70 +-
 build/RunCbindgen.py | 48 +-
 build/appini_header.py | 69 +-
 build/build-clang/build-clang.py | 507 +-
 build/build-infer/build-infer.py | 126 +-
 build/buildconfig.py | 4 +-
 build/checksums.py | 87 +-
 build/clang-plugin/ThirdPartyPaths.py | 11 +-
 build/clang-plugin/ThreadAllows.py | 36 +-
 build/clang-plugin/import_mozilla_checks.py | 104 +-
 build/clang-plugin/moz.build | 162 +-
 build/clang-plugin/tests/moz.build | 125 +-
 build/compare-mozconfig/compare-mozconfigs.py | 114 +-
 build/gen_symverscript.py | 10 +-
 build/gen_test_packages_manifest.py | 122 +-
 build/mach_bootstrap.py | 362 +-
 build/midl.py | 37 +-
 build/mobile/remoteautomation.py | 190 +-
 build/moz.build | 136 +-
 build/moz.configure/android-ndk.configure | 356 +-
 build/moz.configure/android-sdk.configure | 106 +-
 build/moz.configure/arm.configure | 209 +-
 build/moz.configure/bindgen.configure | 298 +-
 build/moz.configure/checks.configure | 54 +-
 build/moz.configure/compile-checks.configure | 109 +-
 build/moz.configure/compilers-util.configure | 82 +-
 build/moz.configure/flags.configure | 65 +-
 build/moz.configure/headers.configure | 112 +-
 build/moz.configure/init.configure | 1146 +-
 build/moz.configure/java.configure | 63 +-
 build/moz.configure/keyfiles.configure | 48 +-
 build/moz.configure/lto-pgo.configure | 303 +-
 build/moz.configure/memory.configure | 78 +-
 build/moz.configure/node.configure | 42 +-
 build/moz.configure/nspr.configure | 97 +-
 build/moz.configure/nss.configure | 18 +-
 build/moz.configure/old.configure | 334 +-
 build/moz.configure/pkg.configure | 55 +-
 build/moz.configure/rust.configure | 375 +-
 build/moz.configure/toolchain.configure | 2140 +--
 build/moz.configure/update-programs.configure | 73 +-
 build/moz.configure/util.configure | 206 +-
 build/moz.configure/warnings.configure | 160 +-
 build/moz.configure/windows.configure | 407 +-
 build/pgo/profileserver.py | 141 +-
 build/submit_telemetry_data.py | 58 +-
 build/unix/elfhack/inject/copy_source.py | 2 +-
 build/unix/elfhack/inject/moz.build | 24 +-
 build/unix/elfhack/moz.build | 29 +-
 build/unix/moz.build | 10 +-
 build/unix/rewrite_asan_dylib.py | 67 +-
 build/unix/stdc++compat/moz.build | 20 +-
 build/upload.py | 32 +-
 build/upload_generated_sources.py | 93 +-
 build/valgrind/mach_commands.py | 203 +-
 build/valgrind/output_handler.py | 53 +-
 build/variables.py | 60 +-
 build/win32/autowinchecksec.py | 53 +-
 build/win32/crashinjectdll/moz.build | 6 +-
 build/win32/moz.build | 44 +-
 build/windows_toolchain.py | 183 +-
 caps/moz.build | 83 +-
 caps/tests/gtest/moz.build | 10 +-
 chrome/moz.build | 38 +-
 chrome/test/moz.build | 4 +-
 client.py | 119 +-
 config/MozZipFile.py | 42 +-
 config/check_js_msg_encoding.py | 21 +-
 config/check_js_opcode.py | 10 +-
 config/check_source_count.py | 35 +-
 config/check_spidermonkey_style.py | 472 +-
 config/check_vanilla_allocations.py | 81 +-
 config/create_rc.py | 113 +-
 config/create_res.py | 48 +-
 config/createprecomplete.py | 32 +-
 config/external/fdlibm/moz.build | 6 +-
 config/external/ffi/moz.build | 189 +-
 config/external/ffi/preprocess_libffi_asm.py | 4 +-
 config/external/ffi/subst_header.py | 20 +-
 config/external/freetype2/moz.build | 8 +-
 config/external/icu/common/moz.build | 32 +-
 config/external/icu/data/convert_icudata.py | 14 +-
 config/external/icu/data/moz.build | 38 +-
 config/external/icu/i18n/moz.build | 18 +-
 config/external/icu/icupkg/moz.build | 20 +-
 config/external/icu/moz.build | 20 +-
 config/external/icu/toolutil/moz.build | 14 +-
 config/external/lgpllibs/moz.build | 4 +-
 config/external/moz.build | 84 +-
 config/external/nspr/ds/moz.build | 26 +-
 config/external/nspr/libc/moz.build | 48 +-
 config/external/nspr/moz.build | 24 +-
 config/external/nspr/pr/moz.build | 407 +-
 config/external/rlbox/moz.build | 36 +-
 config/external/rlbox_lucet_sandbox/moz.build | 10 +-
 config/external/sqlite/moz.build | 14 +-
 config/external/zlib/moz.build | 10 +-
 config/make-stl-wrappers.py | 2 +-
 config/make-system-wrappers.py | 12 +-
 config/make-windows-h-wrapper.py | 41 +-
 config/moz.build | 104 +-
 config/mozunit/mozunit/mozunit.py | 121 +-
 config/mozunit/setup.py | 24 +-
 config/nsinstall.py | 75 +-
 config/printprereleasesuffix.py | 16 +-
 config/rebuild_check.py | 20 +-
 config/run-and-prefix.py | 23 +-
 config/run_spidermonkey_checks.py | 3 +-
 config/tests/src-simple/moz.build | 2 +-
 config/tests/test_mozbuild_reading.py | 35 +-
 config/tests/unit-mozunit.py | 75 +-
 config/tests/unit-nsinstall.py | 58 +-
 config/tests/unit-printprereleasesuffix.py | 54 +-
 config/tests/unitMozZipFile.py | 83 +-
 configure.py | 129 +-
 devtools/client/aboutdebugging/moz.build | 18 +-
 .../aboutdebugging/src/actions/moz.build | 10 +-
 .../src/components/connect/moz.build | 10 +-
 .../src/components/debugtarget/moz.build | 32 +-
 .../aboutdebugging/src/components/moz.build | 24 +-
 .../src/components/shared/moz.build | 6 +-
 .../src/components/sidebar/moz.build | 10 +-
 .../aboutdebugging/src/middleware/moz.build | 14 +-
 .../aboutdebugging/src/modules/moz.build | 20 +-
 devtools/client/aboutdebugging/src/moz.build | 16 +-
 .../aboutdebugging/src/reducers/moz.build | 8 +-
 .../client/aboutdebugging/src/types/moz.build | 10 +-
 .../client/accessibility/actions/moz.build | 8 +-
 .../client/accessibility/components/moz.build | 52 +-
 devtools/client/accessibility/moz.build | 27 +-
 .../client/accessibility/reducers/moz.build | 7 +-
 devtools/client/accessibility/utils/moz.build | 5 +-
 devtools/client/application/moz.build | 15 +-
 .../client/application/src/actions/moz.build | 10 +-
 .../src/components/manifest/moz.build | 24 +-
 .../application/src/components/moz.build | 10 +-
 .../src/components/routing/moz.build | 6 +-
 .../src/components/service-workers/moz.build | 10 +-
 .../application/src/components/ui/moz.build | 2 +-
 .../application/src/middleware/moz.build | 2 +-
 .../client/application/src/modules/moz.build | 4 +-
 devtools/client/application/src/moz.build | 16 +-
 .../client/application/src/reducers/moz.build | 10 +-
 .../client/application/src/types/moz.build | 8 +-
 devtools/client/debugger/dist/moz.build | 8 +-
 devtools/client/debugger/moz.build | 15 +-
 .../client/debugger/src/actions/ast/moz.build | 8 +-
 .../src/actions/breakpoints/moz.build | 14 +-
 .../client/debugger/src/actions/moz.build | 40 +-
 .../debugger/src/actions/pause/moz.build | 40 +-
 .../debugger/src/actions/sources/moz.build | 20 +-
 .../src/actions/utils/middleware/moz.build | 18 +-
 .../debugger/src/actions/utils/moz.build | 4 +-
 .../debugger/src/client/firefox/moz.build | 10 +-
 devtools/client/debugger/src/client/moz.build | 6 +-
 .../src/components/Editor/Preview/moz.build | 10 +-
 .../src/components/Editor/menus/moz.build | 10 +-
 .../debugger/src/components/Editor/moz.build | 46 +-
 .../src/components/PrimaryPanes/moz.build | 14 +-
 .../SecondaryPanes/Breakpoints/moz.build | 16 +-
 .../SecondaryPanes/Frames/moz.build | 14 +-
 .../src/components/SecondaryPanes/moz.build | 24 +-
 .../client/debugger/src/components/moz.build | 20 +-
 .../src/components/shared/Button/moz.build | 11 +-
 .../components/shared/Button/styles/moz.build | 8 +-
 .../debugger/src/components/shared/moz.build | 28 +-
 .../debugger/src/context-menu/moz.build | 2 +-
 devtools/client/debugger/src/moz.build | 20 +-
 .../client/debugger/src/reducers/moz.build | 42 +-
 .../client/debugger/src/selectors/moz.build | 26 +-
 .../debugger/src/utils/breakpoint/moz.build | 10 +-
 .../debugger/src/utils/editor/moz.build | 20 +-
 devtools/client/debugger/src/utils/moz.build | 92 +-
 .../debugger/src/utils/pause/frames/moz.build | 16 +-
 .../src/utils/pause/mapScopes/moz.build | 24 +-
 .../client/debugger/src/utils/pause/moz.build | 10 +-
 .../debugger/src/utils/pause/scopes/moz.build | 12 +-
 .../debugger/src/utils/resource/moz.build | 19 +-
 .../debugger/src/utils/sources-tree/moz.build | 24 +-
 .../client/debugger/src/workers/moz.build | 10 +-
 .../debugger/src/workers/parser/moz.build | 6 +-
 .../src/workers/pretty-print/moz.build | 6 +-
 .../debugger/src/workers/search/moz.build | 6 +-
 devtools/client/dom/content/actions/moz.build | 4 +-
 .../client/dom/content/components/moz.build | 6 +-
 devtools/client/dom/content/moz.build | 16 +-
 .../client/dom/content/reducers/moz.build | 6 +-
 devtools/client/dom/moz.build | 10 +-
 devtools/client/framework/actions/moz.build | 10 +-
 .../framework/browser-toolbox/moz.build | 4 +-
 .../client/framework/components/moz.build | 14 +-
 devtools/client/framework/moz.build | 68 +-
 devtools/client/framework/reducers/moz.build | 10 +-
 devtools/client/fronts/addon/moz.build | 4 +-
 devtools/client/fronts/descriptors/moz.build | 9 +-
 devtools/client/fronts/inspector/moz.build | 3 +-
 devtools/client/fronts/moz.build | 88 +-
 devtools/client/fronts/targets/moz.build | 8 +-
 devtools/client/fronts/worker/moz.build | 6 +-
 .../inspector/animation/actions/moz.build | 4 +-
 .../animation/components/graph/moz.build | 22 +-
 .../components/keyframes-graph/moz.build | 16 +-
 .../inspector/animation/components/moz.build | 51 +-
 devtools/client/inspector/animation/moz.build | 18 +-
 .../inspector/animation/reducers/moz.build | 2 +-
 .../inspector/animation/utils/moz.build | 8 +-
 .../inspector/boxmodel/actions/moz.build | 4 +-
 .../inspector/boxmodel/components/moz.build | 12 +-
 devtools/client/inspector/boxmodel/moz.build | 14 +-
 .../inspector/boxmodel/reducers/moz.build | 2 +-
 .../client/inspector/boxmodel/utils/moz.build | 2 +-
 .../inspector/changes/actions/moz.build | 4 +-
 .../inspector/changes/components/moz.build | 4 +-
 devtools/client/inspector/changes/moz.build | 22 +-
 .../inspector/changes/reducers/moz.build | 2 +-
 .../inspector/changes/selectors/moz.build | 2 +-
 .../client/inspector/changes/utils/moz.build | 4 +-
 .../inspector/compatibility/actions/moz.build | 4 +-
 .../client/inspector/compatibility/moz.build | 20 +-
 .../compatibility/reducers/moz.build | 2 +-
 .../inspector/compatibility/utils/moz.build | 2 +-
 .../client/inspector/components/moz.build | 2 +-
 devtools/client/inspector/computed/moz.build | 8 +-
 .../inspector/extensions/actions/moz.build | 4 +-
 .../inspector/extensions/components/moz.build | 8 +-
 .../client/inspector/extensions/moz.build | 12 +-
 .../inspector/extensions/reducers/moz.build | 2 +-
 .../inspector/flexbox/actions/moz.build | 4 +-
 .../inspector/flexbox/components/moz.build | 16 +-
 devtools/client/inspector/flexbox/moz.build | 12 +-
 .../inspector/flexbox/reducers/moz.build | 4 +-
 .../client/inspector/fonts/actions/moz.build | 8 +-
 .../inspector/fonts/components/moz.build | 32 +-
 devtools/client/inspector/fonts/moz.build | 18 +-
 .../client/inspector/fonts/reducers/moz.build | 6 +-
 .../client/inspector/fonts/utils/moz.build | 4 +-
 .../client/inspector/grids/actions/moz.build | 6 +-
 .../inspector/grids/components/moz.build | 10 +-
 devtools/client/inspector/grids/moz.build | 16 +-
 .../client/inspector/grids/reducers/moz.build | 4 +-
 .../client/inspector/grids/utils/moz.build | 2 +-
 .../inspector/layout/components/moz.build | 2 +-
 devtools/client/inspector/layout/moz.build | 10 +-
 .../client/inspector/layout/utils/moz.build | 2 +-
 .../inspector/markup/components/moz.build | 2 +-
 devtools/client/inspector/markup/moz.build | 14 +-
 .../client/inspector/markup/utils/moz.build | 2 +-
 .../client/inspector/markup/views/moz.build | 22 +-
 devtools/client/inspector/moz.build | 48 +-
 .../client/inspector/rules/actions/moz.build | 8 +-
 .../inspector/rules/components/moz.build | 24 +-
 .../client/inspector/rules/models/moz.build | 10 +-
 devtools/client/inspector/rules/moz.build | 28 +-
 .../client/inspector/rules/reducers/moz.build | 6 +-
 .../client/inspector/rules/utils/moz.build | 4 +-
 .../client/inspector/rules/views/moz.build | 6 +-
 devtools/client/inspector/shared/moz.build | 20 +-
 devtools/client/jsonview/components/moz.build | 24 +-
 .../client/jsonview/components/reps/moz.build | 2 +-
 devtools/client/jsonview/css/moz.build | 14 +-
 devtools/client/jsonview/lib/moz.build | 4 +-
 devtools/client/jsonview/moz.build | 24 +-
 devtools/client/locales/moz.build | 2 +-
 devtools/client/memory/actions/moz.build | 26 +-
 devtools/client/memory/components/moz.build | 30 +-
 .../memory/components/tree-map/moz.build | 10 +-
 devtools/client/memory/moz.build | 34 +-
 devtools/client/memory/reducers/moz.build | 24 +-
 devtools/client/moz.build | 62 +-
 devtools/client/netmonitor/moz.build | 19 +-
 .../client/netmonitor/src/actions/moz.build | 22 +-
 .../src/components/messages/moz.build | 46 +-
 .../src/components/messages/parsers/moz.build | 10 +-
 .../messages/parsers/signalr/moz.build | 12 +-
 .../messages/parsers/socket-io/moz.build | 8 +-
 .../messages/parsers/sockjs/moz.build | 2 +-
 .../messages/parsers/stomp/moz.build | 8 +-
 .../messages/parsers/wamp/moz.build | 4 +-
 .../netmonitor/src/components/moz.build | 36 +-
 .../src/components/previews/moz.build | 5 +-
 .../src/components/request-blocking/moz.build | 2 +-
 .../src/components/request-details/moz.build | 21 +-
 .../src/components/request-list/moz.build | 44 +-
 .../src/components/search/moz.build | 8 +-
 .../client/netmonitor/src/connector/moz.build | 6 +-
 devtools/client/netmonitor/src/har/moz.build | 22 +-
 .../netmonitor/src/middleware/moz.build | 12 +-
 devtools/client/netmonitor/src/moz.build | 28 +-
 .../client/netmonitor/src/reducers/moz.build | 20 +-
 .../client/netmonitor/src/selectors/moz.build | 12 +-
 .../netmonitor/src/utils/firefox/moz.build | 2 +-
 .../client/netmonitor/src/utils/moz.build | 32 +-
 .../client/netmonitor/src/widgets/moz.build | 12 +-
 .../client/netmonitor/src/workers/moz.build | 4 +-
 .../netmonitor/src/workers/search/moz.build | 6 +-
 .../performance-new/aboutprofiling/moz.build | 4 +-
 .../performance-new/components/moz.build | 20 +-
 devtools/client/performance-new/moz.build | 30 +-
 .../client/performance-new/popup/moz.build | 10 +-
 .../client/performance-new/store/moz.build | 6 +-
 .../client/performance/components/moz.build | 22 +-
 .../performance/modules/logic/moz.build | 10 +-
 devtools/client/performance/modules/moz.build | 24 +-
 .../performance/modules/widgets/moz.build | 8 +-
 devtools/client/performance/moz.build | 26 +-
 .../client/performance/test/helpers/moz.build | 24 +-
 devtools/client/performance/test/moz.build | 2 +-
 devtools/client/performance/views/moz.build | 20 +-
 devtools/client/preferences/moz.build | 2 +-
 devtools/client/responsive/actions/moz.build | 10 +-
 .../client/responsive/components/moz.build | 24 +-
 devtools/client/responsive/moz.build | 30 +-
 devtools/client/responsive/reducers/moz.build | 8 +-
 devtools/client/responsive/utils/moz.build | 16 +-
 .../client/shared/components/menu/moz.build | 8 +-
 devtools/client/shared/components/moz.build | 48 +-
 .../object-inspector/components/moz.build | 4 +-
 .../components/object-inspector/moz.build | 10 +-
 .../object-inspector/components/moz.build | 4 +-
 .../object-inspector/moz.build | 10 +-
 .../object-inspector/utils/moz.build | 10 +-
 .../object-inspector/utils/moz.build | 10 +-
 .../client/shared/components/reps/moz.build | 6 +-
 .../shared/components/reps/reps/moz.build | 72 +-
 .../shared/components/reps/shared/moz.build | 4 +-
 .../shared/components/splitter/moz.build | 6 +-
 .../client/shared/components/tabs/moz.build | 4 +-
 .../shared/components/throttling/moz.build | 10 +-
 .../client/shared/components/tree/moz.build | 12 +-
 devtools/client/shared/fluent-l10n/moz.build | 2 +-
 devtools/client/shared/moz.build | 104 +-
 .../client/shared/redux/middleware/moz.build | 20 +-
 devtools/client/shared/redux/moz.build | 10 +-
 .../remote-debugging/adb/commands/moz.build | 12 +-
 .../shared/remote-debugging/adb/moz.build | 26 +-
 .../remote-debugging/adb/xpcshell/adb.py | 25 +-
 .../client/shared/remote-debugging/moz.build | 16 +-
 devtools/client/shared/sourceeditor/moz.build | 16 +-
 devtools/client/shared/widgets/moz.build | 32 +-
 .../client/shared/widgets/tooltip/moz.build | 26 +-
 devtools/client/storage/moz.build | 14 +-
 devtools/client/storage/utils/moz.build | 4 +-
 devtools/client/styleeditor/moz.build | 16 +-
 devtools/client/themes/audio/moz.build | 2 +-
 devtools/client/themes/moz.build | 2 +-
 devtools/client/webconsole/actions/moz.build | 20 +-
 .../webconsole/components/FilterBar/moz.build | 10 +-
 .../webconsole/components/Input/moz.build | 12 +-
 .../components/Output/message-types/moz.build | 16 +-
 .../webconsole/components/Output/moz.build | 22 +-
 .../client/webconsole/components/moz.build | 10 +-
 .../client/webconsole/enhancers/moz.build | 10 +-
 .../client/webconsole/middleware/moz.build | 6 +-
 devtools/client/webconsole/moz.build | 58 +-
 devtools/client/webconsole/reducers/moz.build | 16 +-
 .../client/webconsole/selectors/moz.build | 14 +-
 devtools/client/webconsole/utils/moz.build | 14 +-
 devtools/moz.build | 54 +-
 devtools/platform/moz.build | 12 +-
 .../actors/accessibility/audit/moz.build | 10 +-
 .../server/actors/accessibility/moz.build | 20 +-
 devtools/server/actors/addon/moz.build | 4 +-
 .../server/actors/compatibility/lib/moz.build | 4 +-
 .../server/actors/compatibility/moz.build | 8 +-
 devtools/server/actors/descriptors/moz.build | 9 +-
 devtools/server/actors/emulation/moz.build | 6 +-
 devtools/server/actors/highlighters/moz.build | 30 +-
 .../actors/highlighters/utils/moz.build | 6 +-
 devtools/server/actors/inspector/moz.build | 24 +-
 devtools/server/actors/moz.build | 140 +-
 .../server/actors/network-monitor/moz.build | 22 +-
 .../actors/network-monitor/utils/moz.build | 6 +-
 devtools/server/actors/object/moz.build | 12 +-
 devtools/server/actors/resources/moz.build | 28 +-
 .../server/actors/resources/utils/moz.build | 8 +-
 devtools/server/actors/targets/moz.build | 18 +-
 devtools/server/actors/utils/moz.build | 36 +-
 devtools/server/actors/watcher/moz.build | 5 +-
 .../actors/watcher/target-helpers/moz.build | 9 +-
 .../actors/webconsole/listeners/moz.build | 12 +-
 devtools/server/actors/webconsole/moz.build | 20 +-
 devtools/server/actors/worker/moz.build | 12 +-
 .../connectors/js-window-actor/moz.build | 10 +-
 devtools/server/connectors/moz.build | 8 +-
 devtools/server/moz.build | 28 +-
 devtools/server/performance/moz.build | 14 +-
 devtools/server/socket/moz.build | 4 +-
 devtools/server/startup/moz.build | 10 +-
 .../shared/compatibility/dataset/moz.build | 4 +-
 devtools/shared/compatibility/moz.build | 6 +-
 .../shared/css/generated/mach_commands.py | 74 +-
 devtools/shared/css/generated/moz.build | 2 +-
 devtools/shared/css/moz.build | 14 +-
 devtools/shared/discovery/moz.build | 4 +-
 devtools/shared/heapsnapshot/moz.build | 74 +-
 .../shared/heapsnapshot/tests/gtest/moz.build | 36 +-
 devtools/shared/inspector/moz.build | 5 +-
 devtools/shared/layout/moz.build | 5 +-
 devtools/shared/locales/moz.build | 2 +-
 devtools/shared/moz.build | 128 +-
 devtools/shared/performance-new/moz.build | 8 +-
 devtools/shared/performance/moz.build | 6 +-
 devtools/shared/platform/moz.build | 6 +-
 devtools/shared/protocol/Actor/moz.build | 2 +-
 devtools/shared/protocol/Front/moz.build | 2 +-
 devtools/shared/protocol/moz.build | 22 +-
 devtools/shared/qrcode/moz.build | 16 +-
 .../resources/legacy-listeners/moz.build | 36 +-
 .../legacy-target-watchers/moz.build | 8 +-
 devtools/shared/resources/moz.build | 14 +-
 .../shared/resources/transformers/moz.build | 10 +-
 devtools/shared/screenshot/moz.build | 4 +-
 devtools/shared/security/moz.build | 12 +-
 devtools/shared/specs/addon/moz.build | 4 +-
 devtools/shared/specs/descriptors/moz.build | 9 +-
 devtools/shared/specs/moz.build | 100 +-
 devtools/shared/specs/targets/moz.build | 14 +-
 devtools/shared/specs/worker/moz.build | 6 +-
 devtools/shared/storage/moz.build | 8 +-
 devtools/shared/transport/moz.build | 18 +-
 .../webconsole/GeneratePureDOMFunctions.py | 75 +-
 .../webconsole/GenerateReservedWordsJS.py | 19 +-
 devtools/shared/webconsole/moz.build | 25 +-
 devtools/shared/worker/moz.build | 10 +-
 devtools/startup/aboutdevtools/moz.build | 6 +-
 devtools/startup/locales/moz.build | 2 +-
 devtools/startup/moz.build | 22 +-
 docs/conf.py | 6 +-
 docshell/base/moz.build | 186 +-
 docshell/base/timeline/moz.build | 64 +-
 docshell/build/moz.build | 12 +-
 docshell/moz.build | 62 +-
 docshell/resources/content/moz.build | 2 +-
 docshell/shistory/moz.build | 42 +-
 docshell/test/moz.build | 180 +-
 dom/abort/moz.build | 14 +-
 dom/abort/tests/moz.build | 4 +-
 dom/animation/moz.build | 98 +-
 dom/audiochannel/moz.build | 18 +-
 dom/base/gen-usecounters.py | 59 +-
 dom/base/moz.build | 970 +-
 dom/base/test/gtest/moz.build | 18 +-
 dom/base/test/jsmodules/moz.build | 4 +-
 dom/base/test/moz.build | 34 +-
 dom/base/test/useractivation/moz.build | 2 +-
 dom/base/usecounters.py | 79 +-
 dom/battery/moz.build | 12 +-
 dom/bindings/Codegen.py | 11724 ++++++++++------
 dom/bindings/Configuration.py | 437 +-
 dom/bindings/GenerateCSS2PropertiesWebIDL.py | 35 +-
 dom/bindings/mach_commands.py | 27 +-
 dom/bindings/moz.build | 218 +-
 dom/bindings/mozwebidlcodegen/__init__.py | 263 +-
 .../test/test_mozwebidlcodegen.py | 125 +-
 dom/bindings/parser/WebIDL.py | 5118 ++++---
 dom/bindings/parser/runtests.py | 47 +-
 dom/bindings/parser/tests/test_any_null.py | 6 +-
 .../test_argument_identifier_conflicts.py | 6 +-
 .../parser/tests/test_argument_keywords.py | 27 +-
 .../parser/tests/test_argument_novoid.py | 6 +-
 dom/bindings/parser/tests/test_arraybuffer.py | 48 +-
 dom/bindings/parser/tests/test_attr.py | 114 +-
 .../parser/tests/test_attr_sequence_type.py | 42 +-
 .../parser/tests/test_attributes_on_types.py | 315 +-
 .../parser/tests/test_builtin_filename.py | 9 +-
 dom/bindings/parser/tests/test_builtins.py | 33 +-
 dom/bindings/parser/tests/test_bytestring.py | 84 +-
 dom/bindings/parser/tests/test_callback.py | 21 +-
 .../parser/tests/test_callback_constructor.py | 49 +-
 .../parser/tests/test_callback_interface.py | 34 +-
 dom/bindings/parser/tests/test_cereactions.py | 80 +-
 .../test_conditional_dictionary_member.py | 84 +-
 dom/bindings/parser/tests/test_const.py | 35 +-
 dom/bindings/parser/tests/test_constructor.py | 467 +-
 .../parser/tests/test_constructor_global.py | 25 +-
 .../test_constructor_no_interface_object.py | 18 +-
 dom/bindings/parser/tests/test_deduplicate.py | 13 +-
 dom/bindings/parser/tests/test_dictionary.py | 457 +-
 .../parser/tests/test_distinguishability.py | 284 +-
 dom/bindings/parser/tests/test_double_null.py | 6 +-
 .../parser/tests/test_duplicate_qualifiers.py | 24 +-
 dom/bindings/parser/tests/test_empty_enum.py | 7 +-
 .../test_empty_sequence_default_value.py | 47 +-
 dom/bindings/parser/tests/test_enum.py | 52 +-
 .../tests/test_enum_duplicate_values.py | 7 +-
 dom/bindings/parser/tests/test_error_colno.py | 16 +-
 .../parser/tests/test_error_lineno.py | 24 +-
 .../tests/test_exposed_extended_attribute.py | 212 +-
 .../parser/tests/test_extended_attributes.py | 76 +-
 dom/bindings/parser/tests/test_float_types.py | 60 +-
 .../parser/tests/test_forward_decl.py | 7 +-
 .../parser/tests/test_global_extended_attr.py | 85 +-
 .../parser/tests/test_identifier_conflict.py | 36 +-
 .../parser/tests/test_incomplete_parent.py | 7 +-
 .../parser/tests/test_incomplete_types.py | 61 +-
 dom/bindings/parser/tests/test_interface.py | 339 +-
 ...st_interface_const_identifier_conflicts.py | 6 +-
 ...ace_identifier_conflicts_across_members.py | 24 +-
 .../test_interface_maplikesetlikeiterable.py | 566 +-
 .../parser/tests/test_interfacemixin.py | 361 +-
 .../parser/tests/test_lenientSetter.py | 48 +-
 dom/bindings/parser/tests/test_method.py | 340 +-
 dom/bindings/parser/tests/test_namespace.py | 59 +-
 dom/bindings/parser/tests/test_newobject.py | 16 +-
 .../parser/tests/test_nullable_equivalency.py | 54 +-
 .../parser/tests/test_nullable_void.py | 6 +-
 .../parser/tests/test_optional_constraints.py | 23 +-
 dom/bindings/parser/tests/test_overload.py | 46 +-
 dom/bindings/parser/tests/test_promise.py | 132 +-
 .../parser/tests/test_prototype_ident.py | 69 +-
 dom/bindings/parser/tests/test_putForwards.py | 46 +-
 dom/bindings/parser/tests/test_record.py | 38 +-
 dom/bindings/parser/tests/test_replaceable.py | 48 +-
 .../test_securecontext_extended_attribute.py | 440 +-
 .../test_special_method_signature_mismatch.py | 96 +-
 .../parser/tests/test_special_methods.py | 96 +-
 .../tests/test_special_methods_uniqueness.py | 19 +-
 dom/bindings/parser/tests/test_stringifier.py | 115 +-
 dom/bindings/parser/tests/test_toJSON.py | 312 +-
 .../tests/test_treatNonCallableAsNull.py | 25 +-
 dom/bindings/parser/tests/test_typedef.py | 54 +-
 .../tests/test_typedef_identifier_conflict.py | 9 +-
 .../tests/test_unenumerable_own_properties.py | 23 +-
 dom/bindings/parser/tests/test_unforgeable.py | 219 +-
 dom/bindings/parser/tests/test_union.py | 102 +-
 dom/bindings/parser/tests/test_union_any.py | 6 +-
 .../parser/tests/test_union_nullable.py | 35 +-
 dom/bindings/parser/tests/test_usvstring.py | 30 +-
 .../parser/tests/test_variadic_callback.py | 7 +-
 .../parser/tests/test_variadic_constraints.py | 47 +-
 dom/bindings/test/moz.build | 64 +-
 dom/broadcastchannel/moz.build | 20 +-
 dom/browser-element/moz.build | 22 +-
 dom/cache/moz.build | 144 +-
 dom/canvas/moz.build | 332 +-
 .../generate-wrappers-and-manifest.py | 262 +-
 dom/canvas/test/webgl-conf/import.py | 24 +-
 .../test/webgl-mochitest/mochi-to-testcase.py | 38 +-
 dom/chrome-webidl/moz.build | 100 +-
 dom/clients/api/moz.build | 21 +-
 dom/clients/manager/moz.build | 103 +-
 dom/clients/moz.build | 4 +-
 dom/commandhandler/moz.build | 32 +-
 dom/console/moz.build | 42 +-
 dom/credentialmanagement/moz.build | 16 +-
 dom/crypto/moz.build | 30 +-
 dom/debugger/moz.build | 22 +-
 dom/encoding/moz.build | 18 +-
 dom/events/android/moz.build | 4 +-
 dom/events/emacs/moz.build | 4 +-
 dom/events/mac/moz.build | 4 +-
 dom/events/moz.build | 296 +-
 dom/events/unix/moz.build | 4 +-
 dom/events/win/moz.build | 4 +-
 dom/fetch/moz.build | 76 +-
 dom/file/ipc/moz.build | 80 +-
 dom/file/moz.build | 80 +-
 dom/file/uri/moz.build | 28 +-
 dom/filehandle/moz.build | 20 +-
 dom/filesystem/compat/moz.build | 32 +-
 dom/filesystem/compat/tests/moz.build | 2 +-
 dom/filesystem/moz.build | 52 +-
 dom/filesystem/tests/moz.build | 2 +-
 dom/flex/moz.build | 18 +-
 dom/gamepad/moz.build | 116 +-
 dom/geolocation/moz.build | 48 +-
 dom/grid/moz.build | 34 +-
 dom/html/input/moz.build | 45 +-
 dom/html/moz.build | 430 +-
 dom/indexedDB/moz.build | 168 +-
 dom/indexedDB/test/gtest/moz.build | 14 +-
 dom/interfaces/base/moz.build | 43 +-
 dom/interfaces/events/moz.build | 5 +-
 dom/interfaces/geolocation/moz.build | 11 +-
 dom/interfaces/html/moz.build | 7 +-
 dom/interfaces/notification/moz.build | 5 +-
 dom/interfaces/payments/moz.build | 12 +-
 dom/interfaces/push/moz.build | 8 +-
 dom/interfaces/security/moz.build | 9 +-
 dom/interfaces/sidebar/moz.build | 5 +-
 dom/interfaces/storage/moz.build | 6 +-
 dom/interfaces/xul/moz.build | 23 +-
 dom/ipc/fuzztest/moz.build | 14 +-
 dom/ipc/jsactor/moz.build | 46 +-
 dom/ipc/moz.build | 356 +-
 dom/jsurl/moz.build | 10 +-
 dom/l10n/moz.build | 30 +-
 dom/l10n/tests/gtest/moz.build | 4 +-
 dom/locales/moz.build | 10 +-
 dom/localstorage/moz.build | 84 +-
 dom/localstorage/test/gtest/moz.build | 8 +-
 dom/manifest/moz.build | 18 +-
 dom/mathml/moz.build | 16 +-
 dom/media/bridge/moz.build | 36 +-
 dom/media/doctor/gtest/moz.build | 16 +-
 dom/media/doctor/moz.build | 42 +-
 dom/media/eme/mediadrm/moz.build | 14 +-
 dom/media/eme/moz.build | 62 +-
 dom/media/encoder/moz.build | 48 +-
 dom/media/fake-cdm/moz.build | 16 +-
 dom/media/flac/moz.build | 22 +-
 dom/media/fuzz/moz.build | 26 +-
 dom/media/gmp-plugin-openh264/moz.build | 10 +-
 dom/media/gmp/moz.build | 234 +-
 dom/media/gmp/widevine-adapter/moz.build | 22 +-
 dom/media/gtest/moz.build | 178 +-
 dom/media/gtest/mp4_demuxer/moz.build | 86 +-
 dom/media/hls/moz.build | 22 +-
 dom/media/imagecapture/moz.build | 12 +-
 dom/media/ipc/moz.build | 78 +-
 dom/media/mediacapabilities/moz.build | 28 +-
 dom/media/mediacontrol/moz.build | 54 +-
 dom/media/mediacontrol/tests/gtest/moz.build | 25 +-
 dom/media/mediasession/moz.build | 16 +-
 dom/media/mediasink/moz.build | 20 +-
 dom/media/mediasource/gtest/moz.build | 10 +-
 dom/media/mediasource/moz.build | 50 +-
 dom/media/moz.build | 620 +-
 dom/media/mp3/moz.build | 16 +-
 dom/media/mp4/moz.build | 54 +-
 dom/media/ogg/moz.build | 32 +-
 .../agnostic/bytestreams/gtest/moz.build | 4 +-
 .../platforms/agnostic/bytestreams/moz.build | 20 +-
 dom/media/platforms/agnostic/eme/moz.build | 18 +-
 dom/media/platforms/agnostic/gmp/moz.build | 12 +-
 dom/media/platforms/ffmpeg/ffvpx/moz.build | 42 +-
 dom/media/platforms/ffmpeg/moz.build | 20 +-
 dom/media/platforms/moz.build | 191 +-
 dom/media/platforms/omx/moz.build | 30 +-
 dom/media/platforms/wmf/moz.build | 38 +-
 dom/media/systemservices/moz.build | 139 +-
 dom/media/wave/moz.build | 16 +-
 dom/media/webaudio/blink/moz.build | 48 +-
 dom/media/webaudio/moz.build | 232 +-
 dom/media/webm/moz.build | 33 +-
 dom/media/webrtc/common/moz.build | 20 +-
 dom/media/webrtc/jsapi/moz.build | 50 +-
 dom/media/webrtc/jsep/moz.build | 20 +-
 dom/media/webrtc/libwebrtcglue/moz.build | 44 +-
 dom/media/webrtc/moz.build | 132 +-
 dom/media/webrtc/sdp/moz.build | 54 +-
 dom/media/webrtc/tests/fuzztests/moz.build | 14 +-
 .../third_party_build/extract-for-git.py | 94 +-
 dom/media/webrtc/third_party_build/moz.build | 116 +-
 .../third_party_build/vendor-libwebrtc.py | 147 +-
 dom/media/webrtc/transport/build/moz.build | 48 +-
 dom/media/webrtc/transport/common.build | 136 +-
 dom/media/webrtc/transport/fuzztest/moz.build | 30 +-
 dom/media/webrtc/transport/ipc/moz.build | 60 +-
 dom/media/webrtc/transport/moz.build | 14 +-
 dom/media/webrtc/transport/test/moz.build | 126 +-
 dom/media/webrtc/transportbridge/moz.build | 32 +-
 dom/media/webspeech/moz.build | 6 +-
 dom/media/webspeech/recognition/moz.build | 74 +-
 dom/media/webspeech/synth/android/moz.build | 10 +-
 dom/media/webspeech/synth/cocoa/moz.build | 6 +-
 dom/media/webspeech/synth/moz.build | 79 +-
 dom/media/webspeech/synth/speechd/moz.build | 10 +-
 dom/media/webspeech/synth/windows/moz.build | 8 +-
 dom/media/webvtt/moz.build | 14 +-
 dom/messagechannel/moz.build | 30 +-
 dom/messagechannel/tests/moz.build | 6 +-
 dom/midi/moz.build | 88 +-
 dom/moz.build | 196 +-
 dom/network/interfaces/moz.build | 6 +-
 dom/network/moz.build | 70 +-
 dom/notification/moz.build | 38 +-
 dom/offline/moz.build | 10 +-
 dom/payments/ipc/moz.build | 16 +-
 dom/payments/moz.build | 54 +-
 dom/performance/moz.build | 68 +-
 dom/permission/moz.build | 18 +-
 dom/plugins/base/moz.build | 114 +-
 dom/plugins/ipc/hangui/moz.build | 20 +-
 dom/plugins/ipc/interpose/moz.build | 6 +-
 dom/plugins/ipc/moz.build | 189 +-
 dom/plugins/test/moz.build | 13 +-
 .../test/testplugin/flashplugin/moz.build | 8 +-
 dom/plugins/test/testplugin/moz.build | 10 +-
 .../test/testplugin/secondplugin/moz.build | 8 +-
 .../test/testplugin/thirdplugin/moz.build | 8 +-
 dom/power/moz.build | 20 +-
 dom/presentation/interfaces/moz.build | 35 +-
 dom/presentation/moz.build | 112 +-
 dom/presentation/provider/moz.build | 22 +-
 dom/prio/moz.build | 12 +-
 dom/prio/test/gtest/moz.build | 10 +-
 dom/promise/moz.build | 36 +-
 dom/prototype/moz.build | 12 +-
 dom/push/moz.build | 60 +-
 dom/push/test/xpcshell/moz.build | 6 +-
 dom/quota/moz.build | 106 +-
 dom/quota/test/gtest/moz.build | 16 +-
 dom/quota/test/moz.build | 54 +-
 dom/reporting/moz.build | 64 +-
 dom/reporting/tests/gtest/moz.build | 6 +-
 dom/script/moz.build | 42 +-
 dom/security/featurepolicy/fuzztest/moz.build | 15 +-
 dom/security/featurepolicy/moz.build | 36 +-
 .../featurepolicy/test/gtest/moz.build | 7 +-
 dom/security/fuzztest/moz.build | 15 +-
 dom/security/moz.build | 110 +-
 dom/security/sanitizer/moz.build | 24 +-
 .../test/csp/file_upgrade_insecure_wsh.py | 2 -
 .../test/csp/file_websocket_self_wsh.py | 2 -
 .../general/file_sec_fetch_websocket_wsh.py | 1 -
 dom/security/test/gtest/moz.build | 18 +-
 .../https-only/file_upgrade_insecure_wsh.py | 2 -
 .../file_main_bug803225_websocket_wsh.py | 2 -
 dom/security/test/moz.build | 34 +-
 dom/serviceworkers/moz.build | 198 +-
 dom/serviceworkers/test/gtest/moz.build | 6 +-
 dom/simpledb/moz.build | 38 +-
 dom/smil/moz.build | 104 +-
 dom/storage/moz.build | 74 +-
 dom/svg/moz.build | 486 +-
 dom/system/android/moz.build | 10 +-
 dom/system/linux/moz.build | 16 +-
 dom/system/mac/moz.build | 13 +-
 dom/system/moz.build | 68 +-
 dom/system/windows/moz.build | 11 +-
 dom/tests/moz.build | 144 +-
 dom/u2f/moz.build | 22 +-
 dom/url/moz.build | 24 +-
 dom/vr/moz.build | 106 +-
 dom/webauthn/moz.build | 102 +-
 dom/webbrowserpersist/moz.build | 50 +-
 dom/webgpu/moz.build | 91 +-
 dom/webidl/moz.build | 1454 +-
 dom/webshare/moz.build | 3 +-
 dom/websocket/moz.build | 16 +-
 .../tests/file_websocket_basic_wsh.py | 12 +-
 ...bsocket_permessage_deflate_disabled_wsh.py | 6 +-
 ...websocket_permessage_deflate_params_wsh.py | 6 +-
 ...bsocket_permessage_deflate_rejected_wsh.py | 6 +-
 .../file_websocket_permessage_deflate_wsh.py | 6 +-
 dom/websocket/tests/file_websocket_wsh.py | 41 +-
 .../websocket_hybi/file_binary-frames_wsh.py | 4 +-
 .../file_check-binary-messages_wsh.py | 14 +-
 dom/workers/moz.build | 128 +-
 dom/workers/remoteworkers/moz.build | 52 +-
 dom/workers/sharedworkers/moz.build | 24 +-
 .../test_service_workers_at_startup.py | 17 +-
 dom/worklet/moz.build | 28 +-
 dom/xhr/moz.build | 36 +-
 dom/xml/moz.build | 48 +-
 dom/xml/resources/moz.build | 2 +-
 dom/xslt/base/moz.build | 26 +-
 dom/xslt/moz.build | 14 +-
 dom/xslt/xml/moz.build | 14 +-
 dom/xslt/xpath/moz.build | 90 +-
 dom/xslt/xslt/moz.build | 86 +-
 dom/xul/moz.build | 106 +-
 editor/composer/moz.build | 70 +-
 editor/libeditor/moz.build | 144 +-
 editor/moz.build | 38 +-
 editor/spellchecker/moz.build | 20 +-
 editor/txmgr/moz.build | 24 +-
 editor/txmgr/tests/moz.build | 4 +-
 extensions/auth/moz.build | 30 +-
 extensions/moz.build | 6 +-
 extensions/permissions/moz.build | 34 +-
 extensions/permissions/test/gtest/moz.build | 5 +-
 extensions/permissions/test/moz.build | 7 +-
 extensions/pref/autoconfig/moz.build | 6 +-
 extensions/pref/autoconfig/src/moz.build | 12 +-
 .../test/marionette/test_autoconfig.py | 91 +-
 extensions/pref/moz.build | 6 +-
 extensions/spellcheck/hunspell/glue/moz.build | 32 +-
 extensions/spellcheck/hunspell/moz.build | 6 +-
 extensions/spellcheck/idl/moz.build | 7 +-
 extensions/spellcheck/locales/moz.build | 4 +-
 extensions/spellcheck/moz.build | 14 +-
 extensions/spellcheck/src/moz.build | 30 +-
 extensions/universalchardet/moz.build | 6 +-
 extensions/universalchardet/tests/moz.build | 3 +-
 gfx/2d/moz.build | 352 +-
 gfx/angle/moz.build | 10 +-
 gfx/angle/targets/angle_common/moz.build | 132 +-
 .../targets/angle_gpu_info_util/moz.build | 92 +-
 gfx/angle/targets/angle_image_util/moz.build | 86 +-
 gfx/angle/targets/libEGL/moz.build | 152 +-
 gfx/angle/targets/libGLESv2/moz.build | 536 +-
 gfx/angle/targets/preprocessor/moz.build | 100 +-
 gfx/angle/targets/translator/moz.build | 336 +-
 gfx/angle/update-angle.py | 479 +-
 gfx/angle/vendor_from_git.py | 49 +-
 gfx/config/moz.build | 26 +-
 gfx/gl/GLConsts.py | 62 +-
 gfx/gl/moz.build | 217 +-
 gfx/ipc/moz.build | 103 +-
 gfx/layers/apz/test/gtest/moz.build | 38 +-
 gfx/layers/apz/test/gtest/mvm/moz.build | 6 +-
 gfx/layers/d3d11/genshaders.py | 83 +-
 gfx/layers/ipc/fuzztest/moz.build | 12 +-
 gfx/layers/moz.build | 1128 +-
 gfx/moz.build | 74 +-
 gfx/src/moz.build | 116 +-
 gfx/tests/gtest/moz.build | 106 +-
 .../test_pref_rollout_workaround.py | 122 +-
 gfx/tests/moz.build | 8 +-
 gfx/thebes/gencjkcisvs.py | 43 +-
 gfx/thebes/moz.build | 384 +-
 gfx/vr/moz.build | 150 +-
 gfx/vr/service/moz.build | 41 +-
 gfx/vr/vrhost/moz.build | 42 +-
 gfx/vr/vrhost/testhost/moz.build | 10 +-
 gfx/webrender_bindings/moz.build | 152 +-
 gfx/wgpu_bindings/moz.build | 15 +-
 hal/moz.build | 136 +-
 image/build/moz.build | 18 +-
 image/decoders/icon/android/moz.build | 6 +-
 image/decoders/icon/gtk/moz.build | 6 +-
 image/decoders/icon/mac/moz.build | 4 +-
 image/decoders/icon/moz.build | 28 +-
 image/decoders/icon/win/moz.build | 6 +-
 image/decoders/moz.build | 56 +-
 image/encoders/bmp/moz.build | 6 +-
 image/encoders/ico/moz.build | 10 +-
 image/encoders/jpeg/moz.build | 4 +-
 image/encoders/moz.build | 8 +-
 image/encoders/png/moz.build | 6 +-
 image/moz.build | 186 +-
 image/test/fuzzing/moz.build | 18 +-
 image/test/gtest/moz.build | 186 +-
 intl/build/moz.build | 14 +-
 intl/gtest/moz.build | 8 +-
 intl/hyphenation/glue/moz.build | 20 +-
 intl/icu_sources_data.py | 361 +-
 intl/l10n/moz.build | 45 +-
 intl/l10n/rust/gtest/moz.build | 4 +-
 intl/locale/android/moz.build | 8 +-
 intl/locale/gtk/moz.build | 8 +-
 intl/locale/mac/moz.build | 8 +-
 intl/locale/moz.build | 109 +-
 intl/locale/props2arrays.py | 12 +-
 intl/locale/tests/gtest/moz.build | 14 +-
 intl/locale/windows/moz.build | 9 +-
 intl/locales/moz.build | 76 +-
 intl/lwbrk/gtest/moz.build | 5 +-
 intl/lwbrk/moz.build | 30 +-
 intl/moz.build | 36 +-
 intl/strres/moz.build | 18 +-
 intl/uconv/moz.build | 26 +-
 intl/uconv/tests/moz.build | 5 +-
 intl/unicharutil/moz.build | 6 +-
 intl/unicharutil/util/moz.build | 32 +-
 ipc/app/moz.build | 74 +-
 ipc/chromium/moz.build | 156 +-
 ipc/glue/moz.build | 320 +-
 ipc/gtest/moz.build | 10 +-
 ipc/ipdl/ipdl.py | 166 +-
 ipc/ipdl/ipdl/__init__.py | 46 +-
 ipc/ipdl/ipdl/ast.py | 123 +-
 ipc/ipdl/ipdl/builtin.py | 81 +-
 ipc/ipdl/ipdl/cgen.py | 55 +-
 ipc/ipdl/ipdl/checker.py | 27 +-
 ipc/ipdl/ipdl/cxx/ast.py | 312 +-
 ipc/ipdl/ipdl/cxx/cgen.py | 321 +-
 ipc/ipdl/ipdl/cxx/code.py | 39 +-
 ipc/ipdl/ipdl/direct_call.py | 916 +-
 ipc/ipdl/ipdl/lower.py | 3860 +++--
 ipc/ipdl/ipdl/parser.py | 321 +-
 ipc/ipdl/ipdl/type.py | 574 +-
 ipc/ipdl/ipdl/util.py | 2 +-
 ipc/ipdl/moz.build | 12 +-
 ipc/ipdl/test/cxx/app/moz.build | 14 +-
 ipc/ipdl/test/cxx/genIPDLUnitTests.py | 146 +-
 ipc/ipdl/test/cxx/moz.build | 306 +-
 ipc/ipdl/test/ipdl/IPDLCompile.py | 45 +-
 ipc/ipdl/test/ipdl/moz.build | 1 -
 ipc/ipdl/test/ipdl/runtests.py | 65 +-
 ipc/ipdl/test/moz.build | 6 +-
 ipc/moz.build | 16 +-
 ipc/mscom/moz.build | 108 +-
 ipc/mscom/mozglue/moz.build | 6 +-
 ipc/mscom/oop/moz.build | 30 +-
 ipc/pull-chromium.py | 34 +-
 ipc/testshell/moz.build | 34 +-
 js/ductwork/debugger/moz.build | 14 +-
 js/ffi.configure | 72 +-
 js/moz.build | 26 +-
 js/moz.configure | 824 +-
 js/src/build/moz.build | 83 +-
 js/src/builtin/embedjs.py | 83 +-
 js/src/builtin/intl/make_intl_data.py | 1860 ++-
 js/src/debugger/moz.build | 29 +-
 js/src/devtools/automation/autospider.py | 645 +-
 js/src/devtools/gc/gc-test.py | 78 +-
 js/src/devtools/rootAnalysis/analyze.py | 396 +-
 js/src/devtools/rootAnalysis/explain.py | 50 +-
 js/src/devtools/rootAnalysis/mach_commands.py | 219 +-
 js/src/devtools/rootAnalysis/run-test.py | 75 +-
 .../rootAnalysis/t/exceptions/test.py | 18 +-
 .../devtools/rootAnalysis/t/hazards/test.py | 86 +-
 .../rootAnalysis/t/sixgill-tree/test.py | 78 +-
 js/src/devtools/rootAnalysis/t/sixgill.py | 70 +-
 .../rootAnalysis/t/suppression/test.py | 18 +-
 .../devtools/rootAnalysis/t/virtual/test.py | 60 +-
 js/src/frontend/GenerateReservedWords.py | 116 +-
 js/src/frontend/align_stack_comment.py | 49 +-
 js/src/frontend/moz.build | 108 +-
 js/src/frontend/smoosh/moz.build | 13 +-
 js/src/fuzz-tests/moz.build | 36 +-
 js/src/gc/GenerateStatsPhases.py | 315 +-
 js/src/gc/moz.build | 63 +-
 js/src/gdb/lib-for-tests/catcher.py | 6 +-
 js/src/gdb/lib-for-tests/prologue.py | 59 +-
 js/src/gdb/moz.build | 66 +-
 js/src/gdb/mozilla/CellHeader.py | 8 +-
 js/src/gdb/mozilla/ExecutableAllocator.py | 19 +-
 js/src/gdb/mozilla/GCCellPtr.py | 73 +-
 js/src/gdb/mozilla/Interpreter.py | 46 +-
 js/src/gdb/mozilla/IonGraph.py | 110 +-
 js/src/gdb/mozilla/JSObject.py | 52 +-
 js/src/gdb/mozilla/JSString.py | 36 +-
 js/src/gdb/mozilla/JSSymbol.py | 8 +-
 js/src/gdb/mozilla/PropertyKey.py | 15 +-
 js/src/gdb/mozilla/Root.py | 24 +-
 js/src/gdb/mozilla/asmjs.py | 12 +-
 js/src/gdb/mozilla/jitsrc.py | 71 +-
 js/src/gdb/mozilla/jsop.py | 10 +-
 js/src/gdb/mozilla/jsval.py | 83 +-
 js/src/gdb/mozilla/prettyprinters.py | 50 +-
 js/src/gdb/mozilla/unwind.py | 151 +-
 js/src/gdb/progressbar.py | 18 +-
 js/src/gdb/run-tests.py | 257 +-
 js/src/gdb/taskpool.py | 46 +-
 js/src/gdb/tests/enum-printers.py | 26 +-
 js/src/gdb/tests/test-ExecutableAllocator.py | 19 +-
 js/src/gdb/tests/test-GCCellPtr.py | 26 +-
 js/src/gdb/tests/test-Interpreter.py | 20 +-
 js/src/gdb/tests/test-JSObject-null.py | 8 +-
 js/src/gdb/tests/test-JSObject.py | 22 +-
 js/src/gdb/tests/test-JSString-null.py | 8 +-
 js/src/gdb/tests/test-JSString-subclasses.py | 4 +-
 js/src/gdb/tests/test-JSString.py | 22 +-
 js/src/gdb/tests/test-JSSymbol.py | 12 +-
 js/src/gdb/tests/test-Root-null.py | 6 +-
 js/src/gdb/tests/test-Root.py | 34 +-
 js/src/gdb/tests/test-asmjs.py | 6 +-
 js/src/gdb/tests/test-jsbytecode.py | 8 +-
 js/src/gdb/tests/test-jsid.py | 24 +-
 js/src/gdb/tests/test-jsop.py | 8 +-
 js/src/gdb/tests/test-jsval.py | 34 +-
 js/src/gdb/tests/test-prettyprinters.py | 30 +-
 js/src/gdb/tests/test-unwind.py | 6 +-
 js/src/gdb/tests/typedef-printers.py | 8 +-
 js/src/irregexp/import-irregexp.py | 73 +-
 js/src/irregexp/moz.build | 54 +-
 js/src/jit-test/jit_test.py | 506 +-
 js/src/jit/GenerateCacheIRFiles.py | 498 +-
 js/src/jit/GenerateOpcodeFiles.py | 25 +-
 js/src/jit/arm/gen-double-encoder-table.py | 6 +-
 js/src/jit/moz.build | 437 +-
 js/src/jsapi-tests/moz.build | 278 +-
 js/src/make-source-package.py | 278 +-
 js/src/moz.build | 866 +-
 js/src/rust/moz.build | 42 +-
 js/src/shell/js-gdb.py | 4 +-
 js/src/shell/moz.build | 50 +-
 js/src/tests/compare_bench.py | 26 +-
 js/src/tests/jstests.py | 624 +-
 js/src/tests/lib/jittests.py | 429 +-
 js/src/tests/lib/manifest.py | 324 +-
 js/src/tests/lib/progressbar.py | 54 +-
 js/src/tests/lib/remote.py | 24 +-
 js/src/tests/lib/results.py | 275 +-
 js/src/tests/lib/structuredlog.py | 34 +-
 js/src/tests/lib/tasks_unix.py | 17 +-
 js/src/tests/lib/tasks_win.py | 32 +-
 js/src/tests/lib/terminal_unix.py | 28 +-
 js/src/tests/lib/terminal_win.py | 36 +-
 js/src/tests/lib/tests.py | 159 +-
 js/src/tests/lib/wptreport.py | 42 +-
 js/src/tests/moz.build | 1 -
 .../make-normalize-generateddata-input.py | 63 +-
 js/src/tests/parsemark.py | 122 +-
 js/src/tests/test/run.py | 82 +-
 js/src/tests/test262-export.py | 128 +-
 js/src/tests/test262-update.py | 276 +-
 js/src/vm/jsopcode.py | 283 +-
 js/src/vm/make_opcode_doc.py | 111 +-
 js/src/wasm/moz.build | 63 +-
 js/sub.configure | 49 +-
 js/xpconnect/idl/moz.build | 15 +-
 js/xpconnect/loader/moz.build | 58 +-
 js/xpconnect/loader/script_cache.py | 43 +-
 js/xpconnect/moz.build | 10 +-
 js/xpconnect/public/moz.build | 5 +-
 js/xpconnect/shell/moz.build | 42 +-
 js/xpconnect/src/moz.build | 102 +-
 js/xpconnect/tests/browser/moz.build | 3 +-
 js/xpconnect/tests/chrome/moz.build | 6 +-
 .../tests/components/native/moz.build | 14 +-
 js/xpconnect/tests/idl/moz.build | 16 +-
 js/xpconnect/tests/marionette/moz.build | 2 +-
 .../marionette/test_preloader_telemetry.py | 32 +-
 js/xpconnect/tests/mochitest/moz.build | 3 +-
 js/xpconnect/tests/moz.build | 32 +-
 js/xpconnect/wrappers/moz.build | 28 +-
 layout/base/gtest/moz.build | 22 +-
 layout/base/moz.build | 282 +-
 layout/base/tests/marionette/selection.py | 178 +-
 .../test_accessiblecaret_cursor_mode.py | 62 +-
 .../test_accessiblecaret_selection_mode.py | 356 +-
 layout/build/moz.build | 102 +-
 layout/forms/moz.build | 74 +-
 layout/generic/FrameClass.py | 2 +-
 layout/generic/FrameClasses.py | 2 -
 layout/generic/GenerateFrameLists.py | 8 +-
 layout/generic/frame-graph.py | 6 +-
 layout/generic/moz.build | 436 +-
 layout/inspector/moz.build | 34 +-
 layout/ipc/moz.build | 30 +-
 layout/mathml/moz.build | 91 +-
 layout/media/moz.build | 22 +-
 layout/media/webrtc/moz.build | 8 +-
 layout/moz.build | 48 +-
 layout/painting/moz.build | 104 +-
 layout/printing/moz.build | 42 +-
 layout/reftests/border-image/gen-refs.py | 660 +-
 .../reftests/fonts/gsubtest/makegsubfonts.py | 422 +-
 layout/reftests/fonts/mark-generate.py | 29 +-
 layout/reftests/fonts/math/generate.py | 127 +-
 layout/reftests/moz.build | 472 +-
 layout/style/GenerateCSSPropertyID.py | 15 +-
 layout/style/GenerateCSSPropsGenerated.py | 54 +-
 .../GenerateCompositorAnimatableProperties.py | 11 +-
 .../GenerateComputedDOMStyleGenerated.py | 7 +-
 .../style/GenerateCountedUnknownProperties.py | 8 +-
 layout/style/GenerateServoCSSPropList.py | 37 +-
 layout/style/moz.build | 521 +-
 layout/style/test/gen-css-properties.py | 6 +-
 .../test/gtest/generate_example_stylesheet.py | 13 +-
 layout/style/test/gtest/moz.build | 18 +-
 layout/style/test/moz.build | 244 +-
 layout/svg/moz.build | 148 +-
 layout/tables/moz.build | 72 +-
 layout/tools/layout-debug/moz.build | 8 +-
 layout/tools/layout-debug/src/moz.build | 14 +-
 layout/tools/layout-debug/ui/moz.build | 2 +-
 layout/tools/recording/moz.build | 6 +-
 layout/tools/reftest/mach_commands.py | 152 +-
 .../reftest/mach_test_package_commands.py | 60 +-
 layout/tools/reftest/moz.build | 42 +-
 layout/tools/reftest/output.py | 117 +-
 layout/tools/reftest/reftest/__init__.py | 104 +-
 layout/tools/reftest/reftestcommandline.py | 818 +-
 layout/tools/reftest/remotereftest.py | 226 +-
 layout/tools/reftest/runreftest.py | 539 +-
 layout/tools/reftest/selftest/conftest.py | 74 +-
 .../selftest/test_python_manifest_parser.py | 12 +-
 .../selftest/test_reftest_manifest_parser.py | 37 +-
 .../reftest/selftest/test_reftest_output.py | 88 +-
 layout/xul/grid/moz.build | 54 +-
 layout/xul/moz.build | 110 +-
 layout/xul/tree/moz.build | 56 +-
 media/gmp-clearkey/0.1/gtest/moz.build | 6 +-
 media/gmp-clearkey/0.1/moz.build | 63 +-
 media/psshparser/gtest/moz.build | 6 +-
 media/psshparser/moz.build | 14 +-
 media/webrtc/moz.build | 10 +-
 media/webrtc/signaling/gtest/moz.build | 67 +-
 memory/build/moz.build | 53 +-
 memory/gtest/moz.build | 14 +-
 memory/moz.build | 14 +-
 memory/moz.configure | 2 +-
 memory/mozalloc/moz.build | 48 +-
 memory/replace/dmd/block_analyzer.py | 207 +-
 memory/replace/dmd/dmd.py | 647 +-
 memory/replace/dmd/moz.build | 30 +-
 memory/replace/dmd/test/moz.build | 23 +-
 memory/replace/dmd/test/scan-test.py | 68 +-
 memory/replace/logalloc/moz.build | 16 +-
 .../replace/logalloc/replay/logalloc_munge.py | 65 +-
 memory/replace/logalloc/replay/moz.build | 59 +-
 memory/replace/moz.build | 18 +-
 memory/replace/phc/moz.build | 24 +-
 memory/replace/phc/test/gtest/moz.build | 6 +-
 memory/replace/phc/test/moz.build | 4 +-
 memory/volatile/moz.build | 20 +-
 memory/volatile/tests/moz.build | 4 +-
 mfbt/moz.build | 308 +-
 mfbt/tests/gtest/moz.build | 26 +-
 mfbt/tests/moz.build | 169 +-
 mobile/android/actors/moz.build | 26 +-
 mobile/android/app/moz.build | 32 +-
 mobile/android/base/moz.build | 10 +-
 .../android/branding/beta/content/moz.build | 2 +-
 .../android/branding/beta/locales/moz.build | 2 +-
 mobile/android/branding/beta/moz.build | 2 +-
 .../branding/nightly/content/moz.build | 2 +-
 .../branding/nightly/locales/moz.build | 2 +-
 mobile/android/branding/nightly/moz.build | 2 +-
 .../branding/official/content/moz.build | 2 +-
 .../branding/official/locales/moz.build | 2 +-
 mobile/android/branding/official/moz.build | 2 +-
 .../branding/unofficial/content/moz.build | 2 +-
 .../branding/unofficial/locales/moz.build | 2 +-
 mobile/android/branding/unofficial/moz.build | 2 +-
 mobile/android/chrome/geckoview/moz.build | 2 +-
 mobile/android/chrome/moz.build | 16 +-
 .../android/components/extensions/moz.build | 10 +-
 .../components/extensions/schemas/moz.build | 2 +-
 mobile/android/components/geckoview/moz.build | 44 +-
 mobile/android/components/moz.build | 12 +-
 mobile/android/debug_sign_tool.py | 173 +-
 mobile/android/fonts/moz.build | 14 +-
 .../src/androidTest/assets/moz.build | 69 +-
 mobile/android/gradle.configure | 318 +-
 mobile/android/gradle.py | 20 +-
 mobile/android/installer/moz.build | 4 +-
 mobile/android/locales/filter.py | 20 +-
 mobile/android/locales/moz.build | 6 +-
 mobile/android/modules/geckoview/moz.build | 62 +-
 mobile/android/modules/moz.build | 8 +-
 mobile/android/moz.build | 44 +-
 mobile/android/moz.configure | 83 +-
 mobile/android/themes/core/moz.build | 2 +-
 mobile/android/themes/geckoview/moz.build | 2 +-
 mobile/locales/filter.py | 20 +-
 mobile/locales/moz.build | 6 +-
 modules/libjar/moz.build | 46 +-
 modules/libjar/zipwriter/moz.build | 20 +-
 modules/libmar/moz.build | 14 +-
 modules/libmar/sign/moz.build | 20 +-
 modules/libmar/src/moz.build | 26 +-
 modules/libmar/tests/moz.build | 6 +-
 modules/libmar/tool/moz.build | 48 +-
 modules/libmar/verify/moz.build | 34 +-
 .../libpref/init/generate_static_pref_list.py | 341 +-
 modules/libpref/moz.build | 234 +-
 modules/libpref/test/gtest/moz.build | 22 +-
 .../test/test_generate_static_pref_list.py | 247 +-
 modules/moz.build | 8 +-
 moz.build | 198 +-
 moz.configure | 820 +-
 mozglue/android/moz.build | 83 +-
 mozglue/baseprofiler/moz.build | 134 +-
 mozglue/build/moz.build | 92 +-
 mozglue/dllservices/gen_dll_blocklist_defs.py | 257 +-
 mozglue/dllservices/moz.build | 62 +-
 mozglue/linker/moz.build | 30 +-
 mozglue/linker/tests/moz.build | 16 +-
 mozglue/misc/interceptor/moz.build | 25 +-
 mozglue/misc/moz.build | 111 +-
 mozglue/moz.build | 22 +-
 .../TestDllBlocklist_AllowByVersion/moz.build | 10 +-
 .../TestDllBlocklist_MatchByName/moz.build | 8 +-
 .../TestDllBlocklist_MatchByVersion/moz.build | 10 +-
 .../TestDllBlocklist_NoOpEntryPoint/moz.build | 10 +-
 mozglue/tests/gtest/moz.build | 15 +-
 mozglue/tests/interceptor/moz.build | 28 +-
 mozglue/tests/moz.build | 50 +-
 netwerk/base/http-sfv/moz.build | 10 +-
 netwerk/base/moz.build | 540 +-
 netwerk/base/mozurl/moz.build | 12 +-
 netwerk/base/rust-helper/moz.build | 6 +-
 netwerk/build/moz.build | 50 +-
 netwerk/cache/moz.build | 58 +-
 netwerk/cache2/moz.build | 74 +-
 netwerk/cookie/moz.build | 78 +-
 netwerk/dns/mdns/libmdns/moz.build | 44 +-
 netwerk/dns/mdns/moz.build | 6 +-
 netwerk/dns/moz.build | 136 +-
 netwerk/dns/prepare_tlds.py | 191 +-
 netwerk/dns/tests/moz.build | 4 +-
 netwerk/dns/tests/unit/data/moz.build | 10 +-
 netwerk/dns/tests/unit/moz.build | 4 +-
 netwerk/ipc/moz.build | 152 +-
 netwerk/locales/moz.build | 2 +-
 netwerk/mime/moz.build | 18 +-
 netwerk/moz.build | 46 +-
 netwerk/protocol/about/moz.build | 28 +-
 netwerk/protocol/data/moz.build | 23 +-
 netwerk/protocol/file/moz.build | 36 +-
 netwerk/protocol/ftp/moz.build | 42 +-
 netwerk/protocol/gio/moz.build | 15 +-
 netwerk/protocol/http/make_incoming_tables.py | 120 +-
 netwerk/protocol/http/make_outgoing_tables.py | 31 +-
 netwerk/protocol/http/moz.build | 298 +-
 netwerk/protocol/moz.build | 8 +-
 netwerk/protocol/res/moz.build | 34 +-
 netwerk/protocol/viewsource/moz.build | 18 +-
 netwerk/protocol/websocket/moz.build | 86 +-
 netwerk/sctp/datachannel/moz.build | 34 +-
 netwerk/socket/moz.build | 45 +-
 netwerk/socket/neqo_glue/moz.build | 13 +-
 netwerk/streamconv/converters/moz.build | 32 +-
 netwerk/streamconv/moz.build | 20 +-
 netwerk/system/android/moz.build | 6 +-
 netwerk/system/linux/moz.build | 6 +-
 netwerk/system/mac/moz.build | 8 +-
 netwerk/system/moz.build | 22 +-
 netwerk/system/netlink/moz.build | 6 +-
 netwerk/system/win32/moz.build | 6 +-
 netwerk/test/fuzz/moz.build | 31 +-
 netwerk/test/gtest/moz.build | 101 +-
 netwerk/test/gtest/parse-ftp/moz.build | 104 +-
 netwerk/test/http3server/moz.build | 12 +-
 netwerk/test/httpserver/moz.build | 10 +-
 netwerk/test/moz.build | 21 +-
 netwerk/test/unit/moz.build | 2 +-
 netwerk/url-classifier/moz.build | 80 +-
 netwerk/wifi/moz.build | 50 +-
 parser/html/moz.build | 160 +-
 parser/htmlparser/moz.build | 70 +-
 parser/moz.build | 9 +-
 parser/prototype/moz.build | 12 +-
 parser/xml/moz.build | 6 +-
 parser/xml/test/moz.build | 2 +-
 python/devtools/migrate-l10n/migrate/main.py | 129 +-
 python/gdbpp/gdbpp/__init__.py | 2 +-
 python/gdbpp/gdbpp/enumset.py | 14 +-
 python/gdbpp/gdbpp/linkedlist.py | 12 +-
 python/gdbpp/gdbpp/owningthread.py | 12 +-
 python/gdbpp/gdbpp/smartptr.py | 32 +-
 python/gdbpp/gdbpp/string.py | 6 +-
 python/gdbpp/gdbpp/tarray.py | 16 +-
 python/gdbpp/gdbpp/thashtable.py | 40 +-
 python/l10n/convert_xul_to_fluent/convert.py | 102 +-
 .../convert_xul_to_fluent/lib/__init__.py | 1 +
 python/l10n/convert_xul_to_fluent/lib/dtd.py | 5 +-
 .../l10n/convert_xul_to_fluent/lib/fluent.py | 10 +-
 .../convert_xul_to_fluent/lib/migration.py | 68 +-
 .../l10n/convert_xul_to_fluent/lib/utils.py | 4 +-
 python/l10n/convert_xul_to_fluent/lib/xul.py | 45 +-
 .../bug_1552333_aboutCertError.py | 50 +-
 .../bug_1634042_page_action_menu.py | 120 +-
 .../bug_1658629_migration_urlbar_actions.py | 40 +-
 .../bug_1667781_preferences_dialogs.py | 56 +-
 .../bug_1668284_settings_change.py | 23 +-
 python/l10n/test_fluent_migrations/fmt.py | 130 +-
 python/lldbutils/lldbutils/__init__.py | 4 +-
 python/lldbutils/lldbutils/general.py | 69 +-
 python/lldbutils/lldbutils/gfx.py | 47 +-
 python/lldbutils/lldbutils/layout.py | 25 +-
 python/lldbutils/lldbutils/utils.py | 36 +-
 python/mach/mach/base.py | 14 +-
 python/mach/mach/commands/commandinfo.py | 243 +-
 python/mach/mach/commands/settings.py | 35 +-
 python/mach/mach/config.py | 81 +-
 python/mach/mach/decorators.py | 135 +-
 python/mach/mach/dispatcher.py | 201 +-
 python/mach/mach/logging.py | 68 +-
 python/mach/mach/main.py | 277 +-
 python/mach/mach/mixin/logging.py | 7 +-
 python/mach/mach/mixin/process.py | 70 +-
 python/mach/mach/registrar.py | 55 +-
 python/mach/mach/sentry.py | 38 +-
 python/mach/mach/telemetry.py | 51 +-
 python/mach/mach/terminal.py | 8 +-
 python/mach/mach/test/conftest.py | 14 +-
 python/mach/mach/test/providers/basic.py | 7 +-
 python/mach/mach/test/providers/commands.py | 14 +-
 python/mach/mach/test/providers/conditions.py | 12 +-
 .../mach/test/providers/conditions_invalid.py | 2 +-
 python/mach/mach/test/providers/throw.py | 8 +-
 python/mach/mach/test/test_commands.py | 27 +-
 python/mach/mach/test/test_conditions.py | 42 +-
 python/mach/mach/test/test_config.py | 120 +-
 python/mach/mach/test/test_dispatcher.py | 16 +-
 python/mach/mach/test/test_entry_point.py | 25 +-
 python/mach/mach/test/test_error_output.py | 21 +-
 python/mach/mach/test/test_logger.py | 15 +-
 python/mach/mach/test/test_mach.py | 10 +-
 python/mach/mach/test/test_telemetry.py | 192 +-
 python/mach/mach/test/zero_microseconds.py | 7 +-
 python/mach/setup.py | 41 +-
python/mach_commands.py | 255 +- python/moz.build | 90 +- python/mozboot/bin/bootstrap.py | 254 +- python/mozboot/mozboot/android.py | 275 +- python/mozboot/mozboot/archlinux.py | 137 +- python/mozboot/mozboot/base.py | 590 +- python/mozboot/mozboot/bootstrap.py | 429 +- python/mozboot/mozboot/centosfedora.py | 100 +- python/mozboot/mozboot/debian.py | 80 +- python/mozboot/mozboot/dump_syms.py | 6 +- python/mozboot/mozboot/fix_stacks.py | 6 +- python/mozboot/mozboot/freebsd.py | 52 +- python/mozboot/mozboot/gentoo.py | 56 +- python/mozboot/mozboot/linux_common.py | 83 +- python/mozboot/mozboot/lucetc.py | 2 +- python/mozboot/mozboot/mach_commands.py | 86 +- python/mozboot/mozboot/minidump_stackwalk.py | 6 +- python/mozboot/mozboot/mozconfig.py | 84 +- python/mozboot/mozboot/mozillabuild.py | 154 +- python/mozboot/mozboot/nasm.py | 6 +- python/mozboot/mozboot/node.py | 8 +- python/mozboot/mozboot/openbsd.py | 34 +- python/mozboot/mozboot/opensuse.py | 96 +- python/mozboot/mozboot/osx.py | 426 +- python/mozboot/mozboot/rust.py | 111 +- python/mozboot/mozboot/sccache.py | 10 +- python/mozboot/mozboot/solus.py | 75 +- python/mozboot/mozboot/static_analysis.py | 6 +- python/mozboot/mozboot/stylo.py | 12 +- python/mozboot/mozboot/test/test_mozconfig.py | 83 +- .../mozboot/mozboot/test/test_write_config.py | 56 +- python/mozboot/mozboot/util.py | 17 +- python/mozboot/mozboot/void.py | 59 +- python/mozboot/mozboot/wasi_sysroot.py | 2 +- python/mozboot/mozboot/windows.py | 108 +- python/mozboot/setup.py | 12 +- python/mozbuild/mozbuild/action/buildlist.py | 15 +- .../mozbuild/mozbuild/action/check_binary.py | 236 +- .../mozbuild/action/download_wpt_manifest.py | 3 +- python/mozbuild/mozbuild/action/dump_env.py | 7 +- .../mozbuild/mozbuild/action/dumpsymbols.py | 126 +- .../mozbuild/action/exe_7z_archive.py | 68 +- .../mozbuild/action/exe_7z_extract.py | 9 +- python/mozbuild/mozbuild/action/fat_aar.py | 101 +- .../mozbuild/mozbuild/action/file_generate.py | 89 +- .../mozbuild/action/generate_searchjson.py | 25 +- .../mozbuild/action/generate_symbols_file.py | 40 +- python/mozbuild/mozbuild/action/install.py | 2 +- python/mozbuild/mozbuild/action/jar_maker.py | 2 +- python/mozbuild/mozbuild/action/l10n_merge.py | 27 +- .../mozbuild/action/langpack_manifest.py | 204 +- python/mozbuild/mozbuild/action/make_dmg.py | 25 +- python/mozbuild/mozbuild/action/make_unzip.py | 7 +- python/mozbuild/mozbuild/action/make_zip.py | 7 +- python/mozbuild/mozbuild/action/node.py | 40 +- .../action/output_searchplugins_list.py | 2 +- .../action/package_generated_sources.py | 15 +- .../mozbuild/action/process_define_files.py | 88 +- .../action/process_install_manifest.py | 90 +- .../mozbuild/action/symbols_archive.py | 77 +- .../mozbuild/mozbuild/action/test_archive.py | 1167 +- python/mozbuild/mozbuild/action/tooltool.py | 961 +- python/mozbuild/mozbuild/action/unpack_dmg.py | 29 +- python/mozbuild/mozbuild/action/util.py | 12 +- python/mozbuild/mozbuild/action/webidl.py | 2 +- python/mozbuild/mozbuild/action/wrap_rustc.py | 57 +- python/mozbuild/mozbuild/action/xpccheck.py | 64 +- .../mozbuild/mozbuild/action/xpidl-process.py | 101 +- python/mozbuild/mozbuild/action/zip.py | 26 +- python/mozbuild/mozbuild/analyze/hg.py | 53 +- .../mozbuild/mozbuild/android_version_code.py | 123 +- python/mozbuild/mozbuild/artifact_builds.py | 38 +- python/mozbuild/mozbuild/artifact_cache.py | 88 +- python/mozbuild/mozbuild/artifact_commands.py | 559 +- python/mozbuild/mozbuild/artifacts.py | 1255 +- 
python/mozbuild/mozbuild/backend/__init__.py | 32 +- python/mozbuild/mozbuild/backend/base.py | 145 +- .../mozbuild/backend/cargo_build_defs.py | 294 +- python/mozbuild/mozbuild/backend/clangd.py | 8 +- python/mozbuild/mozbuild/backend/common.py | 297 +- .../mozbuild/backend/configenvironment.py | 129 +- .../mozbuild/mozbuild/backend/cpp_eclipse.py | 235 +- .../mozbuild/mozbuild/backend/fastermake.py | 196 +- .../mozbuild/backend/mach_commands.py | 44 +- python/mozbuild/mozbuild/backend/make.py | 101 +- .../mozbuild/backend/recursivemake.py | 1225 +- .../mozbuild/backend/static_analysis.py | 11 +- .../mozbuild/backend/test_manifest.py | 55 +- .../mozbuild/mozbuild/backend/visualstudio.py | 497 +- python/mozbuild/mozbuild/base.py | 520 +- python/mozbuild/mozbuild/build_commands.py | 222 +- python/mozbuild/mozbuild/chunkify.py | 4 +- .../mozbuild/mozbuild/code_analysis/moz.build | 4 +- .../mozbuild/codecoverage/chrome_map.py | 76 +- .../mozbuild/codecoverage/lcov_rewriter.py | 321 +- .../mozbuild/codecoverage/manifest_handler.py | 2 +- .../mozbuild/codecoverage/packager.py | 32 +- .../mozbuild/compilation/codecomplete.py | 30 +- .../mozbuild/mozbuild/compilation/database.py | 4 +- python/mozbuild/mozbuild/compilation/util.py | 24 +- .../mozbuild/mozbuild/compilation/warnings.py | 125 +- python/mozbuild/mozbuild/config_status.py | 116 +- .../mozbuild/mozbuild/configure/__init__.py | 553 +- .../mozbuild/configure/check_debug_ranges.py | 40 +- .../mozbuild/mozbuild/configure/constants.py | 150 +- python/mozbuild/mozbuild/configure/help.py | 54 +- .../mozbuild/mozbuild/configure/libstdcxx.py | 51 +- python/mozbuild/mozbuild/configure/lint.py | 98 +- python/mozbuild/mozbuild/configure/options.py | 347 +- python/mozbuild/mozbuild/configure/util.py | 72 +- .../mozbuild/mozbuild/controller/building.py | 977 +- .../mozbuild/mozbuild/controller/clobber.py | 112 +- python/mozbuild/mozbuild/doctor.py | 229 +- python/mozbuild/mozbuild/dotproperties.py | 42 +- .../mozbuild/export_telemetry_schema.py | 14 +- python/mozbuild/mozbuild/faster_daemon.py | 229 +- python/mozbuild/mozbuild/frontend/context.py | 2306 +-- python/mozbuild/mozbuild/frontend/data.py | 736 +- python/mozbuild/mozbuild/frontend/emitter.py | 1416 +- .../mozbuild/mozbuild/frontend/gyp_reader.py | 365 +- .../mozbuild/frontend/mach_commands.py | 200 +- python/mozbuild/mozbuild/frontend/reader.py | 673 +- python/mozbuild/mozbuild/frontend/sandbox.py | 75 +- python/mozbuild/mozbuild/gen_test_backend.py | 10 +- python/mozbuild/mozbuild/generated_sources.py | 46 +- python/mozbuild/mozbuild/gn_processor.py | 444 +- python/mozbuild/mozbuild/html_build_viewer.py | 40 +- python/mozbuild/mozbuild/jar.py | 348 +- python/mozbuild/mozbuild/makeutil.py | 91 +- python/mozbuild/mozbuild/mozconfig.py | 218 +- python/mozbuild/mozbuild/mozinfo.py | 129 +- python/mozbuild/mozbuild/nodeutil.py | 40 +- python/mozbuild/mozbuild/preprocessor.py | 362 +- python/mozbuild/mozbuild/pythonutil.py | 34 +- .../mozbuild/repackaging/application_ini.py | 7 +- python/mozbuild/mozbuild/repackaging/dmg.py | 7 +- .../mozbuild/repackaging/installer.py | 9 +- python/mozbuild/mozbuild/repackaging/mar.py | 45 +- python/mozbuild/mozbuild/repackaging/msi.py | 67 +- python/mozbuild/mozbuild/schedules.py | 84 +- python/mozbuild/mozbuild/shellutil.py | 129 +- python/mozbuild/mozbuild/sphinx.py | 165 +- .../mozbuild/test/action/test_buildlist.py | 22 +- .../test/action/test_langpack_manifest.py | 47 +- .../mozbuild/test/action/test_node.py | 8 +- 
.../action/test_process_install_manifest.py | 42 +- .../mozbuild/mozbuild/test/backend/common.py | 311 +- .../test/backend/data/build/app/moz.build | 34 +- .../test/backend/data/build/moz.build | 40 +- .../test/backend/data/database/moz.build | 8 +- .../test/backend/data/defines/moz.build | 12 +- .../test/backend/data/dist-files/moz.build | 4 +- .../backend/data/exports-generated/moz.build | 16 +- .../test/backend/data/exports/moz.build | 10 +- .../final-target-files-wildcard/moz.build | 2 +- .../backend/data/final_target/both/moz.build | 4 +- .../data/final_target/dist-subdir/moz.build | 2 +- .../data/final_target/final-target/moz.build | 2 +- .../test/backend/data/final_target/moz.build | 2 +- .../data/final_target/xpi-name/moz.build | 2 +- .../data/generated-files-force/moz.build | 12 +- .../backend/data/generated-files/moz.build | 12 +- .../backend/data/generated_includes/moz.build | 2 +- .../test/backend/data/gn-processor/moz.build | 36 +- .../test/backend/data/host-defines/moz.build | 12 +- .../data/host-rust-library-features/moz.build | 9 +- .../backend/data/host-rust-library/moz.build | 9 +- .../install_substitute_config_files/moz.build | 2 +- .../sub/moz.build | 4 +- .../backend/data/ipdl_sources/bar/moz.build | 6 +- .../backend/data/ipdl_sources/foo/moz.build | 6 +- .../test/backend/data/ipdl_sources/moz.build | 4 +- .../test/backend/data/jar-manifests/moz.build | 3 +- .../test/backend/data/linkage/moz.build | 12 +- .../test/backend/data/linkage/prog/moz.build | 8 +- .../backend/data/linkage/prog/qux/moz.build | 4 +- .../backend/data/linkage/real/foo/moz.build | 7 +- .../test/backend/data/linkage/real/moz.build | 8 +- .../backend/data/linkage/shared/baz/moz.build | 4 +- .../backend/data/linkage/shared/moz.build | 10 +- .../linkage/static/bar/bar_helper/moz.build | 4 +- .../backend/data/linkage/static/bar/moz.build | 8 +- .../backend/data/linkage/static/moz.build | 8 +- .../backend/data/local_includes/moz.build | 2 +- .../backend/data/localized-files/moz.build | 6 +- .../localized-generated-files-AB_CD/moz.build | 30 +- .../localized-generated-files-force/moz.build | 20 +- .../data/localized-generated-files/moz.build | 12 +- .../backend/data/localized-pp-files/moz.build | 4 +- .../data/prog-lib-c-only/c-library/moz.build | 4 +- .../data/prog-lib-c-only/c-program/moz.build | 4 +- .../c-simple-programs/moz.build | 2 +- .../prog-lib-c-only/cxx-library/moz.build | 6 +- .../prog-lib-c-only/cxx-program/moz.build | 4 +- .../cxx-simple-programs/moz.build | 2 +- .../backend/data/prog-lib-c-only/moz.build | 20 +- .../prog-lib-c-only/simple-programs/moz.build | 1 - .../data/program-paths/dist-bin/moz.build | 2 +- .../data/program-paths/dist-subdir/moz.build | 4 +- .../data/program-paths/final-target/moz.build | 4 +- .../test/backend/data/program-paths/moz.build | 10 +- .../program-paths/not-installed/moz.build | 2 +- .../test/backend/data/resources/moz.build | 12 +- .../data/rust-library-features/moz.build | 7 +- .../test/backend/data/rust-library/moz.build | 7 +- .../backend/data/rust-programs/code/moz.build | 4 +- .../test/backend/data/rust-programs/moz.build | 2 +- .../test/backend/data/sources/moz.build | 22 +- .../test/backend/data/stub0/dir1/moz.build | 2 - .../test/backend/data/stub0/dir2/moz.build | 1 - .../test/backend/data/stub0/dir3/moz.build | 1 - .../test/backend/data/stub0/moz.build | 6 +- .../data/substitute_config_files/moz.build | 2 +- .../test-manifest-shared-support/moz.build | 4 +- .../test-manifests-backend-sources/moz.build | 2 +- .../moz.build | 4 +- 
.../test-manifests-package-tests/moz.build | 4 +- .../data/test-manifests-written/moz.build | 6 +- .../test-support-binaries-tracked/moz.build | 2 +- .../src/moz.build | 6 +- .../test/moz.build | 23 +- .../test/backend/data/test_config/moz.build | 2 +- .../backend/data/variable_passthru/moz.build | 10 +- .../backend/data/visual-studio/dir1/moz.build | 10 +- .../test/backend/data/visual-studio/moz.build | 4 +- .../test/backend/data/xpidl/moz.build | 4 +- .../test/backend/test_configenvironment.py | 53 +- .../mozbuild/test/backend/test_database.py | 20 +- .../mozbuild/test/backend/test_fastermake.py | 23 +- .../test/backend/test_gn_processor.py | 219 +- .../backend/test_partialconfigenvironment.py | 130 +- .../test/backend/test_test_manifest.py | 75 +- .../test/backend/test_visualstudio.py | 55 +- .../test/code_analysis/test_mach_commands.py | 7 +- python/mozbuild/mozbuild/test/common.py | 54 +- .../test/compilation/test_warnings.py | 131 +- .../mozbuild/test/configure/common.py | 119 +- .../test/configure/data/decorators.configure | 17 +- .../test/configure/data/extra.configure | 8 +- .../configure/data/imply_option/imm.configure | 23 +- .../data/imply_option/infer.configure | 16 +- .../data/imply_option/infer_ko.configure | 21 +- .../data/imply_option/negative.configure | 24 +- .../data/imply_option/simple.configure | 16 +- .../data/imply_option/values.configure | 16 +- .../test/configure/data/included.configure | 27 +- .../test/configure/data/moz.configure | 124 +- .../test/configure/data/set_config.configure | 30 +- .../test/configure/data/set_define.configure | 30 +- .../test/configure/data/subprocess.configure | 21 +- .../mozbuild/mozbuild/test/configure/lint.py | 36 +- .../test/configure/test_checks_configure.py | 1150 +- .../test/configure/test_compile_checks.py | 438 +- .../mozbuild/test/configure/test_configure.py | 1660 ++- .../mozbuild/test/configure/test_lint.py | 266 +- .../test/configure/test_moz_configure.py | 149 +- .../mozbuild/test/configure/test_options.py | 1043 +- .../configure/test_toolchain_configure.py | 2690 ++-- .../test/configure/test_toolchain_helpers.py | 498 +- .../configure/test_toolkit_moz_configure.py | 147 +- .../mozbuild/test/configure/test_util.py | 483 +- .../test/controller/test_ccachestats.py | 10 +- .../mozbuild/test/controller/test_clobber.py | 66 +- .../data/allow-compiler-warnings/moz.build | 13 +- .../test/frontend/data/asflags/moz.build | 13 +- .../frontend/data/branding-files/moz.build | 9 +- .../frontend/data/compile-defines/moz.build | 12 +- .../compile-flags-field-validation/moz.build | 10 +- .../data/compile-flags-templates/moz.build | 15 +- .../compile-flags-type-validation/moz.build | 10 +- .../frontend/data/compile-flags/moz.build | 16 +- .../frontend/data/compile-includes/moz.build | 10 +- .../data/config-file-substitution/moz.build | 4 +- .../moz.build | 7 +- .../test/frontend/data/defines/moz.build | 12 +- .../data/disable-compiler-warnings/moz.build | 11 +- .../data/disable-stl-wrapping/moz.build | 12 +- .../data/dist-files-missing/moz.build | 4 +- .../test/frontend/data/dist-files/moz.build | 4 +- .../frontend/data/exports-generated/moz.build | 8 +- .../data/exports-missing-generated/moz.build | 4 +- .../frontend/data/exports-missing/moz.build | 6 +- .../test/frontend/data/exports/moz.build | 20 +- .../bug_component/bad-assignment/moz.build | 4 +- .../different-matchers/moz.build | 8 +- .../files-info/bug_component/final/moz.build | 4 +- .../final/subcomponent/moz.build | 4 +- .../data/files-info/bug_component/moz.build | 4 +- 
.../files-info/bug_component/simple/moz.build | 4 +- .../files-info/bug_component/static/moz.build | 8 +- .../moz.build | 2 +- .../generated-files-absolute-script/moz.build | 6 +- .../data/generated-files-force/moz.build | 10 +- .../generated-files-method-names/moz.build | 10 +- .../data/generated-files-no-inputs/moz.build | 8 +- .../moz.build | 6 +- .../data/generated-files-no-script/moz.build | 6 +- .../frontend/data/generated-files/moz.build | 6 +- .../frontend/data/generated-sources/moz.build | 24 +- .../data/generated_includes/moz.build | 2 +- .../data/host-compile-flags/moz.build | 22 +- .../host-program-paths/final-target/moz.build | 4 +- .../host-program-paths/installed/moz.build | 2 +- .../data/host-program-paths/moz.build | 8 +- .../not-installed/moz.build | 2 +- .../data/host-rust-libraries/moz.build | 9 +- .../host-rust-program-no-cargo-toml/moz.build | 2 +- .../moz.build | 2 +- .../data/host-rust-programs/moz.build | 2 +- .../test/frontend/data/host-sources/moz.build | 18 +- .../data/include-basic/included.build | 2 +- .../frontend/data/include-basic/moz.build | 4 +- .../data/include-file-stack/included-1.build | 2 +- .../data/include-file-stack/moz.build | 2 +- .../frontend/data/include-missing/moz.build | 2 +- .../include-outside-topsrcdir/relative.build | 2 +- .../child/child.build | 2 +- .../child/child2.build | 2 +- .../child/grandchild/grandchild.build | 2 +- .../include-relative-from-child/parent.build | 2 +- .../data/include-topsrcdir-relative/moz.build | 2 +- .../include-topsrcdir-relative/sibling.build | 2 +- .../inheriting-variables/foo/baz/moz.build | 2 +- .../data/inheriting-variables/foo/moz.build | 2 +- .../data/inheriting-variables/moz.build | 4 +- .../frontend/data/ipdl_sources/bar/moz.build | 6 +- .../frontend/data/ipdl_sources/foo/moz.build | 6 +- .../test/frontend/data/ipdl_sources/moz.build | 4 +- .../jar-manifests-multiple-files/moz.build | 3 +- .../frontend/data/jar-manifests/moz.build | 2 +- .../data/library-defines/liba/moz.build | 4 +- .../data/library-defines/libb/moz.build | 8 +- .../data/library-defines/libc/moz.build | 4 +- .../data/library-defines/libd/moz.build | 2 +- .../frontend/data/library-defines/moz.build | 6 +- .../test/frontend/data/link-flags/moz.build | 12 +- .../data/local_includes-filename/moz.build | 2 +- .../local_includes-invalid/objdir/moz.build | 2 +- .../local_includes-invalid/srcdir/moz.build | 2 +- .../frontend/data/local_includes/moz.build | 2 +- .../localized-files-from-generated/moz.build | 4 +- .../data/localized-files-no-en-us/moz.build | 6 +- .../moz.build | 4 +- .../frontend/data/localized-files/moz.build | 6 +- .../moz.build | 4 +- .../localized-generated-files-force/moz.build | 4 +- .../data/localized-generated-files/moz.build | 2 +- .../data/localized-pp-files/moz.build | 4 +- .../data/missing-local-includes/moz.build | 2 +- .../frontend/data/missing-xpidl/moz.build | 4 +- .../data/multiple-rust-libraries/moz.build | 16 +- .../multiple-rust-libraries/rust1/moz.build | 2 +- .../multiple-rust-libraries/rust2/moz.build | 2 +- .../data/program-paths/dist-bin/moz.build | 2 +- .../data/program-paths/dist-subdir/moz.build | 4 +- .../data/program-paths/final-target/moz.build | 4 +- .../frontend/data/program-paths/moz.build | 10 +- .../program-paths/not-installed/moz.build | 2 +- .../test/frontend/data/program/moz.build | 10 +- .../data/reader-error-bad-dir/moz.build | 2 +- .../data/reader-error-error-func/moz.build | 3 +- .../data/reader-error-included-from/moz.build | 2 +- .../reader-error-missing-include/moz.build | 2 +- 
.../reader-error-outside-topsrcdir/moz.build | 2 +- .../data/reader-error-repeated-dir/moz.build | 4 +- .../reader-error-write-bad-value/moz.build | 2 +- .../moz.build | 4 +- .../data/resolved-flags-error/moz.build | 14 +- .../data/rust-library-dash-folding/moz.build | 7 +- .../rust-library-duplicate-features/moz.build | 7 +- .../data/rust-library-features/moz.build | 7 +- .../rust-library-invalid-crate-type/moz.build | 7 +- .../data/rust-library-name-mismatch/moz.build | 7 +- .../data/rust-library-no-cargo-toml/moz.build | 7 +- .../rust-library-no-lib-section/moz.build | 7 +- .../data/rust-program-no-cargo-toml/moz.build | 2 +- .../rust-program-nonexistent-name/moz.build | 2 +- .../frontend/data/rust-programs/moz.build | 2 +- .../test/frontend/data/schedules/moz.build | 20 +- .../frontend/data/schedules/subd/moz.build | 8 +- .../frontend/data/sources-just-c/moz.build | 16 +- .../test/frontend/data/sources/moz.build | 24 +- .../data/test-install-shared-lib/moz.build | 8 +- .../data/test-linkables-cxx-link/moz.build | 9 +- .../test-linkables-cxx-link/one/moz.build | 6 +- .../test-linkables-cxx-link/three/moz.build | 4 +- .../test-linkables-cxx-link/two/moz.build | 6 +- .../test-manifest-absolute-support/moz.build | 2 +- .../data/test-manifest-dupes/moz.build | 2 +- .../test-manifest-emitted-includes/moz.build | 2 +- .../data/test-manifest-empty/moz.build | 2 +- .../test-manifest-install-includes/moz.build | 2 +- .../data/test-manifest-just-support/moz.build | 2 +- .../test-manifest-keys-extracted/moz.build | 18 +- .../test-manifest-missing-manifest/moz.build | 2 +- .../moz.build | 2 +- .../test-manifest-missing-test-file/moz.build | 2 +- .../moz.build | 2 +- .../test-manifest-shared-missing/moz.build | 4 +- .../test-manifest-shared-support/moz.build | 4 +- .../moz.build | 2 +- .../moz.build | 6 +- .../data/test-symbols-file-objdir/moz.build | 10 +- .../frontend/data/test-symbols-file/moz.build | 6 +- .../data/traversal-all-vars/moz.build | 4 +- .../traversal-outside-topsrcdir/moz.build | 2 +- .../traversal-relative-dirs/foo/moz.build | 2 +- .../data/traversal-relative-dirs/moz.build | 2 +- .../traversal-repeated-dirs/bar/moz.build | 2 +- .../traversal-repeated-dirs/foo/moz.build | 2 +- .../data/traversal-repeated-dirs/moz.build | 2 +- .../data/traversal-simple/foo/moz.build | 2 +- .../frontend/data/traversal-simple/moz.build | 2 +- .../unified-sources-non-unified/moz.build | 20 +- .../frontend/data/unified-sources/moz.build | 20 +- .../test/frontend/data/use-yasm/moz.build | 6 +- .../frontend/data/variable-passthru/moz.build | 10 +- .../frontend/data/visibility-flags/moz.build | 12 +- .../data/wasm-compile-flags/moz.build | 18 +- .../test/frontend/data/wasm-sources/moz.build | 10 +- .../data/xpidl-module-no-sources/moz.build | 2 +- .../mozbuild/test/frontend/test_context.py | 552 +- .../mozbuild/test/frontend/test_emitter.py | 1566 ++- .../mozbuild/test/frontend/test_namespaces.py | 138 +- .../mozbuild/test/frontend/test_reader.py | 471 +- .../mozbuild/test/frontend/test_sandbox.py | 370 +- .../test/test_android_version_code.py | 105 +- .../mozbuild/test/test_artifact_cache.py | 39 +- python/mozbuild/mozbuild/test/test_base.py | 274 +- .../mozbuild/mozbuild/test/test_containers.py | 88 +- .../mozbuild/test/test_dotproperties.py | 141 +- .../mozbuild/mozbuild/test/test_expression.py | 43 +- .../mozbuild/mozbuild/test/test_jarmaker.py | 384 +- .../mozbuild/mozbuild/test/test_licenses.py | 20 +- .../mozbuild/test/test_line_endings.py | 20 +- .../mozbuild/mozbuild/test/test_makeutil.py | 167 +- 
.../mozbuild/mozbuild/test/test_manifest.py | 54 +- .../mozbuild/mozbuild/test/test_mozconfig.py | 228 +- python/mozbuild/mozbuild/test/test_mozinfo.py | 349 +- .../mozbuild/test/test_preprocessor.py | 1131 +- .../mozbuild/mozbuild/test/test_pythonutil.py | 12 +- python/mozbuild/mozbuild/test/test_util.py | 473 +- .../mozbuild/test/test_util_fileavoidwrite.py | 1 + python/mozbuild/mozbuild/testing.py | 129 +- python/mozbuild/mozbuild/util.py | 415 +- .../mozbuild/mozbuild/vendor/mach_commands.py | 2 +- .../mozbuild/vendor/vendor_manifest.py | 9 +- .../mozbuild/mozbuild/vendor/vendor_python.py | 12 +- .../mozbuild/mozbuild/vendor/vendor_rust.py | 21 +- python/mozbuild/mozbuild/virtualenv.py | 347 +- python/mozbuild/mozpack/archive.py | 26 +- python/mozbuild/mozpack/chrome/flags.py | 181 +- python/mozbuild/mozpack/chrome/manifest.py | 213 +- python/mozbuild/mozpack/copier.py | 191 +- python/mozbuild/mozpack/dmg.py | 188 +- python/mozbuild/mozpack/errors.py | 13 +- python/mozbuild/mozpack/executables.py | 111 +- python/mozbuild/mozpack/files.py | 447 +- python/mozbuild/mozpack/manifests.py | 149 +- python/mozbuild/mozpack/mozjar.py | 483 +- python/mozbuild/mozpack/packager/__init__.py | 212 +- python/mozbuild/mozpack/packager/formats.py | 155 +- python/mozbuild/mozpack/packager/l10n.py | 75 +- python/mozbuild/mozpack/packager/unpack.py | 87 +- python/mozbuild/mozpack/path.py | 89 +- .../mozpack/test/support/minify_js_verify.py | 4 +- python/mozbuild/mozpack/test/test_archive.py | 112 +- .../mozpack/test/test_chrome_flags.py | 215 +- .../mozpack/test/test_chrome_manifest.py | 209 +- python/mozbuild/mozpack/test/test_copier.py | 430 +- python/mozbuild/mozpack/test/test_errors.py | 77 +- python/mozbuild/mozpack/test/test_files.py | 1038 +- .../mozbuild/mozpack/test/test_manifests.py | 335 +- python/mozbuild/mozpack/test/test_mozjar.py | 320 +- python/mozbuild/mozpack/test/test_packager.py | 685 +- .../mozpack/test/test_packager_formats.py | 629 +- .../mozpack/test/test_packager_l10n.py | 200 +- .../mozpack/test/test_packager_unpack.py | 20 +- python/mozbuild/mozpack/test/test_path.py | 161 +- python/mozbuild/setup.py | 28 +- python/mozlint/mozlint/cli.py | 330 +- python/mozlint/mozlint/editor.py | 30 +- python/mozlint/mozlint/errors.py | 6 +- python/mozlint/mozlint/formatters/__init__.py | 12 +- python/mozlint/mozlint/formatters/compact.py | 9 +- python/mozlint/mozlint/formatters/summary.py | 29 +- .../mozlint/mozlint/formatters/treeherder.py | 5 +- python/mozlint/mozlint/formatters/unix.py | 3 +- python/mozlint/mozlint/parser.py | 93 +- python/mozlint/mozlint/pathutils.py | 49 +- python/mozlint/mozlint/result.py | 3 +- python/mozlint/mozlint/roller.py | 116 +- python/mozlint/mozlint/types.py | 60 +- python/mozlint/mozlint/util/implementation.py | 5 +- python/mozlint/mozlint/util/pip.py | 10 +- python/mozlint/mozlint/util/string.py | 4 +- python/mozlint/setup.py | 24 +- python/mozlint/test/conftest.py | 41 +- python/mozlint/test/linters/external.py | 32 +- python/mozlint/test/linters/global_payload.py | 17 +- python/mozlint/test/runcli.py | 6 +- python/mozlint/test/test_cli.py | 55 +- python/mozlint/test/test_editor.py | 54 +- python/mozlint/test/test_formatters.py | 4 +- python/mozlint/test/test_parser.py | 64 +- python/mozlint/test/test_pathutils.py | 200 +- python/mozlint/test/test_result.py | 12 +- python/mozlint/test/test_roller.py | 150 +- python/mozlint/test/test_types.py | 37 +- python/mozrelease/mozrelease/balrog.py | 61 +- .../mozrelease/mozrelease/buglist_creator.py | 162 +- 
python/mozrelease/mozrelease/chunking.py | 5 +- python/mozrelease/mozrelease/mach_commands.py | 110 +- .../mozrelease/mozrelease/partner_repack.py | 559 +- python/mozrelease/mozrelease/paths.py | 55 +- python/mozrelease/mozrelease/platforms.py | 15 +- python/mozrelease/mozrelease/update_verify.py | 180 +- python/mozrelease/mozrelease/util.py | 4 +- python/mozrelease/mozrelease/versions.py | 77 +- python/mozrelease/setup.py | 24 +- python/mozrelease/test/test_balrog.py | 49 +- .../mozrelease/test/test_buglist_creator.py | 199 +- python/mozrelease/test/test_update_verify.py | 312 +- python/mozrelease/test/test_versions.py | 118 +- python/mozterm/mozterm/terminal.py | 6 +- python/mozterm/mozterm/widgets.py | 6 +- python/mozterm/setup.py | 34 +- python/mozterm/test/test_terminal.py | 8 +- python/mozterm/test/test_widgets.py | 20 +- .../mozversioncontrol/__init__.py | 324 +- .../mozversioncontrol/repoupdate.py | 22 +- python/mozversioncontrol/setup.py | 30 +- python/mozversioncontrol/test/conftest.py | 14 +- python/mozversioncontrol/test/test_commit.py | 40 +- .../test/test_context_manager.py | 6 +- .../test/test_push_to_try.py | 41 +- .../test/test_workdir_outgoing.py | 66 +- .../test/test_working_directory.py | 10 +- remote/mach_commands.py | 320 +- remote/moz.build | 4 +- security/apps/gen_cert_header.py | 15 +- security/apps/moz.build | 47 +- security/certverifier/moz.build | 46 +- security/certverifier/tests/gtest/moz.build | 10 +- security/ct/moz.build | 56 +- security/ct/tests/gtest/createSTHTestData.py | 46 +- security/ct/tests/gtest/moz.build | 74 +- security/generate_certdata.py | 9 +- security/generate_mapfile.py | 27 +- security/manager/locales/moz.build | 2 +- security/manager/moz.build | 6 +- security/manager/pki/moz.build | 20 +- security/manager/pki/resources/moz.build | 2 +- security/manager/ssl/moz.build | 316 +- .../osclientcerts/dynamic-library/moz.build | 24 +- security/manager/ssl/osclientcerts/moz.build | 4 +- security/manager/ssl/tests/gtest/moz.build | 30 +- .../ssl/tests/mochitest/browser/moz.build | 8 +- .../tests/mochitest/mixedcontent/moz.build | 3 +- .../manager/ssl/tests/mochitest/moz.build | 6 +- .../stricttransportsecurity/moz.build | 5 +- security/manager/ssl/tests/moz.build | 12 +- .../ssl/tests/unit/bad_certs/moz.build | 12 +- security/manager/ssl/tests/unit/moz.build | 62 +- .../ssl/tests/unit/ocsp_certs/moz.build | 12 +- .../ssl/tests/unit/pkcs11testmodule/moz.build | 8 +- security/manager/ssl/tests/unit/pycert.py | 379 +- security/manager/ssl/tests/unit/pycms.py | 122 +- security/manager/ssl/tests/unit/pyct.py | 32 +- security/manager/ssl/tests/unit/pykey.py | 922 +- security/manager/ssl/tests/unit/sign_app.py | 258 +- .../unit/test_baseline_requirements/moz.build | 6 +- .../tests/unit/test_certDB_import/moz.build | 6 +- .../ssl/tests/unit/test_cert_eku/moz.build | 6 +- .../unit/test_cert_embedded_null/moz.build | 6 +- .../tests/unit/test_cert_keyUsage/moz.build | 6 +- .../ssl/tests/unit/test_cert_sha1/moz.build | 6 +- .../tests/unit/test_cert_signatures/moz.build | 6 +- .../ssl/tests/unit/test_cert_trust/moz.build | 6 +- .../ssl/tests/unit/test_cert_utf8/moz.build | 6 +- .../tests/unit/test_cert_version/generate.py | 75 +- .../tests/unit/test_cert_version/moz.build | 6 +- .../tests/unit/test_content_signing/moz.build | 6 +- .../tests/unit/test_content_signing/pysign.py | 6 +- .../manager/ssl/tests/unit/test_ct/moz.build | 12 +- .../unit/test_delegated_credentials/moz.build | 12 +- .../ssl/tests/unit/test_ev_certs/moz.build | 12 +- .../moz.build | 6 +- 
.../unit/test_intermediate_preloads/moz.build | 12 +- .../ssl/tests/unit/test_keysize/moz.build | 6 +- .../ssl/tests/unit/test_keysize_ev/moz.build | 12 +- .../unit/test_missing_intermediate/moz.build | 6 +- .../unit/test_name_constraints/moz.build | 6 +- .../ssl/tests/unit/test_ocsp_url/moz.build | 12 +- .../ssl/tests/unit/test_onecrl/moz.build | 6 +- .../ssl/tests/unit/test_sanctions/moz.build | 6 +- .../ssl/tests/unit/test_signed_apps/moz.build | 35 +- .../tests/unit/test_startcom_wosign/moz.build | 6 +- .../ssl/tests/unit/test_validity/moz.build | 12 +- .../ssl/tests/unit/tlsserver/cmd/moz.build | 29 +- .../ssl/tests/unit/tlsserver/lib/moz.build | 8 +- .../ssl/tests/unit/tlsserver/moz.build | 2 +- .../crtshToIdentifyingStruct.py | 55 +- security/manager/tools/getCTKnownLogs.py | 125 +- security/moz.build | 163 +- security/sandbox/common/moz.build | 34 +- security/sandbox/linux/broker/moz.build | 28 +- security/sandbox/linux/glue/moz.build | 24 +- security/sandbox/linux/gtest/moz.build | 16 +- security/sandbox/linux/interfaces/moz.build | 4 +- security/sandbox/linux/launch/moz.build | 18 +- security/sandbox/linux/moz.build | 188 +- security/sandbox/linux/reporter/moz.build | 18 +- security/sandbox/mac/moz.build | 16 +- security/sandbox/moz.build | 335 +- security/sandbox/test/mac_register_font.py | 58 +- .../win/src/remotesandboxbroker/moz.build | 32 +- .../sandbox/win/src/sandboxbroker/moz.build | 12 +- .../win/src/sandboxpermissions/moz.build | 10 +- .../sandbox/win/src/sandboxtarget/moz.build | 12 +- services/automation/moz.build | 10 +- services/common/moz.build | 46 +- services/common/tests/moz.build | 6 +- services/crypto/component/moz.build | 10 +- services/crypto/moz.build | 18 +- services/fxaccounts/moz.build | 52 +- services/fxaccounts/rust-bridge/moz.build | 8 +- services/interfaces/moz.build | 14 +- services/moz.build | 26 +- services/settings/dumps/blocklists/moz.build | 24 +- services/settings/dumps/main/moz.build | 26 +- services/settings/dumps/moz.build | 8 +- services/settings/dumps/pinning/moz.build | 6 +- .../settings/dumps/security-state/moz.build | 10 +- services/settings/moz.build | 36 +- services/settings/test/moz.build | 4 +- services/settings/test/unit/moz.build | 4 +- .../test_remote_settings_signatures/moz.build | 6 +- services/sync/locales/moz.build | 2 +- services/sync/moz.build | 92 +- .../style/counter_style/update_predefined.py | 8 +- servo/components/style/gecko/regen_atoms.py | 80 +- servo/components/style/properties/build.py | 47 +- servo/components/style/properties/data.py | 390 +- startupcache/moz.build | 22 +- startupcache/test/moz.build | 4 +- storage/build/moz.build | 8 +- storage/moz.build | 122 +- storage/test/gtest/moz.build | 36 +- storage/test/moz.build | 5 +- .../extract_locales_from_l10n_json.py | 4 +- .../extract_locales_from_l10n_json.py | 4 +- .../visual-metrics/run-visual-metrics.py | 47 +- .../docker/visual-metrics/similarity.py | 39 +- taskcluster/mach_commands.py | 646 +- taskcluster/moz.build | 16 +- taskcluster/scripts/misc/fetch-chromium.py | 157 +- taskcluster/scripts/misc/repack_rust.py | 428 +- taskcluster/scripts/misc/verify-updatebot.py | 77 +- taskcluster/taskgraph/__init__.py | 2 +- taskcluster/taskgraph/actions/__init__.py | 10 +- taskcluster/taskgraph/actions/add_new_jobs.py | 49 +- taskcluster/taskgraph/actions/add_talos.py | 47 +- taskcluster/taskgraph/actions/backfill.py | 232 +- taskcluster/taskgraph/actions/cancel.py | 16 +- taskcluster/taskgraph/actions/cancel_all.py | 27 +- 
.../taskgraph/actions/create_interactive.py | 139 +- .../taskgraph/actions/gecko_profile.py | 94 +- taskcluster/taskgraph/actions/isolate_test.py | 185 +- .../taskgraph/actions/merge_automation.py | 6 +- taskcluster/taskgraph/actions/openh264.py | 17 +- taskcluster/taskgraph/actions/purge_caches.py | 22 +- taskcluster/taskgraph/actions/registry.py | 199 +- .../taskgraph/actions/release_promotion.py | 425 +- taskcluster/taskgraph/actions/retrigger.py | 216 +- .../taskgraph/actions/retrigger_custom.py | 207 +- .../taskgraph/actions/run_missing_tests.py | 18 +- taskcluster/taskgraph/actions/util.py | 216 +- taskcluster/taskgraph/config.py | 225 +- taskcluster/taskgraph/create.py | 30 +- taskcluster/taskgraph/decision.py | 458 +- taskcluster/taskgraph/docker.py | 91 +- taskcluster/taskgraph/files_changed.py | 46 +- taskcluster/taskgraph/filter_tasks.py | 10 +- taskcluster/taskgraph/generator.py | 220 +- taskcluster/taskgraph/graph.py | 16 +- taskcluster/taskgraph/loader/multi_dep.py | 140 +- taskcluster/taskgraph/loader/single_dep.py | 28 +- taskcluster/taskgraph/loader/test.py | 92 +- taskcluster/taskgraph/loader/transform.py | 15 +- taskcluster/taskgraph/morph.py | 25 +- taskcluster/taskgraph/optimize/__init__.py | 332 +- taskcluster/taskgraph/optimize/backstop.py | 2 + taskcluster/taskgraph/optimize/bugbug.py | 59 +- taskcluster/taskgraph/optimize/schema.py | 26 +- taskcluster/taskgraph/optimize/strategies.py | 32 +- taskcluster/taskgraph/parameters.py | 247 +- taskcluster/taskgraph/target_tasks.py | 810 +- taskcluster/taskgraph/task.py | 50 +- taskcluster/taskgraph/taskgraph.py | 8 +- .../taskgraph/test/test_actions_util.py | 99 +- taskcluster/taskgraph/test/test_create.py | 48 +- taskcluster/taskgraph/test/test_decision.py | 135 +- .../taskgraph/test/test_files_changed.py | 46 +- taskcluster/taskgraph/test/test_generator.py | 215 +- taskcluster/taskgraph/test/test_graph.py | 209 +- taskcluster/taskgraph/test/test_morph.py | 64 +- taskcluster/taskgraph/test/test_optimize.py | 248 +- .../test/test_optimize_strategies.py | 519 +- taskcluster/taskgraph/test/test_parameters.py | 129 +- .../taskgraph/test/test_target_tasks.py | 136 +- .../taskgraph/test/test_taskcluster_yml.py | 1 - taskcluster/taskgraph/test/test_taskgraph.py | 161 +- .../taskgraph/test/test_transforms_base.py | 24 +- .../taskgraph/test/test_transforms_job.py | 48 +- .../taskgraph/test/test_try_option_syntax.py | 381 +- .../taskgraph/test/test_util_attributes.py | 93 +- .../taskgraph/test/test_util_backstop.py | 28 +- .../taskgraph/test/test_util_bugbug.py | 6 +- .../taskgraph/test/test_util_chunking.py | 350 +- .../taskgraph/test/test_util_docker.py | 182 +- .../test/test_util_parameterization.py | 154 +- .../taskgraph/test/test_util_python_path.py | 18 +- .../taskgraph/test/test_util_runnable_jobs.py | 77 +- .../taskgraph/test/test_util_schema.py | 164 +- .../taskgraph/test/test_util_taskcluster.py | 9 +- .../taskgraph/test/test_util_templates.py | 71 +- taskcluster/taskgraph/test/test_util_time.py | 37 +- .../taskgraph/test/test_util_treeherder.py | 15 +- .../taskgraph/test/test_util_verify.py | 69 +- taskcluster/taskgraph/test/test_util_yaml.py | 10 +- .../taskgraph/transforms/balrog_submit.py | 134 +- .../taskgraph/transforms/balrog_toplevel.py | 31 +- taskcluster/taskgraph/transforms/base.py | 14 +- taskcluster/taskgraph/transforms/beetmover.py | 144 +- .../transforms/beetmover_checksums.py | 110 +- .../transforms/beetmover_emefree_checksums.py | 88 +- .../transforms/beetmover_geckoview.py | 129 +- 
.../beetmover_langpack_checksums.py | 94 +- .../transforms/beetmover_push_to_release.py | 87 +- .../transforms/beetmover_repackage.py | 225 +- .../transforms/beetmover_repackage_l10n.py | 24 +- .../transforms/beetmover_repackage_partner.py | 269 +- .../taskgraph/transforms/beetmover_snap.py | 15 +- .../taskgraph/transforms/beetmover_source.py | 12 +- .../transforms/beetmover_source_checksums.py | 105 +- .../taskgraph/transforms/bouncer_check.py | 53 +- .../taskgraph/transforms/bouncer_locations.py | 13 +- .../transforms/bouncer_submission.py | 292 +- .../transforms/bouncer_submission_partners.py | 127 +- taskcluster/taskgraph/transforms/build.py | 146 +- .../taskgraph/transforms/build_attrs.py | 29 +- .../taskgraph/transforms/build_fat_aar.py | 54 +- .../taskgraph/transforms/build_lints.py | 25 +- .../taskgraph/transforms/build_signing.py | 50 +- .../taskgraph/transforms/cached_tasks.py | 49 +- .../taskgraph/transforms/chunk_partners.py | 34 +- .../taskgraph/transforms/code_review.py | 12 +- .../copy_attributes_from_dependent_task.py | 6 +- .../taskgraph/transforms/diffoscope.py | 224 +- .../taskgraph/transforms/docker_image.py | 242 +- taskcluster/taskgraph/transforms/fetch.py | 435 +- .../taskgraph/transforms/final_verify.py | 16 +- .../transforms/geckodriver_signing.py | 126 +- .../taskgraph/transforms/github_sync.py | 15 +- taskcluster/taskgraph/transforms/iris.py | 74 +- .../taskgraph/transforms/job/__init__.py | 369 +- .../taskgraph/transforms/job/common.py | 211 +- .../transforms/job/debian_package.py | 265 +- .../taskgraph/transforms/job/hazard.py | 68 +- taskcluster/taskgraph/transforms/job/mach.py | 76 +- .../taskgraph/transforms/job/mozharness.py | 492 +- .../transforms/job/mozharness_test.py | 525 +- .../taskgraph/transforms/job/python_test.py | 47 +- .../taskgraph/transforms/job/run_task.py | 293 +- .../taskgraph/transforms/job/spidermonkey.py | 136 +- .../taskgraph/transforms/job/toolchain.py | 255 +- taskcluster/taskgraph/transforms/l10n.py | 468 +- .../transforms/mac_notarization_poller.py | 96 +- .../taskgraph/transforms/maybe_release.py | 8 +- .../taskgraph/transforms/merge_automation.py | 10 +- .../taskgraph/transforms/name_sanity.py | 30 +- taskcluster/taskgraph/transforms/openh264.py | 14 +- .../taskgraph/transforms/openh264_signing.py | 89 +- taskcluster/taskgraph/transforms/partials.py | 160 +- .../transforms/partner_attribution.py | 24 +- .../partner_attribution_beetmover.py | 6 +- .../taskgraph/transforms/partner_repack.py | 57 +- .../taskgraph/transforms/partner_signing.py | 43 +- .../transforms/per_platform_dummy.py | 14 +- taskcluster/taskgraph/transforms/perftest.py | 146 +- .../taskgraph/transforms/python_update.py | 4 +- taskcluster/taskgraph/transforms/raptor.py | 363 +- taskcluster/taskgraph/transforms/release.py | 4 +- .../release_beetmover_signed_addons.py | 201 +- .../taskgraph/transforms/release_deps.py | 36 +- .../transforms/release_flatpak_push.py | 71 +- .../transforms/release_flatpak_repackage.py | 25 +- .../transforms/release_generate_checksums.py | 16 +- .../release_generate_checksums_beetmover.py | 96 +- .../release_generate_checksums_signing.py | 83 +- .../transforms/release_mark_as_shipped.py | 22 +- .../transforms/release_notifications.py | 33 +- .../release_sign_and_push_langpacks.py | 137 +- .../taskgraph/transforms/release_snap_push.py | 75 +- .../transforms/release_snap_repackage.py | 20 +- .../taskgraph/transforms/release_started.py | 40 +- .../transforms/release_version_bump.py | 21 +- taskcluster/taskgraph/transforms/repackage.py | 486 
+- .../taskgraph/transforms/repackage_l10n.py | 10 +- .../taskgraph/transforms/repackage_partner.py | 301 +- .../taskgraph/transforms/repackage_routes.py | 30 +- .../taskgraph/transforms/repackage_signing.py | 106 +- .../taskgraph/transforms/repo_update.py | 4 +- .../transforms/reverse_chunk_deps.py | 13 +- .../taskgraph/transforms/run_pgo_profile.py | 23 +- .../taskgraph/transforms/scriptworker.py | 6 +- .../transforms/shippable_l10n_signing.py | 72 +- taskcluster/taskgraph/transforms/signing.py | 286 +- .../transforms/source_checksums_signing.py | 83 +- .../taskgraph/transforms/source_test.py | 193 +- .../taskgraph/transforms/spidermonkey.py | 6 +- .../transforms/strip_dependent_task.py | 2 +- taskcluster/taskgraph/transforms/task.py | 2544 ++-- taskcluster/taskgraph/transforms/tests.py | 2135 ++- taskcluster/taskgraph/transforms/try_job.py | 2 +- .../taskgraph/transforms/update_verify.py | 37 +- .../transforms/update_verify_config.py | 54 +- .../transforms/upload_generated_sources.py | 34 +- .../taskgraph/transforms/upload_symbols.py | 66 +- .../transforms/upstream_artifact_task.py | 17 +- .../taskgraph/transforms/visual_metrics.py | 7 +- .../transforms/visual_metrics_dep.py | 28 +- taskcluster/taskgraph/try_option_syntax.py | 588 +- taskcluster/taskgraph/util/attributes.py | 96 +- taskcluster/taskgraph/util/backstop.py | 6 +- taskcluster/taskgraph/util/bugbug.py | 26 +- taskcluster/taskgraph/util/cached_tasks.py | 56 +- taskcluster/taskgraph/util/chunking.py | 115 +- .../taskgraph/util/declarative_artifacts.py | 63 +- taskcluster/taskgraph/util/docker.py | 169 +- taskcluster/taskgraph/util/hash.py | 18 +- taskcluster/taskgraph/util/hg.py | 48 +- taskcluster/taskgraph/util/keyed_by.py | 24 +- .../taskgraph/util/parameterization.py | 50 +- taskcluster/taskgraph/util/partials.py | 162 +- taskcluster/taskgraph/util/partners.py | 293 +- taskcluster/taskgraph/util/perfile.py | 58 +- taskcluster/taskgraph/util/platforms.py | 34 +- taskcluster/taskgraph/util/python_path.py | 21 +- taskcluster/taskgraph/util/schema.py | 39 +- taskcluster/taskgraph/util/scriptworker.py | 627 +- .../taskgraph/util/signed_artifacts.py | 189 +- taskcluster/taskgraph/util/taskcluster.py | 183 +- taskcluster/taskgraph/util/taskgraph.py | 13 +- taskcluster/taskgraph/util/templates.py | 15 +- taskcluster/taskgraph/util/time.py | 37 +- taskcluster/taskgraph/util/treeherder.py | 37 +- taskcluster/taskgraph/util/verify.py | 220 +- taskcluster/taskgraph/util/workertypes.py | 68 +- taskcluster/taskgraph/util/yaml.py | 4 +- testing/awsy/awsy/__init__.py | 14 +- testing/awsy/awsy/awsy_test_case.py | 73 +- testing/awsy/awsy/parse_about_memory.py | 48 +- testing/awsy/awsy/process_perf_data.py | 156 +- testing/awsy/awsy/test_base_memory_usage.py | 46 +- testing/awsy/awsy/test_memory_usage.py | 88 +- testing/awsy/awsy/webservers.py | 38 +- testing/awsy/mach_commands.py | 343 +- testing/awsy/setup.py | 25 +- testing/condprofile/mach_commands.py | 13 +- testing/condprofile/moz.build | 2 +- testing/crashtest/moz.build | 2 +- .../harness/firefox_ui_harness/__init__.py | 2 +- .../firefox_ui_harness/arguments/base.py | 3 +- .../firefox_ui_harness/cli_functional.py | 11 +- .../firefox_ui_harness/runners/base.py | 21 +- testing/firefox-ui/harness/setup.py | 44 +- testing/firefox-ui/mach_commands.py | 78 +- .../safebrowsing/test_initial_download.py | 99 +- .../safebrowsing/test_notification.py | 67 +- .../safebrowsing/test_warning_pages.py | 59 +- .../security/test_ssl_status_after_restart.py | 31 +- 
.../sessionstore/session_store_test_case.py | 254 +- ..._restore_windows_after_restart_and_quit.py | 87 +- ..._restore_windows_after_windows_shutdown.py | 17 +- testing/geckodriver/mach_commands.py | 115 +- testing/geckodriver/moz.build | 1 - testing/gtest/bench.py | 19 +- testing/gtest/benchmark/moz.build | 6 +- testing/gtest/mach_test_package_commands.py | 89 +- testing/gtest/moz.build | 112 +- testing/gtest/mozilla/moz.build | 14 +- testing/gtest/remotegtests.py | 274 +- testing/gtest/rungtests.py | 131 +- testing/jsshell/benchmark.py | 176 +- testing/mach_commands.py | 857 +- testing/marionette/client/docs/conf.py | 57 +- .../client/marionette_driver/__init__.py | 2 +- .../client/marionette_driver/addons.py | 7 +- .../marionette_driver/date_time_value.py | 8 +- .../client/marionette_driver/decorators.py | 7 +- .../client/marionette_driver/errors.py | 21 +- .../client/marionette_driver/geckoinstance.py | 163 +- .../client/marionette_driver/gestures.py | 66 +- .../client/marionette_driver/keys.py | 126 +- .../marionette_driver/legacy_actions.py | 114 +- .../client/marionette_driver/localization.py | 8 +- .../client/marionette_driver/marionette.py | 411 +- .../client/marionette_driver/transport.py | 26 +- .../client/marionette_driver/wait.py | 20 +- testing/marionette/client/setup.py | 66 +- .../harness/marionette_harness/__init__.py | 2 +- .../marionette_test/__init__.py | 2 +- .../marionette_test/decorators.py | 43 +- .../marionette_test/testcases.py | 152 +- .../marionette_harness/runner/__init__.py | 4 +- .../harness/marionette_harness/runner/base.py | 912 +- .../marionette_harness/runner/httpd.py | 97 +- .../runner/mixins/__init__.py | 4 +- .../runner/mixins/window_manager.py | 61 +- .../marionette_harness/runner/serve.py | 48 +- .../harness/marionette_harness/runtests.py | 56 +- .../tests/harness_unit/conftest.py | 113 +- .../tests/harness_unit/test_httpd.py | 2 +- .../harness_unit/test_marionette_arguments.py | 53 +- .../harness_unit/test_marionette_harness.py | 33 +- .../harness_unit/test_marionette_runner.py | 315 +- .../test_marionette_test_result.py | 19 +- .../tests/harness_unit/test_serve.py | 2 +- .../tests/unit/test_accessibility.py | 147 +- .../tests/unit/test_addons.py | 23 +- .../tests/unit/test_capabilities.py | 90 +- .../tests/unit/test_checkbox_chrome.py | 5 +- .../tests/unit/test_chrome.py | 9 +- .../tests/unit/test_chrome_action.py | 29 +- .../tests/unit/test_chrome_element_css.py | 10 +- .../tests/unit/test_cli_arguments.py | 7 +- .../tests/unit/test_click.py | 205 +- .../tests/unit/test_click_chrome.py | 5 +- .../tests/unit/test_click_scrolling.py | 81 +- .../tests/unit/test_context.py | 11 +- .../tests/unit/test_cookies.py | 62 +- .../tests/unit/test_crash.py | 58 +- .../tests/unit/test_data_driven.py | 32 +- .../tests/unit/test_element_rect.py | 9 +- .../tests/unit/test_element_rect_chrome.py | 11 +- .../tests/unit/test_element_retrieval.py | 175 +- .../tests/unit/test_element_state.py | 57 +- .../tests/unit/test_element_state_chrome.py | 13 +- .../tests/unit/test_errors.py | 31 +- .../tests/unit/test_execute_async_script.py | 146 +- .../tests/unit/test_execute_isolate.py | 24 +- .../tests/unit/test_execute_sandboxes.py | 43 +- .../tests/unit/test_execute_script.py | 278 +- .../tests/unit/test_expected.py | 9 +- .../tests/unit/test_file_upload.py | 33 +- .../tests/unit/test_findelement_chrome.py | 53 +- .../tests/unit/test_geckoinstance.py | 18 +- .../tests/unit/test_get_current_url_chrome.py | 1 - .../tests/unit/test_key_actions.py | 31 +- 
.../tests/unit/test_localization.py | 67 +- .../tests/unit/test_marionette.py | 34 +- .../tests/unit/test_modal_dialogs.py | 17 +- .../tests/unit/test_mouse_action.py | 79 +- .../tests/unit/test_navigation.py | 214 +- .../tests/unit/test_pagesource.py | 27 +- .../tests/unit/test_pagesource_chrome.py | 5 +- .../tests/unit/test_position.py | 5 +- .../tests/unit/test_prefs.py | 93 +- .../tests/unit/test_prefs_enforce.py | 13 +- .../tests/unit/test_profile_management.py | 20 +- .../tests/unit/test_proxy.py | 99 +- .../tests/unit/test_quit_restart.py | 163 +- .../tests/unit/test_reftest.py | 76 +- .../tests/unit/test_rendered_element.py | 9 +- .../tests/unit/test_report.py | 3 +- .../tests/unit/test_run_js_test.py | 4 +- .../tests/unit/test_screen_orientation.py | 14 +- .../tests/unit/test_screenshot.py | 97 +- .../tests/unit/test_select.py | 78 +- .../tests/unit/test_session.py | 1 - .../tests/unit/test_skip_setup.py | 14 +- .../tests/unit/test_switch_frame_chrome.py | 25 +- .../tests/unit/test_switch_window_chrome.py | 35 +- .../tests/unit/test_switch_window_content.py | 13 +- .../tests/unit/test_text.py | 9 +- .../tests/unit/test_text_chrome.py | 10 +- .../tests/unit/test_timeouts.py | 26 +- .../tests/unit/test_title_chrome.py | 7 +- .../tests/unit/test_transport.py | 5 +- .../tests/unit/test_typing.py | 43 +- .../unit/test_unhandled_prompt_behavior.py | 48 +- .../tests/unit/test_visibility.py | 80 +- .../tests/unit/test_wait.py | 34 +- .../tests/unit/test_window_close_chrome.py | 1 - .../tests/unit/test_window_close_content.py | 15 +- .../tests/unit/test_window_handles_chrome.py | 72 +- .../tests/unit/test_window_handles_content.py | 7 +- .../tests/unit/test_window_management.py | 23 +- .../tests/unit/test_window_maximize.py | 14 +- .../tests/unit/test_window_rect.py | 194 +- .../tests/unit/test_window_status_chrome.py | 1 - .../tests/unit/test_window_status_content.py | 1 - .../tests/unit/test_window_type_chrome.py | 4 +- testing/marionette/harness/setup.py | 70 +- testing/marionette/mach_commands.py | 64 +- .../marionette/mach_test_package_commands.py | 38 +- testing/mochitest/BrowserTestUtils/moz.build | 16 +- testing/mochitest/bisection.py | 193 +- testing/mochitest/leaks.py | 225 +- testing/mochitest/mach_commands.py | 358 +- .../mochitest/mach_test_package_commands.py | 116 +- testing/mochitest/manifests/moz.build | 2 +- testing/mochitest/moz.build | 284 +- testing/mochitest/pywebsocket_wrapper.py | 6 +- testing/mochitest/runjunit.py | 525 +- testing/mochitest/runtests.py | 1771 +-- testing/mochitest/runtestsremote.py | 147 +- testing/mochitest/ssltunnel/moz.build | 12 +- testing/mochitest/tests/SimpleTest/moz.build | 36 +- testing/mochitest/tests/moz.build | 10 +- testing/mochitest/tests/python/conftest.py | 65 +- .../python/test_basic_mochitest_plain.py | 64 +- .../tests/python/test_build_profile.py | 16 +- .../tests/python/test_get_active_tests.py | 74 +- .../tests/python/test_message_logger.py | 114 +- testing/modules/moz.build | 32 +- testing/moz.build | 9 +- .../docs/_static/structured_example.py | 9 +- testing/mozbase/docs/conf.py | 72 +- .../manifestparser/manifestparser/cli.py | 104 +- .../manifestparser/expression.py | 76 +- .../manifestparser/manifestparser/filters.py | 118 +- .../manifestparser/manifestparser/ini.py | 51 +- .../manifestparser/manifestparser.py | 228 +- .../manifestparser/manifestparser/util.py | 16 +- testing/mozbase/manifestparser/setup.py | 46 +- .../manifestparser/tests/test_chunking.py | 125 +- .../tests/test_convert_directory.py | 82 +- 
.../tests/test_convert_symlinks.py | 77 +- .../tests/test_default_overrides.py | 88 +- .../tests/test_expressionparser.py | 20 +- .../manifestparser/tests/test_filters.py | 104 +- .../tests/test_manifestparser.py | 335 +- .../manifestparser/tests/test_read_ini.py | 51 +- .../manifestparser/tests/test_testmanifest.py | 75 +- testing/mozbase/moz.build | 100 +- testing/mozbase/mozcrash/mozcrash/mozcrash.py | 350 +- testing/mozbase/mozcrash/setup.py | 46 +- testing/mozbase/mozcrash/tests/conftest.py | 69 +- testing/mozbase/mozcrash/tests/test_basic.py | 10 +- .../mozcrash/tests/test_java_exception.py | 16 +- .../mozbase/mozcrash/tests/test_save_path.py | 8 +- .../mozbase/mozcrash/tests/test_stackwalk.py | 18 +- .../mozcrash/tests/test_symbols_path.py | 20 +- testing/mozbase/mozdebug/mozdebug/mozdebug.py | 181 +- testing/mozbase/mozdebug/setup.py | 43 +- testing/mozbase/mozdebug/tests/test.py | 24 +- .../mozbase/mozdevice/mozdevice/__init__.py | 13 +- testing/mozbase/mozdevice/mozdevice/adb.py | 1421 +- .../mozdevice/mozdevice/adb_android.py | 1 + testing/mozbase/mozdevice/setup.py | 46 +- testing/mozbase/mozdevice/tests/conftest.py | 47 +- testing/mozbase/mozdevice/tests/test_chown.py | 28 +- .../tests/test_escape_command_line.py | 12 +- .../mozdevice/tests/test_is_app_installed.py | 15 +- .../mozdevice/tests/test_socket_connection.py | 93 +- testing/mozbase/mozfile/mozfile/mozfile.py | 35 +- testing/mozbase/mozfile/tests/stubs.py | 33 +- testing/mozbase/mozfile/tests/test_extract.py | 30 +- .../mozbase/mozfile/tests/test_move_remove.py | 29 +- testing/mozbase/mozfile/tests/test_tempdir.py | 3 +- .../mozbase/mozfile/tests/test_tempfile.py | 16 +- testing/mozbase/mozfile/tests/test_tree.py | 12 +- testing/mozbase/mozfile/tests/test_url.py | 10 +- testing/mozbase/mozfile/tests/test_which.py | 16 +- .../mozgeckoprofiler/symFileManager.py | 4 +- .../mozgeckoprofiler/symbolication.py | 1 + .../mozgeckoprofiler/symbolicationRequest.py | 1 + .../mozgeckoprofiler/viewgeckoprofile.py | 4 +- testing/mozbase/mozhttpd/mozhttpd/__init__.py | 2 +- testing/mozbase/mozhttpd/mozhttpd/handlers.py | 8 +- testing/mozbase/mozhttpd/mozhttpd/mozhttpd.py | 125 +- testing/mozbase/mozhttpd/setup.py | 43 +- testing/mozbase/mozhttpd/tests/filelisting.py | 8 +- testing/mozbase/mozinfo/mozinfo/__init__.py | 1 - testing/mozbase/mozinfo/mozinfo/mozinfo.py | 179 +- .../mozbase/mozinfo/mozinfo/string_version.py | 2 +- testing/mozbase/mozinfo/tests/test.py | 63 +- .../mozbase/mozinstall/mozinstall/__init__.py | 2 +- .../mozinstall/mozinstall/mozinstall.py | 97 +- testing/mozbase/mozinstall/setup.py | 71 +- testing/mozbase/mozinstall/tests/conftest.py | 4 +- .../mozbase/mozinstall/tests/test_binary.py | 36 +- .../mozbase/mozinstall/tests/test_install.py | 55 +- .../mozinstall/tests/test_is_installer.py | 19 +- .../mozinstall/tests/test_uninstall.py | 18 +- testing/mozbase/mozleak/mozleak/__init__.py | 2 +- testing/mozbase/mozleak/mozleak/leaklog.py | 149 +- testing/mozbase/mozleak/mozleak/lsan.py | 83 +- testing/mozbase/mozleak/setup.py | 22 +- testing/mozbase/mozleak/tests/test_lsan.py | 30 +- testing/mozbase/mozlog/mozlog/__init__.py | 16 +- testing/mozbase/mozlog/mozlog/capture.py | 4 +- testing/mozbase/mozlog/mozlog/commandline.py | 161 +- .../mozlog/mozlog/formatters/__init__.py | 13 +- .../mozlog/mozlog/formatters/errorsummary.py | 59 +- .../mozlog/mozlog/formatters/grouping.py | 213 +- .../mozlog/mozlog/formatters/html/__init__.py | 3 +- .../mozlog/mozlog/formatters/html/html.py | 280 +- 
.../mozlog/mozlog/formatters/html/xmlgen.py | 119 +- .../mozlog/mozlog/formatters/machformatter.py | 333 +- .../mozlog/mozlog/formatters/process.py | 11 +- .../mozlog/mozlog/formatters/tbplformatter.py | 179 +- .../mozlog/mozlog/formatters/unittest.py | 37 +- .../mozbase/mozlog/mozlog/formatters/xunit.py | 37 +- .../mozlog/mozlog/handlers/__init__.py | 11 +- .../mozbase/mozlog/mozlog/handlers/base.py | 14 +- .../mozlog/mozlog/handlers/bufferhandler.py | 27 +- .../mozlog/mozlog/handlers/statushandler.py | 32 +- .../mozlog/mozlog/handlers/summaryhandler.py | 134 +- .../mozlog/mozlog/handlers/valgrindhandler.py | 59 +- testing/mozbase/mozlog/mozlog/logtypes.py | 84 +- .../mozlog/mozlog/pytest_mozlog/plugin.py | 74 +- .../mozbase/mozlog/mozlog/scripts/__init__.py | 21 +- .../mozbase/mozlog/mozlog/scripts/format.py | 30 +- .../mozbase/mozlog/mozlog/scripts/logmerge.py | 16 +- .../mozbase/mozlog/mozlog/scripts/unstable.py | 54 +- testing/mozbase/mozlog/mozlog/stdadapter.py | 9 +- .../mozbase/mozlog/mozlog/structuredlog.py | 296 +- .../mozlog/mozlog/unstructured/logger.py | 66 +- .../mozlog/unstructured/loggingmixin.py | 5 +- .../mozlog/mozlog/unstructured/loglistener.py | 9 +- testing/mozbase/mozlog/setup.py | 65 +- .../mozbase/mozlog/tests/test_formatters.py | 437 +- testing/mozbase/mozlog/tests/test_logger.py | 255 +- testing/mozbase/mozlog/tests/test_logtypes.py | 72 +- .../mozbase/mozlog/tests/test_structured.py | 840 +- .../mozbase/moznetwork/moznetwork/__init__.py | 2 +- .../moznetwork/moznetwork/moznetwork.py | 109 +- testing/mozbase/moznetwork/setup.py | 28 +- .../moznetwork/tests/test_moznetwork.py | 38 +- testing/mozbase/mozpower/mozpower/__init__.py | 18 +- .../mozpower/mozpower/intel_power_gadget.py | 292 +- .../mozpower/mozpower/macintelpower.py | 23 +- testing/mozbase/mozpower/mozpower/mozpower.py | 124 +- .../mozpower/mozpower/mozpowerutils.py | 6 +- .../mozbase/mozpower/mozpower/powerbase.py | 15 +- testing/mozbase/mozpower/setup.py | 46 +- testing/mozbase/mozpower/tests/conftest.py | 57 +- .../mozpower/tests/test_intelpowergadget.py | 117 +- .../mozbase/mozpower/tests/test_mozpower.py | 218 +- .../mozbase/mozpower/tests/test_powerbase.py | 23 +- .../mozprocess/mozprocess/processhandler.py | 578 +- testing/mozbase/mozprocess/mozprocess/qijo.py | 109 +- .../mozprocess/mozprocess/winprocess.py | 380 +- testing/mozbase/mozprocess/setup.py | 52 +- .../mozbase/mozprocess/tests/proclaunch.py | 134 +- testing/mozbase/mozprocess/tests/proctest.py | 33 +- .../mozprocess/tests/scripts/infinite_loop.py | 2 +- .../mozprocess/tests/scripts/procnonewline.py | 1 + .../mozbase/mozprocess/tests/test_detached.py | 20 +- testing/mozbase/mozprocess/tests/test_kill.py | 66 +- testing/mozbase/mozprocess/tests/test_misc.py | 37 +- .../mozbase/mozprocess/tests/test_output.py | 29 +- .../mozbase/mozprocess/tests/test_params.py | 27 +- testing/mozbase/mozprocess/tests/test_pid.py | 14 +- testing/mozbase/mozprocess/tests/test_poll.py | 90 +- .../mozprocess/tests/test_process_reader.py | 51 +- testing/mozbase/mozprocess/tests/test_wait.py | 88 +- .../mozbase/mozprofile/mozprofile/addons.py | 90 +- testing/mozbase/mozprofile/mozprofile/cli.py | 101 +- testing/mozbase/mozprofile/mozprofile/diff.py | 34 +- .../mozprofile/mozprofile/permissions.py | 79 +- .../mozbase/mozprofile/mozprofile/prefs.py | 38 +- .../mozbase/mozprofile/mozprofile/profile.py | 229 +- testing/mozbase/mozprofile/mozprofile/view.py | 13 +- testing/mozbase/mozprofile/setup.py | 65 +- .../mozbase/mozprofile/tests/addon_stubs.py | 35 +- 
.../mozbase/mozprofile/tests/test_addonid.py | 10 +- .../mozbase/mozprofile/tests/test_addons.py | 178 +- .../mozprofile/tests/test_bug758250.py | 10 +- .../mozprofile/tests/test_chrome_profile.py | 24 +- .../mozprofile/tests/test_clone_cleanup.py | 11 +- .../mozbase/mozprofile/tests/test_nonce.py | 18 +- .../mozprofile/tests/test_permissions.py | 84 +- .../mozprofile/tests/test_preferences.py | 141 +- .../mozbase/mozprofile/tests/test_profile.py | 44 +- .../mozprofile/tests/test_profile_view.py | 31 +- .../mozprofile/tests/test_server_locations.py | 45 +- testing/mozbase/mozproxy/mozproxy/__init__.py | 3 +- .../mozproxy/backends/mitm/android.py | 9 +- .../mozproxy/backends/mitm/desktop.py | 8 +- .../mozproxy/mozproxy/backends/mitm/mitm.py | 44 +- .../mitm/scripts/alternate-server-replay.py | 72 +- .../mitm/scripts/inject-deterministic.py | 66 +- testing/mozbase/mozproxy/mozproxy/driver.py | 93 +- .../mozbase/mozproxy/mozproxy/recordings.py | 46 +- testing/mozbase/mozproxy/mozproxy/utils.py | 18 +- testing/mozbase/mozproxy/setup.py | 6 +- .../mozproxy/tests/test_command_line.py | 30 +- testing/mozbase/mozproxy/tests/test_proxy.py | 13 +- .../mozrunner/mozrunner/application.py | 43 +- .../mozrunner/mozrunner/base/browser.py | 52 +- .../mozrunner/mozrunner/base/device.py | 84 +- .../mozrunner/mozrunner/base/runner.py | 58 +- testing/mozbase/mozrunner/mozrunner/cli.py | 94 +- .../mozrunner/mozrunner/devices/__init__.py | 10 +- .../mozrunner/devices/android_device.py | 554 +- .../mozrunner/mozrunner/devices/base.py | 62 +- .../mozrunner/mozrunner/devices/emulator.py | 97 +- .../mozrunner/devices/emulator_battery.py | 31 +- .../mozrunner/devices/emulator_geo.py | 3 +- .../mozrunner/devices/emulator_screen.py | 48 +- .../mozbase/mozrunner/mozrunner/runners.py | 58 +- testing/mozbase/mozrunner/mozrunner/utils.py | 140 +- testing/mozbase/mozrunner/setup.py | 71 +- testing/mozbase/mozrunner/tests/conftest.py | 29 +- testing/mozbase/mozrunner/tests/test_crash.py | 12 +- .../mozrunner/tests/test_interactive.py | 2 +- testing/mozbase/mozrunner/tests/test_start.py | 6 +- .../mozbase/mozrunner/tests/test_states.py | 2 +- testing/mozbase/mozrunner/tests/test_stop.py | 2 +- .../mozbase/mozrunner/tests/test_threads.py | 2 +- testing/mozbase/mozrunner/tests/test_wait.py | 2 +- .../mozscreenshot/mozscreenshot/__init__.py | 38 +- testing/mozbase/mozscreenshot/setup.py | 24 +- .../mozsystemmonitor/resourcemonitor.py | 168 +- testing/mozbase/mozsystemmonitor/setup.py | 30 +- .../tests/test_resource_monitor.py | 55 +- testing/mozbase/moztest/moztest/__init__.py | 3 +- .../moztest/moztest/adapters/__init__.py | 2 +- .../mozbase/moztest/moztest/adapters/unit.py | 95 +- testing/mozbase/moztest/moztest/resolve.py | 759 +- testing/mozbase/moztest/moztest/results.py | 234 +- .../moztest/moztest/selftest/fixtures.py | 27 +- .../moztest/moztest/selftest/output.py | 10 +- testing/mozbase/moztest/setup.py | 47 +- .../tests/data/srcdir/carrot/moz.build | 2 +- .../tests/data/srcdir/dragonfruit/moz.build | 2 +- .../moztest/tests/data/srcdir/fig/moz.build | 4 +- .../moztest/tests/data/srcdir/moz.build | 4 +- testing/mozbase/moztest/tests/test.py | 39 +- testing/mozbase/moztest/tests/test_resolve.py | 512 +- .../mozbase/mozversion/mozversion/errors.py | 3 +- .../mozversion/mozversion/mozversion.py | 74 +- testing/mozbase/mozversion/setup.py | 41 +- testing/mozbase/mozversion/tests/test_apk.py | 32 +- .../mozbase/mozversion/tests/test_binary.py | 84 +- testing/mozbase/setup_development.py | 112 +- testing/mozbase/versioninfo.py 
| 84 +- .../configs/android/android_common.py | 56 +- .../mozharness/configs/android/android_hw.py | 12 +- .../mozharness/configs/android/android_pgo.py | 14 +- .../configs/android/androidarm_4_3.py | 2 +- .../configs/android/androidx86_7_0.py | 2 +- testing/mozharness/configs/android/wrench.py | 2 +- .../mozharness/configs/awsy/linux_config.py | 7 +- .../mozharness/configs/awsy/macosx_config.py | 7 +- .../awsy/taskcluster_windows_config.py | 20 +- .../mozharness/configs/balrog/production.py | 25 +- testing/mozharness/configs/balrog/staging.py | 18 +- .../configs/builds/build_pool_specifics.py | 4 +- .../builds/releng_base_android_64_builds.py | 74 +- .../configs/builds/releng_base_firefox.py | 2 +- .../builds/releng_base_linux_32_builds.py | 70 +- .../builds/releng_base_linux_64_builds.py | 70 +- .../builds/releng_base_mac_64_cross_builds.py | 67 +- .../releng_base_windows_32_mingw_builds.py | 60 +- .../releng_base_windows_64_mingw_builds.py | 62 +- .../releng_sub_android_configs/64_aarch64.py | 4 +- .../64_aarch64_beta.py | 4 +- .../64_aarch64_beta_debug.py | 6 +- .../64_aarch64_debug.py | 6 +- .../releng_sub_android_configs/64_api_16.py | 4 +- .../64_api_16_beta.py | 4 +- .../64_api_16_beta_debug.py | 6 +- .../64_api_16_debug.py | 6 +- .../64_api_16_debug_ccov.py | 13 +- .../64_api_16_debug_searchfox.py | 10 +- .../64_api_16_gradle_dependencies.py | 15 +- .../64_api_16_partner_sample1.py | 8 +- .../64_api_16_profile_generate.py | 4 +- .../64_geckoview_docs.py | 29 +- .../releng_sub_android_configs/64_x86.py | 4 +- .../releng_sub_android_configs/64_x86_64.py | 4 +- .../64_x86_64_beta.py | 4 +- .../64_x86_64_beta_debug.py | 6 +- .../64_x86_64_debug.py | 6 +- .../64_x86_64_fuzzing_asan.py | 4 +- .../releng_sub_android_configs/64_x86_beta.py | 4 +- .../64_x86_beta_debug.py | 6 +- .../64_x86_debug.py | 6 +- .../64_x86_fuzzing_debug.py | 6 +- .../releng_sub_linux_configs/32_debug.py | 28 +- .../releng_sub_linux_configs/32_rusttests.py | 32 +- .../32_rusttests_debug.py | 32 +- .../64_add-on-devel.py | 24 +- .../releng_sub_linux_configs/64_asan.py | 34 +- .../64_asan_and_debug.py | 36 +- .../64_asan_reporter_tc.py | 30 +- .../releng_sub_linux_configs/64_asan_tc.py | 26 +- .../64_asan_tc_and_debug.py | 28 +- .../64_code_coverage_debug.py | 26 +- .../64_code_coverage_opt.py | 26 +- .../releng_sub_linux_configs/64_debug.py | 28 +- .../64_fuzzing_asan_tc.py | 30 +- .../64_fuzzing_debug.py | 30 +- .../64_fuzzing_tsan_tc.py | 26 +- .../64_noopt_debug.py | 28 +- .../releng_sub_linux_configs/64_rusttests.py | 30 +- .../64_rusttests_debug.py | 32 +- .../64_searchfox_and_debug.py | 44 +- .../releng_sub_linux_configs/64_source.py | 16 +- .../64_stat_and_debug.py | 42 +- .../releng_sub_linux_configs/64_tsan_tc.py | 26 +- .../releng_sub_linux_configs/64_valgrind.py | 36 +- .../releng_sub_mac_configs/64_add-on-devel.py | 24 +- .../64_code_coverage_debug.py | 26 +- .../64_code_coverage_opt.py | 24 +- .../releng_sub_mac_configs/64_cross_debug.py | 28 +- .../64_cross_debug_searchfox.py | 38 +- .../64_cross_fuzzing_asan.py | 32 +- .../64_cross_fuzzing_debug.py | 30 +- .../64_cross_noopt_debug.py | 28 +- .../builds/releng_sub_mac_configs/64_debug.py | 36 +- .../64_stat_and_debug.py | 38 +- .../32_add-on-devel.py | 32 +- .../releng_sub_windows_configs/32_debug.py | 34 +- .../32_mingwclang.py | 6 +- .../32_stat_and_debug.py | 36 +- .../64_add-on-devel.py | 30 +- .../releng_sub_windows_configs/64_debug.py | 32 +- .../64_mingwclang.py | 6 +- .../configs/builds/taskcluster_base_win32.py | 4 +- 
.../configs/builds/taskcluster_base_win64.py | 4 +- .../builds/taskcluster_base_windows.py | 62 +- .../builds/taskcluster_sub_win32/debug.py | 8 +- .../taskcluster_sub_win32/noopt_debug.py | 8 +- .../taskcluster_sub_win64/asan_debug.py | 8 +- .../asan_reporter_opt.py | 4 +- .../builds/taskcluster_sub_win64/ccov_opt.py | 6 +- .../builds/taskcluster_sub_win64/debug.py | 8 +- .../taskcluster_sub_win64/noopt_debug.py | 8 +- .../builds/taskcluster_sub_win64/plain_opt.py | 10 +- .../taskcluster_sub_win64/rusttests_opt.py | 14 +- .../taskcluster_sub_win64/searchfox_debug.py | 12 +- .../mozharness/configs/developer_config.py | 10 +- .../configs/firefox_ui_tests/qa_jenkins.py | 5 +- .../firefox_ui_tests/releng_release.py | 11 +- .../configs/firefox_ui_tests/taskcluster.py | 1 - .../firefox_ui_tests/taskcluster_windows.py | 7 +- .../mozharness/configs/l10n_bumper/jamun.py | 97 +- .../configs/l10n_bumper/mozilla-beta.py | 91 +- .../configs/l10n_bumper/mozilla-central.py | 92 +- .../configs/l10n_bumper/mozilla-esr68.py | 34 +- .../marionette/mac_taskcluster_config.py | 10 +- .../configs/marionette/prod_config.py | 26 +- .../configs/marionette/test_config.py | 15 +- .../configs/marionette/windows_config.py | 22 +- .../marionette/windows_taskcluster_config.py | 140 +- .../configs/merge_day/beta_to_release.py | 10 +- .../configs/merge_day/bump_central.py | 11 +- .../mozharness/configs/merge_day/bump_esr.py | 10 +- .../configs/merge_day/central_to_beta.py | 46 +- .../configs/merge_day/early_to_late_beta.py | 6 +- .../configs/merge_day/release_to_esr.py | 19 +- .../merge_day/staging_beta_migration.py | 5 +- .../mozharness/configs/openh264/linux32.py | 33 +- .../mozharness/configs/openh264/linux64.py | 33 +- .../mozharness/configs/openh264/macosx64.py | 55 +- testing/mozharness/configs/openh264/win32.py | 59 +- .../configs/openh264/win64-aarch64.py | 63 +- testing/mozharness/configs/openh264/win64.py | 59 +- .../configs/raptor/android_hw_config.py | 6 +- .../raptor/linux64_config_taskcluster.py | 6 +- .../mozharness/configs/raptor/linux_config.py | 4 +- .../mozharness/configs/raptor/mac_config.py | 4 +- .../configs/raptor/windows_config.py | 33 +- .../configs/raptor/windows_vm_config.py | 28 +- .../releases/dev_postrelease_fennec_beta.py | 2 +- .../dev_postrelease_fennec_release.py | 2 +- .../releases/dev_postrelease_firefox_beta.py | 2 +- .../dev_postrelease_firefox_release.py | 2 +- .../dev_postrelease_firefox_release_birch.py | 2 +- .../releases/dev_updates_firefox_beta.py | 1 - .../dev_updates_firefox_devedition.py | 1 - .../releases/dev_updates_firefox_release.py | 4 +- .../dev_updates_firefox_release_birch.py | 4 +- .../configs/releases/updates_firefox_beta.py | 1 - .../releases/updates_firefox_devedition.py | 1 - .../releases/updates_firefox_release.py | 4 +- .../mozharness/configs/remove_executables.py | 2 +- testing/mozharness/configs/repackage/base.py | 2 +- .../configs/repackage/linux32_signed.py | 6 +- .../configs/repackage/linux64_signed.py | 6 +- .../configs/repackage/osx_partner.py | 4 +- .../configs/repackage/osx_signed.py | 4 +- .../configs/repackage/win32_partner.py | 10 +- .../configs/repackage/win32_sfx_stub.py | 2 +- .../configs/repackage/win32_signed.py | 10 +- .../repackage/win64-aarch64_sfx_stub.py | 2 +- .../configs/repackage/win64_partner.py | 10 +- .../configs/repackage/win64_signed.py | 10 +- testing/mozharness/configs/servo/mac.py | 2 +- .../configs/single_locale/linux32.py | 1 - .../configs/single_locale/linux64.py | 1 - .../configs/single_locale/macosx64.py | 2 - 
.../single_locale/tc_android-api-16.py | 33 +- .../configs/single_locale/tc_common.py | 2 +- .../configs/single_locale/tc_linux32.py | 2 +- .../configs/single_locale/tc_linux_common.py | 2 +- .../configs/single_locale/tc_macosx64.py | 3 +- .../configs/single_locale/tc_win32.py | 7 +- .../configs/single_locale/tc_win64.py | 7 +- .../mozharness/configs/single_locale/win32.py | 1 - .../configs/single_locale/win64-aarch64.py | 1 - .../mozharness/configs/single_locale/win64.py | 1 - .../talos/linux64_config_taskcluster.py | 6 +- .../mozharness/configs/talos/linux_config.py | 4 +- .../mozharness/configs/talos/mac_config.py | 4 +- .../configs/talos/windows_config.py | 17 +- .../talos/windows_taskcluster_config.py | 12 +- .../configs/talos/windows_vm_config.py | 12 +- .../mozharness/configs/taskcluster_nightly.py | 5 +- testing/mozharness/configs/test/test.py | 27 +- .../mozharness/configs/test/test_override.py | 4 +- .../mozharness/configs/test/test_override2.py | 4 +- .../configs/unittests/linux_unittest.py | 148 +- .../configs/unittests/mac_unittest.py | 103 +- .../configs/unittests/win_unittest.py | 243 +- .../configs/web_platform_tests/prod_config.py | 13 +- .../web_platform_tests/prod_config_mac.py | 13 +- .../web_platform_tests/prod_config_windows.py | 15 +- .../prod_config_windows_taskcluster.py | 126 +- .../configs/web_platform_tests/test_config.py | 14 +- .../web_platform_tests/test_config_windows.py | 18 +- testing/mozharness/docs/conf.py | 152 +- .../examples/action_config_script.py | 114 +- testing/mozharness/examples/silent_script.py | 10 +- testing/mozharness/examples/venv.py | 38 +- testing/mozharness/examples/verbose_script.py | 31 +- .../external_tools/extract_and_run_command.py | 42 +- testing/mozharness/external_tools/gittool.py | 101 +- .../mouse_and_screen_resolution.py | 74 +- .../external_tools/packagesymbols.py | 43 +- .../external_tools/robustcheckout.py | 589 +- testing/mozharness/external_tools/tooltool.py | 954 +- testing/mozharness/mach_commands.py | 150 +- testing/mozharness/mozharness/__init__.py | 2 +- testing/mozharness/mozharness/base/config.py | 298 +- .../mozharness/mozharness/base/diskutils.py | 77 +- testing/mozharness/mozharness/base/errors.py | 248 +- testing/mozharness/mozharness/base/log.py | 247 +- .../mozharness/mozharness/base/parallel.py | 7 +- testing/mozharness/mozharness/base/python.py | 634 +- testing/mozharness/mozharness/base/script.py | 970 +- .../mozharness/mozharness/base/transfer.py | 6 +- .../mozharness/mozharness/base/vcs/gittool.py | 58 +- .../mozharness/base/vcs/mercurial.py | 225 +- .../mozharness/mozharness/base/vcs/vcsbase.py | 70 +- .../mozharness/lib/python/authentication.py | 7 +- .../mozharness/mozilla/automation.py | 38 +- .../mozharness/mozilla/bouncer/submitter.py | 39 +- .../mozharness/mozilla/checksums.py | 16 +- .../mozharness/mozilla/firefox/autoconfig.py | 64 +- .../mozharness/mozilla/l10n/locales.py | 72 +- .../mozilla/l10n/multi_locale_build.py | 131 +- .../mozharness/mozharness/mozilla/merkle.py | 46 +- .../mozharness/mozharness/mozilla/mozbase.py | 11 +- .../mozharness/mozilla/repo_manipulation.py | 136 +- .../mozharness/mozharness/mozilla/secrets.py | 29 +- .../mozharness/mozilla/structuredlog.py | 145 +- .../mozharness/mozilla/testing/android.py | 428 +- .../mozilla/testing/codecoverage.py | 410 +- .../mozilla/testing/firefox_ui_tests.py | 403 +- .../mozilla/testing/per_test_base.py | 314 +- .../mozharness/mozilla/testing/talos.py | 641 +- .../mozharness/mozilla/testing/try_tools.py | 150 +- 
.../mozharness/mozilla/testing/unittest.py | 120 +- .../mozilla/testing/verify_tools.py | 37 +- .../mozharness/mozharness/mozilla/tooltool.py | 80 +- .../mozharness/mozharness/mozilla/vcstools.py | 19 +- .../scripts/android_emulator_pgo.py | 158 +- .../scripts/android_emulator_unittest.py | 488 +- .../scripts/android_hardware_unittest.py | 434 +- testing/mozharness/scripts/android_wrench.py | 142 +- testing/mozharness/scripts/awsy_script.py | 327 +- testing/mozharness/scripts/configtest.py | 100 +- testing/mozharness/scripts/desktop_l10n.py | 321 +- .../scripts/desktop_partner_repacks.py | 212 +- .../scripts/firefox_ui_tests/functional.py | 2 +- .../scripts/firefox_ui_tests/update.py | 2 +- .../firefox_ui_tests/update_release.py | 269 +- .../mozharness/scripts/fx_desktop_build.py | 57 +- testing/mozharness/scripts/l10n_bumper.py | 271 +- .../scripts/merge_day/gecko_migration.py | 431 +- testing/mozharness/scripts/multil10n.py | 2 +- testing/mozharness/scripts/openh264_build.py | 422 +- testing/mozharness/scripts/raptor_script.py | 2 +- .../scripts/release/generate-checksums.py | 157 +- testing/mozharness/scripts/repackage.py | 145 +- testing/mozharness/scripts/talos_script.py | 2 +- .../scripts/telemetry/telemetry_client.py | 238 +- .../mozharness/scripts/web_platform_tests.py | 552 +- testing/mozharness/setup.py | 46 +- testing/mozharness/test/test_base_config.py | 291 +- .../mozharness/test/test_base_diskutils.py | 60 +- testing/mozharness/test/test_base_log.py | 13 +- testing/mozharness/test/test_base_parallel.py | 4 +- testing/mozharness/test/test_base_python.py | 36 +- testing/mozharness/test/test_base_script.py | 575 +- .../test/test_base_vcs_mercurial.py | 188 +- testing/mozharness/test/test_l10n_locales.py | 87 +- .../test/test_mozilla_automation.py | 10 +- .../test/test_mozilla_building_buildbase.py | 94 +- .../test/test_mozilla_structured.py | 56 +- testing/parse_build_tests_ccov.py | 71 +- testing/perfdocs/moz.build | 2 +- testing/performance/hooks_android_view.py | 55 +- testing/profiles/moz.build | 24 +- .../constants/raptor_tests_constants.py | 588 +- testing/raptor/logger/logger.py | 14 +- testing/raptor/mach_commands.py | 247 +- testing/raptor/moz.build | 2 +- testing/raptor/raptor/benchmark.py | 45 +- testing/raptor/raptor/browsertime/android.py | 67 +- testing/raptor/raptor/browsertime/base.py | 94 +- testing/raptor/raptor/browsertime/desktop.py | 12 +- testing/raptor/raptor/cmdline.py | 599 +- testing/raptor/raptor/control_server.py | 36 +- testing/raptor/raptor/cpu.py | 44 +- testing/raptor/raptor/filters.py | 54 +- testing/raptor/raptor/gecko_profile.py | 176 +- testing/raptor/raptor/gen_test_config.py | 22 +- testing/raptor/raptor/manifest.py | 448 +- testing/raptor/raptor/memory.py | 26 +- testing/raptor/raptor/output.py | 96 +- testing/raptor/raptor/outputhandler.py | 6 +- testing/raptor/raptor/performance_tuning.py | 204 +- testing/raptor/raptor/perftest.py | 54 +- testing/raptor/raptor/power.py | 56 +- testing/raptor/raptor/raptor.py | 9 +- testing/raptor/raptor/results.py | 84 +- testing/raptor/raptor/signal_handler.py | 1 - testing/raptor/raptor/utils.py | 46 +- testing/raptor/raptor/webextension/android.py | 37 +- testing/raptor/raptor/webextension/base.py | 31 +- testing/raptor/test/test_control_server.py | 65 +- testing/raptor/test/test_manifest.py | 466 +- testing/raptor/test/test_playback.py | 16 +- testing/raptor/test/test_raptor.py | 30 +- testing/raptor/test/test_utils.py | 50 +- testing/remotecppunittests.py | 188 +- testing/runcppunittests.py | 219 +- 
testing/runtimes/moz.build | 2 +- testing/specialpowers/moz.build | 28 +- testing/talos/INSTALL.py | 30 +- testing/talos/mach_commands.py | 101 +- testing/talos/moz.build | 2 +- testing/talos/setup.py | 82 +- testing/talos/talos/cmanager.py | 21 +- testing/talos/talos/cmanager_base.py | 3 +- testing/talos/talos/cmanager_linux.py | 46 +- testing/talos/talos/cmanager_mac.py | 36 +- testing/talos/talos/cmanager_win32.py | 139 +- testing/talos/talos/cmdline.py | 394 +- testing/talos/talos/config.py | 226 +- testing/talos/talos/ffsetup.py | 194 +- testing/talos/talos/filter.py | 54 +- testing/talos/talos/gecko_profile.py | 186 +- testing/talos/talos/heavy.py | 29 +- testing/talos/talos/mainthreadio.py | 116 +- testing/talos/talos/output.py | 176 +- testing/talos/talos/results.py | 241 +- testing/talos/talos/run_tests.py | 307 +- testing/talos/talos/scripts/report.py | 72 +- testing/talos/talos/talos_process.py | 77 +- testing/talos/talos/talosconfig.py | 65 +- testing/talos/talos/ttest.py | 232 +- testing/talos/talos/unittests/conftest.py | 16 +- testing/talos/talos/unittests/test_config.py | 1358 +- testing/talos/talos/unittests/test_test.py | 147 +- testing/talos/talos/utils.py | 79 +- testing/talos/talos/whitelist.py | 96 +- testing/talos/talos/xtalos/etlparser.py | 331 +- testing/talos/talos/xtalos/parse_xperf.py | 63 +- testing/talos/talos/xtalos/start_xperf.py | 76 +- testing/talos/talos/xtalos/xperf_analyzer.py | 459 +- testing/talos/talos/xtalos/xtalos.py | 141 +- testing/talos/talos_from_code.py | 78 +- testing/testinfo.py | 793 +- testing/tools/iceserver/iceserver.py | 401 +- testing/tools/mach_test_package_bootstrap.py | 180 +- testing/tools/minidumpwriter/moz.build | 12 +- testing/tools/screenshot/moz.build | 29 +- .../websocketprocessbridge.py | 28 +- testing/tps/create_venv.py | 187 +- testing/tps/mach_commands.py | 16 +- testing/tps/setup.py | 69 +- testing/tps/tps/__init__.py | 4 +- testing/tps/tps/cli.py | 204 +- testing/tps/tps/firefoxrunner.py | 24 +- testing/tps/tps/phase.py | 51 +- testing/tps/tps/testrunner.py | 347 +- testing/web-platform/mach_commands.py | 339 +- testing/web-platform/mach_commands_base.py | 32 +- .../mach_test_package_commands.py | 46 +- testing/web-platform/manifestdownload.py | 112 +- testing/web-platform/manifestupdate.py | 157 +- testing/web-platform/metasummary.py | 85 +- testing/web-platform/moz.build | 22 +- .../mozilla/tests/binast/serve.py | 14 +- .../mozilla/tests/webdriver/conftest.py | 4 +- .../tests/webdriver/send_alert_text.py | 6 +- .../take_full_screenshot/__init__.py | 8 +- .../webdriver/take_full_screenshot/iframe.py | 9 +- .../take_full_screenshot/screenshot.py | 10 +- testing/web-platform/test_metamerge.py | 12 +- testing/web-platform/unittestrunner.py | 42 +- testing/web-platform/update/__init__.py | 8 +- testing/web-platform/update/fetchlogs.py | 32 +- testing/web-platform/update/github.py | 27 +- testing/web-platform/update/tree.py | 17 +- testing/web-platform/update/update.py | 25 +- .../web-platform/update/updatecommandline.py | 31 +- testing/web-platform/update/upstream.py | 174 +- testing/web-platform/vcs.py | 19 +- testing/xpcshell/example/moz.build | 2 +- testing/xpcshell/mach_commands.py | 134 +- .../xpcshell/mach_test_package_commands.py | 24 +- testing/xpcshell/moz-http2/moz.build | 4 +- testing/xpcshell/moz.build | 6 +- testing/xpcshell/remotexpcshelltests.py | 253 +- testing/xpcshell/runxpcshelltests.py | 1072 +- testing/xpcshell/selftest.py | 364 +- testing/xpcshell/xpcshellcommandline.py | 506 +- 
toolkit/actors/moz.build | 106 +- .../components/aboutcheckerboard/moz.build | 6 +- toolkit/components/aboutmemory/moz.build | 10 +- toolkit/components/aboutperformance/moz.build | 8 +- toolkit/components/aboutprocesses/moz.build | 8 +- toolkit/components/alerts/moz.build | 30 +- toolkit/components/antitracking/moz.build | 100 +- toolkit/components/apppicker/moz.build | 6 +- toolkit/components/asyncshutdown/moz.build | 16 +- toolkit/components/autocomplete/moz.build | 32 +- .../backgroundhangmonitor/moz.build | 46 +- toolkit/components/bitsdownload/moz.build | 24 +- toolkit/components/browser/build/moz.build | 4 +- toolkit/components/browser/moz.build | 46 +- toolkit/components/build/moz.build | 14 +- toolkit/components/captivedetect/moz.build | 14 +- .../components/cascade_bloom_filter/moz.build | 22 +- toolkit/components/certviewer/moz.build | 16 +- toolkit/components/cleardata/moz.build | 24 +- .../test_service_worker_at_shutdown.py | 11 +- toolkit/components/clearsitedata/moz.build | 16 +- toolkit/components/cloudstorage/moz.build | 10 +- toolkit/components/commandlines/moz.build | 30 +- toolkit/components/contentprefs/moz.build | 22 +- .../components/contextualidentity/moz.build | 8 +- toolkit/components/corroborator/moz.build | 8 +- toolkit/components/crashes/moz.build | 27 +- toolkit/components/crashmonitor/moz.build | 12 +- toolkit/components/ctypes/moz.build | 16 +- toolkit/components/ctypes/tests/moz.build | 26 +- toolkit/components/downloads/moz.build | 46 +- .../components/enterprisepolicies/moz.build | 30 +- .../enterprisepolicies/tests/moz.build | 6 +- toolkit/components/extensions/moz.build | 150 +- .../components/extensions/schemas/moz.build | 2 +- .../components/extensions/storage/moz.build | 14 +- ..._serviceworkers_purged_on_pref_disabled.py | 20 +- .../extensions/webrequest/moz.build | 44 +- .../featuregates/gen_feature_definitions.py | 110 +- toolkit/components/featuregates/moz.build | 19 +- .../python/test_gen_feature_definitions.py | 148 +- toolkit/components/filewatcher/moz.build | 20 +- .../components/finalizationwitness/moz.build | 16 +- toolkit/components/find/moz.build | 20 +- toolkit/components/forgetaboutsite/moz.build | 10 +- toolkit/components/fuzzyfox/moz.build | 8 +- toolkit/components/gfx/moz.build | 10 +- .../build_scripts/glean_parser_ext/cpp.py | 2 +- .../build_scripts/glean_parser_ext/js.py | 56 +- .../glean_parser_ext/run_glean_parser.py | 16 +- .../build_scripts/glean_parser_ext/rust.py | 30 +- .../glean_parser_ext/string_table.py | 15 +- toolkit/components/glean/gtest/moz.build | 8 +- toolkit/components/glean/metrics_index.py | 6 +- toolkit/components/glean/moz.build | 106 +- .../glean/pytest/test_glean_parser_cpp.py | 17 +- .../glean/pytest/test_glean_parser_js.py | 15 +- .../glean/pytest/test_glean_parser_rust.py | 53 +- toolkit/components/glean/sphinx/glean.py | 8 +- toolkit/components/glean/xpcom/moz.build | 22 +- toolkit/components/httpsonlyerror/moz.build | 8 +- toolkit/components/kvstore/moz.build | 16 +- toolkit/components/lz4/moz.build | 14 +- .../components/maintenanceservice/moz.build | 46 +- toolkit/components/mediasniffer/moz.build | 16 +- toolkit/components/messaging-system/moz.build | 26 +- toolkit/components/moz.build | 208 +- toolkit/components/mozintl/moz.build | 20 +- toolkit/components/mozprotocol/moz.build | 12 +- toolkit/components/narrate/moz.build | 12 +- toolkit/components/normandy/moz.build | 18 +- .../normandy/test/browser/moz.build | 20 +- .../components/normandy/test/create_xpi.py | 2 +- 
toolkit/components/osfile/modules/moz.build | 30 +- toolkit/components/osfile/moz.build | 22 +- toolkit/components/parentalcontrols/moz.build | 26 +- toolkit/components/passwordmgr/moz.build | 88 +- toolkit/components/pdfjs/moz.build | 8 +- toolkit/components/perfmonitoring/moz.build | 23 +- toolkit/components/pictureinpicture/moz.build | 14 +- toolkit/components/places/moz.build | 116 +- .../components/places/tests/gtest/moz.build | 4 +- toolkit/components/places/tests/moz.build | 102 +- toolkit/components/printing/moz.build | 10 +- toolkit/components/printingui/ipc/moz.build | 30 +- toolkit/components/printingui/moz.build | 18 +- toolkit/components/processsingleton/moz.build | 12 +- toolkit/components/processtools/moz.build | 18 +- toolkit/components/promiseworker/moz.build | 12 +- .../components/promiseworker/worker/moz.build | 2 +- toolkit/components/prompts/moz.build | 20 +- toolkit/components/prompts/src/moz.build | 8 +- toolkit/components/reader/moz.build | 24 +- toolkit/components/reflect/moz.build | 12 +- toolkit/components/remote/moz.build | 52 +- .../components/remotebrowserutils/moz.build | 8 +- .../components/remotepagemanager/moz.build | 12 +- .../components/reputationservice/moz.build | 49 +- .../reputationservice/test/gtest/moz.build | 13 +- .../components/resistfingerprinting/moz.build | 16 +- .../resistfingerprinting/tests/moz.build | 6 +- toolkit/components/satchel/moz.build | 44 +- toolkit/components/search/moz.build | 42 +- toolkit/components/securityreporter/moz.build | 12 +- toolkit/components/sessionstore/moz.build | 26 +- toolkit/components/shell/moz.build | 4 +- toolkit/components/startup/moz.build | 18 +- toolkit/components/startup/public/moz.build | 5 +- toolkit/components/statusfilter/moz.build | 10 +- .../telemetry/build_scripts/gen_event_data.py | 117 +- .../telemetry/build_scripts/gen_event_enum.py | 8 +- .../build_scripts/gen_histogram_data.py | 143 +- .../build_scripts/gen_histogram_enum.py | 42 +- .../build_scripts/gen_histogram_phf.py | 27 +- .../build_scripts/gen_process_data.py | 14 +- .../build_scripts/gen_process_enum.py | 7 +- .../build_scripts/gen_scalar_data.py | 111 +- .../build_scripts/gen_scalar_enum.py | 2 +- .../build_scripts/gen_userinteraction_data.py | 31 +- .../build_scripts/gen_userinteraction_phf.py | 25 +- .../build_scripts/mozparsers/parse_events.py | 304 +- .../build_scripts/mozparsers/parse_scalars.py | 318 +- .../mozparsers/parse_user_interactions.py | 142 +- .../mozparsers/shared_telemetry_utils.py | 85 +- .../build_scripts/run_glean_parser.py | 2 +- .../telemetry/build_scripts/setup.py | 26 +- .../telemetry/geckoview/gtest/moz.build | 24 +- toolkit/components/telemetry/moz.build | 312 +- .../components/telemetry/pingsender/moz.build | 18 +- .../telemetry/tests/gtest/moz.build | 27 +- .../tests/integration/tests/conftest.py | 2 +- .../telemetry_harness/fog_ping_server.py | 7 +- .../harness/telemetry_harness/fog_testcase.py | 4 +- .../harness/telemetry_harness/ping_server.py | 8 +- .../harness/telemetry_harness/testcase.py | 18 +- .../tests/marionette/mach_commands.py | 21 +- .../client/test_deletion_request_ping.py | 6 +- .../client/test_fog_deletion_request_ping.py | 4 +- .../tests/client/test_main_tab_scalars.py | 8 +- .../test_search_counts_across_sessions.py | 28 +- .../unit/test_ping_server_received_ping.py | 4 +- toolkit/components/telemetry/tests/moz.build | 14 +- .../tests/python/test_gen_event_data_json.py | 8 +- .../tests/python/test_gen_scalar_data_json.py | 10 +- .../python/test_histogramtools_non_strict.py | 96 +- 
.../python/test_histogramtools_strict.py | 255 +- .../tests/python/test_parse_events.py | 47 +- .../tests/python/test_parse_scalars.py | 76 +- .../tests/python/test_usecounters.py | 26 +- toolkit/components/terminator/moz.build | 18 +- toolkit/components/thumbnails/moz.build | 32 +- toolkit/components/timermanager/moz.build | 14 +- toolkit/components/tooltiptext/moz.build | 10 +- toolkit/components/typeaheadfind/moz.build | 14 +- .../updateagent/UpdateUrlConstants.py | 10 +- toolkit/components/updateagent/moz.build | 12 +- toolkit/components/url-classifier/moz.build | 110 +- .../url-classifier/tests/gtest/moz.build | 46 +- .../components/url-classifier/tests/moz.build | 14 +- toolkit/components/urlformatter/moz.build | 16 +- toolkit/components/utils/moz.build | 28 +- toolkit/components/viaduct/moz.build | 26 +- toolkit/components/viewconfig/moz.build | 6 +- toolkit/components/viewsource/moz.build | 10 +- toolkit/components/windowcreator/moz.build | 16 +- .../components/windowcreator/test/moz.build | 6 +- toolkit/components/windowwatcher/moz.build | 52 +- .../components/windowwatcher/test/moz.build | 7 +- toolkit/components/workerloader/moz.build | 10 +- toolkit/components/xulstore/moz.build | 28 +- .../components/xulstore/tests/gtest/moz.build | 8 +- toolkit/content/moz.build | 348 +- .../tests/chrome/file_about_networking_wsh.py | 9 +- toolkit/content/tests/moz.build | 12 +- .../breakpad-windows-libxul/moz.build | 18 +- .../breakpad-windows-standalone/moz.build | 16 +- toolkit/crashreporter/client/moz.build | 98 +- .../generate_crash_reporter_sources.py | 58 +- toolkit/crashreporter/injector/moz.build | 16 +- .../crashreporter/minidump-analyzer/moz.build | 41 +- toolkit/crashreporter/moz.build | 156 +- toolkit/crashreporter/rust/moz.build | 2 +- toolkit/crashreporter/test/gtest/moz.build | 10 +- toolkit/crashreporter/test/moz.build | 76 +- toolkit/crashreporter/tools/symbolstore.py | 169 +- .../crashreporter/tools/unit-symbolstore.py | 79 +- toolkit/crashreporter/tools/upload_symbols.py | 147 +- toolkit/library/build/dependentlibs.py | 72 +- toolkit/library/build/moz.build | 28 +- toolkit/library/dummydll/moz.build | 8 +- toolkit/library/gen_buildid.py | 18 +- toolkit/library/gtest/moz.build | 18 +- toolkit/library/gtest/rust/moz.build | 9 +- toolkit/library/libxul.so-gdb.py | 8 +- toolkit/library/moz.build | 420 +- toolkit/library/rust/moz.build | 26 +- toolkit/locales/gen_multilocale.py | 2 +- toolkit/locales/generate_locale_ini.py | 2 +- toolkit/locales/generate_update_locale.py | 2 +- toolkit/locales/moz.build | 56 +- toolkit/modules/moz.build | 422 +- toolkit/modules/subprocess/moz.build | 30 +- .../test/xpcshell/data_test_script.py | 22 +- toolkit/moz.build | 109 +- toolkit/moz.configure | 2108 +-- toolkit/mozapps/defaultagent/moz.build | 42 +- toolkit/mozapps/defaultagent/rust/moz.build | 2 +- toolkit/mozapps/downloads/moz.build | 16 +- toolkit/mozapps/downloads/tests/moz.build | 4 +- .../mozapps/extensions/gen_built_in_addons.py | 25 +- toolkit/mozapps/extensions/internal/moz.build | 24 +- toolkit/mozapps/extensions/moz.build | 99 +- .../mozapps/extensions/test/browser/moz.build | 29 +- toolkit/mozapps/extensions/test/create_xpi.py | 9 +- toolkit/mozapps/extensions/test/moz.build | 18 +- toolkit/mozapps/handling/moz.build | 10 +- toolkit/mozapps/installer/find-dupes.py | 92 +- toolkit/mozapps/installer/informulate.py | 123 +- toolkit/mozapps/installer/l10n-repack.py | 81 +- toolkit/mozapps/installer/moz.build | 8 +- toolkit/mozapps/installer/packager.py | 206 +- 
toolkit/mozapps/installer/strip.py | 8 +- toolkit/mozapps/installer/unpack.py | 12 +- .../windows/nsis/preprocess-locale.py | 152 +- toolkit/mozapps/preferences/moz.build | 6 +- toolkit/mozapps/update/common/moz.build | 74 +- toolkit/mozapps/update/moz.build | 26 +- toolkit/mozapps/update/tests/moz.build | 130 +- .../mozapps/update/updater/bspatch/moz.build | 10 +- .../mozapps/update/updater/gen_cert_header.py | 34 +- toolkit/mozapps/update/updater/moz.build | 101 +- .../update/updater/updater-common.build | 102 +- .../update/updater/updater-dep/moz.build | 11 +- .../update/updater/updater-xpcshell/moz.build | 11 +- toolkit/pluginproblem/moz.build | 2 +- toolkit/profile/gtest/moz.build | 10 +- toolkit/profile/moz.build | 42 +- toolkit/system/androidproxy/moz.build | 10 +- toolkit/system/gnome/moz.build | 26 +- toolkit/system/osxproxy/moz.build | 14 +- toolkit/system/osxproxy/tests/gtest/moz.build | 10 +- toolkit/system/unixproxy/moz.build | 16 +- toolkit/system/windowsDHCPClient/moz.build | 16 +- .../windowsDHCPClient/tests/gtest/moz.build | 14 +- toolkit/system/windowsproxy/moz.build | 15 +- .../system/windowsproxy/tests/gtest/moz.build | 10 +- toolkit/themes/linux/global/moz.build | 2 +- toolkit/themes/linux/moz.build | 3 +- toolkit/themes/linux/mozapps/moz.build | 2 +- toolkit/themes/mobile/moz.build | 2 +- toolkit/themes/moz.build | 22 +- toolkit/themes/osx/global/moz.build | 2 +- toolkit/themes/osx/moz.build | 3 +- toolkit/themes/osx/mozapps/moz.build | 2 +- toolkit/themes/windows/global/moz.build | 2 +- toolkit/themes/windows/moz.build | 3 +- toolkit/themes/windows/mozapps/moz.build | 2 +- toolkit/xre/moz.build | 328 +- toolkit/xre/test/gtest/moz.build | 12 +- .../test/marionette/test_fission_autostart.py | 404 +- toolkit/xre/test/win/moz.build | 26 +- tools/code-coverage/moz.build | 28 +- tools/compare-locales/mach_commands.py | 90 +- .../minidump_stackwalk/curl-compat/moz.build | 6 +- .../minidump_stackwalk/moz.build | 36 +- tools/crashreporter/moz.configure | 2 +- tools/fuzzing/common/moz.build | 12 +- tools/fuzzing/faulty/moz.build | 13 +- tools/fuzzing/interface/harness/moz.build | 6 +- tools/fuzzing/interface/moz.build | 16 +- tools/fuzzing/ipc/moz.build | 14 +- tools/fuzzing/messagemanager/moz.build | 10 +- tools/fuzzing/moz.build | 34 +- tools/fuzzing/registry/moz.build | 8 +- tools/fuzzing/rust/moz.build | 8 +- tools/fuzzing/shmem/moz.build | 10 +- tools/github-sync/converter.py | 127 +- tools/github-sync/read-json.py | 8 +- tools/jprof/moz.build | 24 +- tools/jprof/split-profile.py | 19 +- tools/jprof/stub/moz.build | 8 +- tools/lint/black.yml | 55 +- tools/mach_commands.py | 412 +- tools/moz.build | 22 +- tools/performance/moz.build | 8 +- tools/phabricator/mach_commands.py | 18 +- tools/power/moz.build | 17 +- tools/profiler/moz.build | 202 +- tools/profiler/tests/gtest/moz.build | 42 +- tools/quitter/moz.build | 12 +- tools/rb/find_leakers.py | 58 +- tools/rb/fix_stacks.py | 34 +- tools/tryselect/cli.py | 163 +- tools/tryselect/mach_commands.py | 225 +- tools/tryselect/preset.py | 21 +- tools/tryselect/push.py | 100 +- tools/tryselect/selectors/again.py | 113 +- tools/tryselect/selectors/auto.py | 57 +- tools/tryselect/selectors/chooser/__init__.py | 68 +- tools/tryselect/selectors/chooser/app.py | 106 +- tools/tryselect/selectors/empty.py | 16 +- tools/tryselect/selectors/fuzzy.py | 322 +- tools/tryselect/selectors/preview.py | 52 +- tools/tryselect/selectors/release.py | 141 +- tools/tryselect/selectors/syntax.py | 476 +- tools/tryselect/task_config.py | 291 +- 
tools/tryselect/tasks.py | 51 +- tools/tryselect/test/conftest.py | 50 +- tools/tryselect/test/test_again.py | 10 +- tools/tryselect/test/test_auto.py | 4 +- tools/tryselect/test/test_chooser.py | 41 +- tools/tryselect/test/test_fuzzy.py | 18 +- .../test/test_mozharness_integration.py | 97 +- tools/tryselect/test/test_presets.py | 45 +- tools/tryselect/test/test_task_configs.py | 105 +- tools/tryselect/test/test_tasks.py | 38 +- tools/tryselect/util/dicttools.py | 8 +- tools/tryselect/util/estimates.py | 22 +- tools/tryselect/util/manage_estimates.py | 44 +- .../make_incremental_updates.py | 356 +- tools/update-packaging/moz.build | 1 - tools/update-programs/moz.configure | 13 +- tools/update-verify/python/util/commands.py | 10 +- .../release/compare-directories.py | 187 +- .../release/replace-updater-certs.py | 8 +- tools/update-verify/scripts/chunked-verify.py | 13 +- tools/vcs/mach_commands.py | 247 +- uriloader/base/moz.build | 34 +- uriloader/exthandler/moz.build | 169 +- uriloader/exthandler/tests/moz.build | 23 +- uriloader/moz.build | 14 +- uriloader/prefetch/moz.build | 34 +- uriloader/preload/gtest/moz.build | 13 +- uriloader/preload/moz.build | 22 +- view/moz.build | 18 +- widget/android/bindings/moz.build | 51 +- widget/android/jni/moz.build | 28 +- widget/android/moz.build | 271 +- widget/cocoa/moz.build | 261 +- widget/gtk/moz.build | 198 +- widget/gtk/mozgtk/gtk2/moz.build | 22 +- widget/gtk/mozgtk/gtk3/moz.build | 20 +- widget/gtk/mozgtk/moz.build | 2 +- widget/gtk/mozgtk/stub/moz.build | 8 +- widget/gtk/mozwayland/moz.build | 9 +- widget/gtk/wayland/moz.build | 30 +- widget/headless/moz.build | 38 +- widget/headless/tests/moz.build | 2 +- widget/moz.build | 456 +- widget/tests/gtest/moz.build | 6 +- widget/tests/moz.build | 8 +- widget/uikit/moz.build | 16 +- widget/windows/moz.build | 232 +- widget/windows/tests/moz.build | 21 +- widget/x11/moz.build | 6 +- xpcom/base/ErrorList.py | 180 +- xpcom/base/moz.build | 361 +- xpcom/build/Services.py | 173 +- xpcom/build/moz.build | 122 +- xpcom/components/moz.build | 87 +- xpcom/ds/Atom.py | 4 +- xpcom/ds/StaticAtoms.py | 223 +- xpcom/ds/moz.build | 238 +- xpcom/ds/test/test_dafsa.py | 66 +- xpcom/ds/tools/incremental_dafsa.py | 14 +- xpcom/ds/tools/make_dafsa.py | 63 +- xpcom/ds/tools/perfecthash.py | 156 +- xpcom/glue/moz.build | 6 +- xpcom/glue/standalone/moz.build | 26 +- xpcom/idl-parser/setup.py | 18 +- xpcom/idl-parser/xpidl/header.py | 315 +- xpcom/idl-parser/xpidl/jsonxpt.py | 230 +- xpcom/idl-parser/xpidl/moz.build | 4 +- xpcom/idl-parser/xpidl/runtests.py | 127 +- xpcom/idl-parser/xpidl/rust.py | 306 +- xpcom/idl-parser/xpidl/rust_macros.py | 55 +- xpcom/idl-parser/xpidl/xpidl.py | 1038 +- xpcom/io/moz.build | 224 +- xpcom/moz.build | 44 +- xpcom/reflect/moz.build | 3 +- xpcom/reflect/xptcall/md/moz.build | 8 +- xpcom/reflect/xptcall/md/test/moz.build | 8 +- xpcom/reflect/xptcall/md/unix/moz.build | 333 +- xpcom/reflect/xptcall/md/win32/moz.build | 61 +- xpcom/reflect/xptcall/md/win32/preprocess.py | 12 +- xpcom/reflect/xptcall/moz.build | 14 +- xpcom/reflect/xptinfo/moz.build | 10 +- xpcom/reflect/xptinfo/xptcodegen.py | 406 +- xpcom/rust/gtest/moz.build | 10 +- xpcom/string/moz.build | 80 +- xpcom/system/moz.build | 29 +- xpcom/tests/gtest/moz.build | 200 +- xpcom/tests/moz.build | 34 +- xpcom/tests/windows/moz.build | 8 +- xpcom/threads/moz.build | 196 +- xpcom/windbgdlg/moz.build | 4 +- xpfe/appshell/moz.build | 42 +- 3432 files changed, 164855 insertions(+), 129246 deletions(-) diff --git a/.ycm_extra_conf.py 
b/.ycm_extra_conf.py
index 16ec74c9d09347..26b584e90532f1 100644
--- a/.ycm_extra_conf.py
+++ b/.ycm_extra_conf.py
@@ -12,56 +12,55 @@
 
 old_bytecode = sys.dont_write_bytecode
 sys.dont_write_bytecode = True
 
-path = os.path.abspath(os.path.join(os.path.dirname(__file__), 'mach'))
+path = os.path.abspath(os.path.join(os.path.dirname(__file__), "mach"))
 
 # If mach is not here, we're on the objdir go to the srcdir.
 if not os.path.exists(path):
-    with open(os.path.join(os.path.dirname(__file__), 'mozinfo.json')) as info:
+    with open(os.path.join(os.path.dirname(__file__), "mozinfo.json")) as info:
         config = json.loads(info.read())
-    path = os.path.join(config['topsrcdir'], 'mach')
+    path = os.path.join(config["topsrcdir"], "mach")
 
 sys.dont_write_bytecode = old_bytecode
 
+
 def _is_likely_cpp_header(filename):
-    if not filename.endswith('.h'):
+    if not filename.endswith(".h"):
         return False
-    if filename.endswith('Inlines.h') or filename.endswith('-inl.h'):
+    if filename.endswith("Inlines.h") or filename.endswith("-inl.h"):
         return True
-    cpp_file = filename[:-1] + 'cpp'
+    cpp_file = filename[:-1] + "cpp"
     return os.path.exists(cpp_file)
 
 
 def Settings(**kwargs):
-    if kwargs[ 'language' ] == 'cfamily':
-        return FlagsForFile(kwargs['filename'])
+    if kwargs["language"] == "cfamily":
+        return FlagsForFile(kwargs["filename"])
 
     # This is useful for generic language server protocols, like rust-analyzer,
     # to discover the right project root instead of guessing based on where the
     # closest Cargo.toml is.
     return {
-        'project_directory': '.',
+        "project_directory": ".",
     }
 
 
 def FlagsForFile(filename):
-    output = subprocess.check_output([path, 'compileflags', filename])
-    output = output.decode('utf-8')
+    output = subprocess.check_output([path, "compileflags", filename])
+    output = output.decode("utf-8")
 
     flag_list = shlex.split(output)
 
     # This flag is added by Fennec for android build and causes ycmd to fail to parse the file.
     # Removing this flag is a workaround until ycmd starts to handle this flag properly.
     # https://github.com/Valloric/YouCompleteMe/issues/1490
-    final_flags = [x for x in flag_list if not x.startswith('-march=armv')]
+    final_flags = [x for x in flag_list if not x.startswith("-march=armv")]
 
     if _is_likely_cpp_header(filename):
         final_flags += ["-x", "c++"]
 
-    return {
-        'flags': final_flags,
-        'do_cache': True
-    }
+    return {"flags": final_flags, "do_cache": True}
+
 
-if __name__ == '__main__':
+if __name__ == "__main__":
     print(FlagsForFile(sys.argv[1]))
diff --git a/accessible/android/moz.build b/accessible/android/moz.build
index 7cb195352caf1a..456a9d7ec642da 100644
--- a/accessible/android/moz.build
+++ b/accessible/android/moz.build
@@ -4,38 +4,39 @@
 # License, v. 2.0. If a copy of the MPL was not distributed with this
 # file, You can obtain one at http://mozilla.org/MPL/2.0/.
-EXPORTS.mozilla.a11y += ['AccessibleWrap.h', - 'HyperTextAccessibleWrap.h', - 'SessionAccessibility.h', - 'TraversalRule.h', +EXPORTS.mozilla.a11y += [ + "AccessibleWrap.h", + "HyperTextAccessibleWrap.h", + "SessionAccessibility.h", + "TraversalRule.h", ] SOURCES += [ - 'AccessibleWrap.cpp', - 'DocAccessibleWrap.cpp', - 'Platform.cpp', - 'ProxyAccessibleWrap.cpp', - 'RootAccessibleWrap.cpp', - 'SessionAccessibility.cpp', - 'TraversalRule.cpp', + "AccessibleWrap.cpp", + "DocAccessibleWrap.cpp", + "Platform.cpp", + "ProxyAccessibleWrap.cpp", + "RootAccessibleWrap.cpp", + "SessionAccessibility.cpp", + "TraversalRule.cpp", ] LOCAL_INCLUDES += [ - '/accessible/base', - '/accessible/generic', - '/accessible/html', - '/accessible/ipc', - '/accessible/ipc/other', - '/accessible/xpcom', - '/accessible/xul', - '/dom/base', - '/widget', - '/widget/android', + "/accessible/base", + "/accessible/generic", + "/accessible/html", + "/accessible/ipc", + "/accessible/ipc/other", + "/accessible/xpcom", + "/accessible/xul", + "/dom/base", + "/widget", + "/widget/android", ] -FINAL_LIBRARY = 'xul' +FINAL_LIBRARY = "xul" -include('/ipc/chromium/chromium-config.mozbuild') +include("/ipc/chromium/chromium-config.mozbuild") -if CONFIG['CC_TYPE'] in ('clang', 'gcc'): - CXXFLAGS += ['-Wno-error=shadow'] +if CONFIG["CC_TYPE"] in ("clang", "gcc"): + CXXFLAGS += ["-Wno-error=shadow"] diff --git a/accessible/aom/moz.build b/accessible/aom/moz.build index 15bdeeaa792ca6..88b941435ee8e5 100644 --- a/accessible/aom/moz.build +++ b/accessible/aom/moz.build @@ -5,40 +5,40 @@ # file, You can obtain one at http://mozilla.org/MPL/2.0/. EXPORTS.mozilla.dom += [ - 'AccessibleNode.h', + "AccessibleNode.h", ] UNIFIED_SOURCES += [ - 'AccessibleNode.cpp', + "AccessibleNode.cpp", ] LOCAL_INCLUDES += [ - '/accessible/base', - '/accessible/generic', + "/accessible/base", + "/accessible/generic", ] -if CONFIG['MOZ_WIDGET_TOOLKIT'] == 'gtk': +if CONFIG["MOZ_WIDGET_TOOLKIT"] == "gtk": LOCAL_INCLUDES += [ - '/accessible/atk', + "/accessible/atk", ] -elif CONFIG['MOZ_WIDGET_TOOLKIT'] == 'windows': +elif CONFIG["MOZ_WIDGET_TOOLKIT"] == "windows": LOCAL_INCLUDES += [ - '/accessible/windows/ia2', - '/accessible/windows/msaa', + "/accessible/windows/ia2", + "/accessible/windows/msaa", ] -elif CONFIG['MOZ_WIDGET_TOOLKIT'] == 'cocoa': +elif CONFIG["MOZ_WIDGET_TOOLKIT"] == "cocoa": LOCAL_INCLUDES += [ - '/accessible/mac', + "/accessible/mac", ] -elif CONFIG['MOZ_WIDGET_TOOLKIT'] == 'android': +elif CONFIG["MOZ_WIDGET_TOOLKIT"] == "android": LOCAL_INCLUDES += [ - '/accessible/android', + "/accessible/android", ] else: LOCAL_INCLUDES += [ - '/accessible/other', + "/accessible/other", ] -include('/ipc/chromium/chromium-config.mozbuild') +include("/ipc/chromium/chromium-config.mozbuild") -FINAL_LIBRARY = 'xul' +FINAL_LIBRARY = "xul" diff --git a/accessible/atk/moz.build b/accessible/atk/moz.build index 1029db42692378..67fb1b9bc2c76a 100644 --- a/accessible/atk/moz.build +++ b/accessible/atk/moz.build @@ -5,61 +5,61 @@ # file, You can obtain one at http://mozilla.org/MPL/2.0/. 
EXPORTS.mozilla.a11y += [ - 'AccessibleWrap.h', - 'HyperTextAccessibleWrap.h', + "AccessibleWrap.h", + "HyperTextAccessibleWrap.h", ] SOURCES += [ - 'AccessibleWrap.cpp', - 'ApplicationAccessibleWrap.cpp', - 'AtkSocketAccessible.cpp', - 'DocAccessibleWrap.cpp', - 'DOMtoATK.cpp', - 'nsMaiHyperlink.cpp', - 'nsMaiInterfaceAction.cpp', - 'nsMaiInterfaceComponent.cpp', - 'nsMaiInterfaceDocument.cpp', - 'nsMaiInterfaceEditableText.cpp', - 'nsMaiInterfaceHyperlinkImpl.cpp', - 'nsMaiInterfaceHypertext.cpp', - 'nsMaiInterfaceImage.cpp', - 'nsMaiInterfaceSelection.cpp', - 'nsMaiInterfaceTable.cpp', - 'nsMaiInterfaceTableCell.cpp', - 'nsMaiInterfaceText.cpp', - 'nsMaiInterfaceValue.cpp', - 'Platform.cpp', - 'RootAccessibleWrap.cpp', - 'UtilInterface.cpp', + "AccessibleWrap.cpp", + "ApplicationAccessibleWrap.cpp", + "AtkSocketAccessible.cpp", + "DocAccessibleWrap.cpp", + "DOMtoATK.cpp", + "nsMaiHyperlink.cpp", + "nsMaiInterfaceAction.cpp", + "nsMaiInterfaceComponent.cpp", + "nsMaiInterfaceDocument.cpp", + "nsMaiInterfaceEditableText.cpp", + "nsMaiInterfaceHyperlinkImpl.cpp", + "nsMaiInterfaceHypertext.cpp", + "nsMaiInterfaceImage.cpp", + "nsMaiInterfaceSelection.cpp", + "nsMaiInterfaceTable.cpp", + "nsMaiInterfaceTableCell.cpp", + "nsMaiInterfaceText.cpp", + "nsMaiInterfaceValue.cpp", + "Platform.cpp", + "RootAccessibleWrap.cpp", + "UtilInterface.cpp", ] LOCAL_INCLUDES += [ - '/accessible/base', - '/accessible/generic', - '/accessible/html', - '/accessible/ipc', - '/accessible/ipc/other', - '/accessible/xpcom', - '/accessible/xul', - '/other-licenses/atk-1.0', + "/accessible/base", + "/accessible/generic", + "/accessible/html", + "/accessible/ipc", + "/accessible/ipc/other", + "/accessible/xpcom", + "/accessible/xul", + "/other-licenses/atk-1.0", ] -FINAL_LIBRARY = 'xul' +FINAL_LIBRARY = "xul" -if CONFIG['MOZ_WIDGET_TOOLKIT'] == 'gtk': - CFLAGS += CONFIG['TK_CFLAGS'] - CXXFLAGS += CONFIG['TK_CFLAGS'] +if CONFIG["MOZ_WIDGET_TOOLKIT"] == "gtk": + CFLAGS += CONFIG["TK_CFLAGS"] + CXXFLAGS += CONFIG["TK_CFLAGS"] -if CONFIG['MOZ_ENABLE_DBUS']: - CXXFLAGS += CONFIG['MOZ_DBUS_CFLAGS'] +if CONFIG["MOZ_ENABLE_DBUS"]: + CXXFLAGS += CONFIG["MOZ_DBUS_CFLAGS"] -include('/ipc/chromium/chromium-config.mozbuild') +include("/ipc/chromium/chromium-config.mozbuild") -if CONFIG['CC_TYPE'] in ('clang', 'gcc'): +if CONFIG["CC_TYPE"] in ("clang", "gcc"): # Used in G_DEFINE_TYPE_EXTENDED macro, probably fixed in newer glib / # gobject headers. See bug 1243331 comment 3. CXXFLAGS += [ - '-Wno-error=unused-function', - '-Wno-error=shadow', - '-Wno-unused-local-typedefs', + "-Wno-error=unused-function", + "-Wno-error=shadow", + "-Wno-unused-local-typedefs", ] diff --git a/accessible/base/moz.build b/accessible/base/moz.build index e577a4af13183e..6eb2d96b04a4a8 100644 --- a/accessible/base/moz.build +++ b/accessible/base/moz.build @@ -4,117 +4,114 @@ # License, v. 2.0. If a copy of the MPL was not distributed with this # file, You can obtain one at http://mozilla.org/MPL/2.0/. 
-EXPORTS += [ - 'AccEvent.h', - 'nsAccessibilityService.h' -] +EXPORTS += ["AccEvent.h", "nsAccessibilityService.h"] EXPORTS.mozilla.a11y += [ - 'AccTypes.h', - 'DocManager.h', - 'FocusManager.h', - 'IDSet.h', - 'Platform.h', - 'RelationType.h', - 'Role.h', - 'SelectionManager.h', - 'States.h', + "AccTypes.h", + "DocManager.h", + "FocusManager.h", + "IDSet.h", + "Platform.h", + "RelationType.h", + "Role.h", + "SelectionManager.h", + "States.h", ] -if CONFIG['MOZ_DEBUG']: +if CONFIG["MOZ_DEBUG"]: EXPORTS.mozilla.a11y += [ - 'Logging.h', + "Logging.h", ] UNIFIED_SOURCES += [ - 'AccessibleOrProxy.cpp', - 'AccEvent.cpp', - 'AccGroupInfo.cpp', - 'AccIterator.cpp', - 'ARIAMap.cpp', - 'ARIAStateMap.cpp', - 'Asserts.cpp', - 'DocManager.cpp', - 'EmbeddedObjCollector.cpp', - 'EventQueue.cpp', - 'EventTree.cpp', - 'Filters.cpp', - 'FocusManager.cpp', - 'NotificationController.cpp', - 'nsAccessibilityService.cpp', - 'nsAccessiblePivot.cpp', - 'nsAccUtils.cpp', - 'nsCoreUtils.cpp', - 'nsEventShell.cpp', - 'nsTextEquivUtils.cpp', - 'Pivot.cpp', - 'SelectionManager.cpp', - 'StyleInfo.cpp', - 'TextAttrs.cpp', - 'TextRange.cpp', - 'TextUpdater.cpp', - 'TreeWalker.cpp', + "AccessibleOrProxy.cpp", + "AccEvent.cpp", + "AccGroupInfo.cpp", + "AccIterator.cpp", + "ARIAMap.cpp", + "ARIAStateMap.cpp", + "Asserts.cpp", + "DocManager.cpp", + "EmbeddedObjCollector.cpp", + "EventQueue.cpp", + "EventTree.cpp", + "Filters.cpp", + "FocusManager.cpp", + "NotificationController.cpp", + "nsAccessibilityService.cpp", + "nsAccessiblePivot.cpp", + "nsAccUtils.cpp", + "nsCoreUtils.cpp", + "nsEventShell.cpp", + "nsTextEquivUtils.cpp", + "Pivot.cpp", + "SelectionManager.cpp", + "StyleInfo.cpp", + "TextAttrs.cpp", + "TextRange.cpp", + "TextUpdater.cpp", + "TreeWalker.cpp", ] -if CONFIG['A11Y_LOG']: +if CONFIG["A11Y_LOG"]: UNIFIED_SOURCES += [ - 'Logging.cpp', + "Logging.cpp", ] LOCAL_INCLUDES += [ - '/accessible/generic', - '/accessible/html', - '/accessible/ipc', - '/dom/base', - '/dom/xul', + "/accessible/generic", + "/accessible/html", + "/accessible/ipc", + "/dom/base", + "/dom/xul", ] -if CONFIG['OS_ARCH'] == 'WINNT': +if CONFIG["OS_ARCH"] == "WINNT": LOCAL_INCLUDES += [ - '/accessible/ipc/win', + "/accessible/ipc/win", ] else: LOCAL_INCLUDES += [ - '/accessible/ipc/other', + "/accessible/ipc/other", ] LOCAL_INCLUDES += [ - '/accessible/xpcom', - '/accessible/xul', - '/dom/base', - '/ipc/chromium/src', - '/layout/generic', - '/layout/style', - '/layout/xul', - '/layout/xul/tree/', + "/accessible/xpcom", + "/accessible/xul", + "/dom/base", + "/ipc/chromium/src", + "/layout/generic", + "/layout/style", + "/layout/xul", + "/layout/xul/tree/", ] -if CONFIG['MOZ_WIDGET_TOOLKIT'] == 'gtk': +if CONFIG["MOZ_WIDGET_TOOLKIT"] == "gtk": LOCAL_INCLUDES += [ - '/accessible/atk', + "/accessible/atk", ] - CXXFLAGS += CONFIG['MOZ_CAIRO_CFLAGS'] -elif CONFIG['MOZ_WIDGET_TOOLKIT'] == 'windows': + CXXFLAGS += CONFIG["MOZ_CAIRO_CFLAGS"] +elif CONFIG["MOZ_WIDGET_TOOLKIT"] == "windows": LOCAL_INCLUDES += [ - '/accessible/windows/ia2', - '/accessible/windows/msaa', + "/accessible/windows/ia2", + "/accessible/windows/msaa", ] -elif CONFIG['MOZ_WIDGET_TOOLKIT'] == 'cocoa': +elif CONFIG["MOZ_WIDGET_TOOLKIT"] == "cocoa": LOCAL_INCLUDES += [ - '/accessible/mac', + "/accessible/mac", ] -elif CONFIG['MOZ_WIDGET_TOOLKIT'] == 'android': +elif CONFIG["MOZ_WIDGET_TOOLKIT"] == "android": LOCAL_INCLUDES += [ - '/accessible/android', + "/accessible/android", ] else: LOCAL_INCLUDES += [ - '/accessible/other', + "/accessible/other", ] -FINAL_LIBRARY = 'xul' 
+FINAL_LIBRARY = "xul" -include('/ipc/chromium/chromium-config.mozbuild') +include("/ipc/chromium/chromium-config.mozbuild") -if CONFIG['CC_TYPE'] in ('clang', 'gcc'): - CXXFLAGS += ['-Wno-error=shadow'] +if CONFIG["CC_TYPE"] in ("clang", "gcc"): + CXXFLAGS += ["-Wno-error=shadow"] diff --git a/accessible/generic/moz.build b/accessible/generic/moz.build index 7c0899dc409de5..8612b7d6f66104 100644 --- a/accessible/generic/moz.build +++ b/accessible/generic/moz.build @@ -5,73 +5,73 @@ # file, You can obtain one at http://mozilla.org/MPL/2.0/. EXPORTS.mozilla.a11y += [ - 'Accessible.h', - 'DocAccessible.h', - 'HyperTextAccessible.h', - 'OuterDocAccessible.h', + "Accessible.h", + "DocAccessible.h", + "HyperTextAccessible.h", + "OuterDocAccessible.h", ] UNIFIED_SOURCES += [ - 'Accessible.cpp', - 'ApplicationAccessible.cpp', - 'ARIAGridAccessible.cpp', - 'BaseAccessibles.cpp', - 'DocAccessible.cpp', - 'FormControlAccessible.cpp', - 'HyperTextAccessible.cpp', - 'ImageAccessible.cpp', - 'OuterDocAccessible.cpp', - 'RootAccessible.cpp', - 'TableAccessible.cpp', - 'TableCellAccessible.cpp', - 'TextLeafAccessible.cpp', + "Accessible.cpp", + "ApplicationAccessible.cpp", + "ARIAGridAccessible.cpp", + "BaseAccessibles.cpp", + "DocAccessible.cpp", + "FormControlAccessible.cpp", + "HyperTextAccessible.cpp", + "ImageAccessible.cpp", + "OuterDocAccessible.cpp", + "RootAccessible.cpp", + "TableAccessible.cpp", + "TableCellAccessible.cpp", + "TextLeafAccessible.cpp", ] LOCAL_INCLUDES += [ - '/accessible/base', - '/accessible/html', - '/accessible/xpcom', - '/accessible/xul', - '/dom/base', - '/dom/xul', - '/layout/generic', - '/layout/xul', + "/accessible/base", + "/accessible/html", + "/accessible/xpcom", + "/accessible/xul", + "/dom/base", + "/dom/xul", + "/layout/generic", + "/layout/xul", ] -if CONFIG['OS_ARCH'] == 'WINNT': +if CONFIG["OS_ARCH"] == "WINNT": LOCAL_INCLUDES += [ - '/accessible/ipc/win', + "/accessible/ipc/win", ] else: LOCAL_INCLUDES += [ - '/accessible/ipc/other', + "/accessible/ipc/other", ] -if CONFIG['MOZ_WIDGET_TOOLKIT'] == 'gtk': +if CONFIG["MOZ_WIDGET_TOOLKIT"] == "gtk": LOCAL_INCLUDES += [ - '/accessible/atk', + "/accessible/atk", ] -elif CONFIG['MOZ_WIDGET_TOOLKIT'] == 'windows': +elif CONFIG["MOZ_WIDGET_TOOLKIT"] == "windows": LOCAL_INCLUDES += [ - '/accessible/windows/ia2', - '/accessible/windows/msaa', + "/accessible/windows/ia2", + "/accessible/windows/msaa", ] -elif CONFIG['MOZ_WIDGET_TOOLKIT'] == 'cocoa': +elif CONFIG["MOZ_WIDGET_TOOLKIT"] == "cocoa": LOCAL_INCLUDES += [ - '/accessible/mac', + "/accessible/mac", ] -elif CONFIG['MOZ_WIDGET_TOOLKIT'] == 'android': +elif CONFIG["MOZ_WIDGET_TOOLKIT"] == "android": LOCAL_INCLUDES += [ - '/accessible/android', + "/accessible/android", ] else: LOCAL_INCLUDES += [ - '/accessible/other', + "/accessible/other", ] -FINAL_LIBRARY = 'xul' +FINAL_LIBRARY = "xul" -include('/ipc/chromium/chromium-config.mozbuild') +include("/ipc/chromium/chromium-config.mozbuild") -if CONFIG['CC_TYPE'] in ('clang', 'gcc'): - CXXFLAGS += ['-Wno-error=shadow'] +if CONFIG["CC_TYPE"] in ("clang", "gcc"): + CXXFLAGS += ["-Wno-error=shadow"] diff --git a/accessible/html/moz.build b/accessible/html/moz.build index 3f778b11a6a027..5b74f6741fb6b6 100644 --- a/accessible/html/moz.build +++ b/accessible/html/moz.build @@ -5,51 +5,51 @@ # file, You can obtain one at http://mozilla.org/MPL/2.0/. 
UNIFIED_SOURCES += [ - 'HTMLCanvasAccessible.cpp', - 'HTMLElementAccessibles.cpp', - 'HTMLFormControlAccessible.cpp', - 'HTMLImageMapAccessible.cpp', - 'HTMLLinkAccessible.cpp', - 'HTMLListAccessible.cpp', - 'HTMLSelectAccessible.cpp', - 'HTMLTableAccessible.cpp', + "HTMLCanvasAccessible.cpp", + "HTMLElementAccessibles.cpp", + "HTMLFormControlAccessible.cpp", + "HTMLImageMapAccessible.cpp", + "HTMLLinkAccessible.cpp", + "HTMLListAccessible.cpp", + "HTMLSelectAccessible.cpp", + "HTMLTableAccessible.cpp", ] LOCAL_INCLUDES += [ - '/accessible/base', - '/accessible/generic', - '/accessible/xpcom', - '/layout/forms', - '/layout/generic', - '/layout/tables', - '/layout/xul', + "/accessible/base", + "/accessible/generic", + "/accessible/xpcom", + "/layout/forms", + "/layout/generic", + "/layout/tables", + "/layout/xul", ] -if CONFIG['MOZ_WIDGET_TOOLKIT'] == 'gtk': +if CONFIG["MOZ_WIDGET_TOOLKIT"] == "gtk": LOCAL_INCLUDES += [ - '/accessible/atk', + "/accessible/atk", ] -elif CONFIG['MOZ_WIDGET_TOOLKIT'] == 'windows': +elif CONFIG["MOZ_WIDGET_TOOLKIT"] == "windows": LOCAL_INCLUDES += [ - '/accessible/windows/ia2', - '/accessible/windows/msaa', + "/accessible/windows/ia2", + "/accessible/windows/msaa", ] -elif CONFIG['MOZ_WIDGET_TOOLKIT'] == 'cocoa': +elif CONFIG["MOZ_WIDGET_TOOLKIT"] == "cocoa": LOCAL_INCLUDES += [ - '/accessible/mac', + "/accessible/mac", ] -elif CONFIG['MOZ_WIDGET_TOOLKIT'] == 'android': +elif CONFIG["MOZ_WIDGET_TOOLKIT"] == "android": LOCAL_INCLUDES += [ - '/accessible/android', + "/accessible/android", ] else: LOCAL_INCLUDES += [ - '/accessible/other', + "/accessible/other", ] -include('/ipc/chromium/chromium-config.mozbuild') +include("/ipc/chromium/chromium-config.mozbuild") -FINAL_LIBRARY = 'xul' +FINAL_LIBRARY = "xul" -if CONFIG['CC_TYPE'] in ('clang', 'gcc'): - CXXFLAGS += ['-Wno-error=shadow'] +if CONFIG["CC_TYPE"] in ("clang", "gcc"): + CXXFLAGS += ["-Wno-error=shadow"] diff --git a/accessible/interfaces/gecko/moz.build b/accessible/interfaces/gecko/moz.build index ea953ab41d8763..bc0ecb3ef2209f 100644 --- a/accessible/interfaces/gecko/moz.build +++ b/accessible/interfaces/gecko/moz.build @@ -4,30 +4,36 @@ # License, v. 2.0. If a copy of the MPL was not distributed with this # file, You can obtain one at http://mozilla.org/MPL/2.0/. -GeneratedFile('IGeckoCustom.h', 'IGeckoCustom_p.c', 'IGeckoCustom_i.c', - 'IGeckoCustom_dlldata.c', 'IGeckoCustom.tlb', - inputs=['IGeckoCustom.idl'], - script='/build/midl.py', entry_point='midl', - flags=['-dlldata', OBJDIR + '/IGeckoCustom_dlldata.c']) +GeneratedFile( + "IGeckoCustom.h", + "IGeckoCustom_p.c", + "IGeckoCustom_i.c", + "IGeckoCustom_dlldata.c", + "IGeckoCustom.tlb", + inputs=["IGeckoCustom.idl"], + script="/build/midl.py", + entry_point="midl", + flags=["-dlldata", OBJDIR + "/IGeckoCustom_dlldata.c"], +) SOURCES += [ - '!IGeckoCustom_dlldata.c', - '!IGeckoCustom_i.c', - '!IGeckoCustom_p.c', + "!IGeckoCustom_dlldata.c", + "!IGeckoCustom_i.c", + "!IGeckoCustom_p.c", ] EXPORTS += [ - '!IGeckoCustom.h', - '!IGeckoCustom_i.c', + "!IGeckoCustom.h", + "!IGeckoCustom_i.c", ] -FINAL_LIBRARY = 'xul' +FINAL_LIBRARY = "xul" # Suppress warnings from the MIDL generated code. 
-if CONFIG['CC_TYPE'] == 'clang-cl': +if CONFIG["CC_TYPE"] == "clang-cl": CFLAGS += [ - '-Wno-extern-initializer', - '-Wno-incompatible-pointer-types', - '-Wno-missing-braces', - '-Wno-unused-const-variable', + "-Wno-extern-initializer", + "-Wno-incompatible-pointer-types", + "-Wno-missing-braces", + "-Wno-unused-const-variable", ] diff --git a/accessible/interfaces/ia2/moz.build b/accessible/interfaces/ia2/moz.build index 00ad4add0cb205..e3861a2fa2c1eb 100644 --- a/accessible/interfaces/ia2/moz.build +++ b/accessible/interfaces/ia2/moz.build @@ -4,94 +4,128 @@ # License, v. 2.0. If a copy of the MPL was not distributed with this # file, You can obtain one at http://mozilla.org/MPL/2.0/. -GeckoSharedLibrary('IA2Marshal', linkage=None) +GeckoSharedLibrary("IA2Marshal", linkage=None) -DEFINES['REGISTER_PROXY_DLL'] = True +DEFINES["REGISTER_PROXY_DLL"] = True -DEFFILE = 'IA2Marshal.def' +DEFFILE = "IA2Marshal.def" OS_LIBS += [ - 'uuid', - 'kernel32', - 'rpcrt4', - 'ole32', - 'oleaut32', + "uuid", + "kernel32", + "rpcrt4", + "ole32", + "oleaut32", ] midl_enums = [ - 'AccessibleEventId', - 'AccessibleRole', - 'AccessibleStates', - 'IA2CommonTypes', + "AccessibleEventId", + "AccessibleRole", + "AccessibleStates", + "IA2CommonTypes", ] midl_interfaces = [ - 'Accessible2', - 'Accessible2_2', - 'Accessible2_3', - 'AccessibleAction', - 'AccessibleApplication', - 'AccessibleComponent', - 'AccessibleDocument', - 'AccessibleEditableText', - 'AccessibleHyperlink', - 'AccessibleHypertext', - 'AccessibleHypertext2', - 'AccessibleImage', - 'AccessibleRelation', - 'AccessibleTable', - 'AccessibleTable2', - 'AccessibleTableCell', - 'AccessibleText', - 'AccessibleText2', - 'AccessibleValue', + "Accessible2", + "Accessible2_2", + "Accessible2_3", + "AccessibleAction", + "AccessibleApplication", + "AccessibleComponent", + "AccessibleDocument", + "AccessibleEditableText", + "AccessibleHyperlink", + "AccessibleHypertext", + "AccessibleHypertext2", + "AccessibleImage", + "AccessibleRelation", + "AccessibleTable", + "AccessibleTable2", + "AccessibleTableCell", + "AccessibleText", + "AccessibleText2", + "AccessibleValue", ] for enum in midl_enums: - GeneratedFile(enum + '.h', inputs=['/other-licenses/ia2/' + enum + '.idl'], - script='/build/midl.py', entry_point='midl', - flags=['-app_config', '-I', TOPSRCDIR + '/other-licenses/ia2']) + GeneratedFile( + enum + ".h", + inputs=["/other-licenses/ia2/" + enum + ".idl"], + script="/build/midl.py", + entry_point="midl", + flags=["-app_config", "-I", TOPSRCDIR + "/other-licenses/ia2"], + ) - EXPORTS += ['!' + enum + '.h'] + EXPORTS += ["!" + enum + ".h"] for iface in midl_interfaces: - GeneratedFile(iface + '.h', iface + '_p.c', iface + '_i.c', iface + '_dlldata.c', - inputs=['/other-licenses/ia2/' + iface + '.idl'], - script='/build/midl.py', entry_point='midl', - flags=['-app_config', '-I', TOPSRCDIR + '/other-licenses/ia2', - '-dlldata', OBJDIR + '/' + iface + '_dlldata.c']) - - EXPORTS += ['!' + iface + '.h', '!' + iface + '_i.c'] - - for p in [iface + '_p.c', iface + '_i.c']: - SOURCES += ['!%s' % p] + GeneratedFile( + iface + ".h", + iface + "_p.c", + iface + "_i.c", + iface + "_dlldata.c", + inputs=["/other-licenses/ia2/" + iface + ".idl"], + script="/build/midl.py", + entry_point="midl", + flags=[ + "-app_config", + "-I", + TOPSRCDIR + "/other-licenses/ia2", + "-dlldata", + OBJDIR + "/" + iface + "_dlldata.c", + ], + ) + + EXPORTS += ["!" + iface + ".h", "!" 
+ iface + "_i.c"] + + for p in [iface + "_p.c", iface + "_i.c"]: + SOURCES += ["!%s" % p] # Give some symbols a unique name in each translation unit, to avoid # collisions caused by https://llvm.org/pr41817. - if CONFIG['CC_TYPE'] == 'clang-cl': - SOURCES['!%s' % p].flags += ['-DObject_StubDesc=Object_StubDesc__%s' % p[:-2]] - SOURCES['!%s' % p].flags += ['-DUserMarshalRoutines=UserMarshalRoutines__%s' % p[:-2]] + if CONFIG["CC_TYPE"] == "clang-cl": + SOURCES["!%s" % p].flags += [ + "-DObject_StubDesc=Object_StubDesc__%s" % p[:-2] + ] + SOURCES["!%s" % p].flags += [ + "-DUserMarshalRoutines=UserMarshalRoutines__%s" % p[:-2] + ] # Warning: the build system doesn't know about the dependency of IA2Marshal.rc on # IA2Typelib.tlb. We rely on the IA2Typelib.h output forcing the command to run # during export, before rc files are treated during compile. -GeneratedFile('IA2Typelib.h', 'IA2Typelib_i.c', 'IA2Typelib.tlb', - inputs=['IA2Typelib.idl'], script='/build/midl.py', entry_point='midl', - flags=['-app_config', '-I', TOPSRCDIR + '/other-licenses/ia2', - '-D', '_MIDL_DECLARE_WIREM_HANDLE']) - -GeneratedFile('dlldata.c', inputs=['!' + iface + '_dlldata.c' for iface in midl_interfaces], - script='/build/midl.py', entry_point='merge_dlldata') - -SOURCES += ['!dlldata.c'] - -RCINCLUDE = 'IA2Marshal.rc' +GeneratedFile( + "IA2Typelib.h", + "IA2Typelib_i.c", + "IA2Typelib.tlb", + inputs=["IA2Typelib.idl"], + script="/build/midl.py", + entry_point="midl", + flags=[ + "-app_config", + "-I", + TOPSRCDIR + "/other-licenses/ia2", + "-D", + "_MIDL_DECLARE_WIREM_HANDLE", + ], +) + +GeneratedFile( + "dlldata.c", + inputs=["!" + iface + "_dlldata.c" for iface in midl_interfaces], + script="/build/midl.py", + entry_point="merge_dlldata", +) + +SOURCES += ["!dlldata.c"] + +RCINCLUDE = "IA2Marshal.rc" # Suppress warnings from the MIDL generated code. -if CONFIG['CC_TYPE'] == 'clang-cl': +if CONFIG["CC_TYPE"] == "clang-cl": CFLAGS += [ - '-Wno-extern-initializer', - '-Wno-incompatible-pointer-types', - '-Wno-missing-braces', - '-Wno-unused-const-variable', + "-Wno-extern-initializer", + "-Wno-incompatible-pointer-types", + "-Wno-missing-braces", + "-Wno-unused-const-variable", ] diff --git a/accessible/interfaces/moz.build b/accessible/interfaces/moz.build index 093b7d9eeb4230..c5b6be6347d3cc 100644 --- a/accessible/interfaces/moz.build +++ b/accessible/interfaces/moz.build @@ -4,42 +4,42 @@ # License, v. 2.0. If a copy of the MPL was not distributed with this # file, You can obtain one at http://mozilla.org/MPL/2.0/. 
-if CONFIG['MOZ_WIDGET_TOOLKIT'] == 'windows' and CONFIG['COMPILE_ENVIRONMENT']: - DIRS += ['gecko', 'msaa', 'ia2'] +if CONFIG["MOZ_WIDGET_TOOLKIT"] == "windows" and CONFIG["COMPILE_ENVIRONMENT"]: + DIRS += ["gecko", "msaa", "ia2"] -if CONFIG['MOZ_WIDGET_TOOLKIT'] == 'cocoa': - XPIDL_SOURCES += ['nsIAccessibleMacInterface.idl'] +if CONFIG["MOZ_WIDGET_TOOLKIT"] == "cocoa": + XPIDL_SOURCES += ["nsIAccessibleMacInterface.idl"] XPIDL_SOURCES += [ - 'nsIAccessibilityService.idl', - 'nsIAccessible.idl', - 'nsIAccessibleAnnouncementEvent.idl', - 'nsIAccessibleApplication.idl', - 'nsIAccessibleCaretMoveEvent.idl', - 'nsIAccessibleDocument.idl', - 'nsIAccessibleEditableText.idl', - 'nsIAccessibleEvent.idl', - 'nsIAccessibleHideEvent.idl', - 'nsIAccessibleHyperLink.idl', - 'nsIAccessibleHyperText.idl', - 'nsIAccessibleImage.idl', - 'nsIAccessibleObjectAttributeChangedEvent.idl', - 'nsIAccessiblePivot.idl', - 'nsIAccessibleRelation.idl', - 'nsIAccessibleRole.idl', - 'nsIAccessibleScrollingEvent.idl', - 'nsIAccessibleSelectable.idl', - 'nsIAccessibleStateChangeEvent.idl', - 'nsIAccessibleStates.idl', - 'nsIAccessibleTable.idl', - 'nsIAccessibleTableChangeEvent.idl', - 'nsIAccessibleText.idl', - 'nsIAccessibleTextChangeEvent.idl', - 'nsIAccessibleTextRange.idl', - 'nsIAccessibleTextSelectionChangeEvent.idl', - 'nsIAccessibleTypes.idl', - 'nsIAccessibleValue.idl', - 'nsIAccessibleVirtualCursorChangeEvent.idl', + "nsIAccessibilityService.idl", + "nsIAccessible.idl", + "nsIAccessibleAnnouncementEvent.idl", + "nsIAccessibleApplication.idl", + "nsIAccessibleCaretMoveEvent.idl", + "nsIAccessibleDocument.idl", + "nsIAccessibleEditableText.idl", + "nsIAccessibleEvent.idl", + "nsIAccessibleHideEvent.idl", + "nsIAccessibleHyperLink.idl", + "nsIAccessibleHyperText.idl", + "nsIAccessibleImage.idl", + "nsIAccessibleObjectAttributeChangedEvent.idl", + "nsIAccessiblePivot.idl", + "nsIAccessibleRelation.idl", + "nsIAccessibleRole.idl", + "nsIAccessibleScrollingEvent.idl", + "nsIAccessibleSelectable.idl", + "nsIAccessibleStateChangeEvent.idl", + "nsIAccessibleStates.idl", + "nsIAccessibleTable.idl", + "nsIAccessibleTableChangeEvent.idl", + "nsIAccessibleText.idl", + "nsIAccessibleTextChangeEvent.idl", + "nsIAccessibleTextRange.idl", + "nsIAccessibleTextSelectionChangeEvent.idl", + "nsIAccessibleTypes.idl", + "nsIAccessibleValue.idl", + "nsIAccessibleVirtualCursorChangeEvent.idl", ] -XPIDL_MODULE = 'accessibility' +XPIDL_MODULE = "accessibility" diff --git a/accessible/interfaces/msaa/moz.build b/accessible/interfaces/msaa/moz.build index 72468dd00c5d5c..7a1053317a37e5 100644 --- a/accessible/interfaces/msaa/moz.build +++ b/accessible/interfaces/msaa/moz.build @@ -4,48 +4,54 @@ # License, v. 2.0. If a copy of the MPL was not distributed with this # file, You can obtain one at http://mozilla.org/MPL/2.0/. -GeckoSharedLibrary('AccessibleMarshal', linkage=None) +GeckoSharedLibrary("AccessibleMarshal", linkage=None) # Missing here, is the notion that changes to the idl files included by # ISimpleDOM.idl (e.g. ISimpleDOMNode.idl) should rebuild the outputs. 
-GeneratedFile('ISimpleDOM.h', 'ISimpleDOM_p.c', 'ISimpleDOM_i.c', - 'ISimpleDOM_dlldata.c', 'ISimpleDOM.tlb', - inputs=['ISimpleDOM.idl'], - script='/build/midl.py', entry_point='midl', - flags=['-I', SRCDIR, '-robust', '-dlldata', OBJDIR + '/ISimpleDOM_dlldata.c']) +GeneratedFile( + "ISimpleDOM.h", + "ISimpleDOM_p.c", + "ISimpleDOM_i.c", + "ISimpleDOM_dlldata.c", + "ISimpleDOM.tlb", + inputs=["ISimpleDOM.idl"], + script="/build/midl.py", + entry_point="midl", + flags=["-I", SRCDIR, "-robust", "-dlldata", OBJDIR + "/ISimpleDOM_dlldata.c"], +) SOURCES += [ - '!ISimpleDOM_dlldata.c', - '!ISimpleDOM_i.c', - '!ISimpleDOM_p.c', - 'AccessibleMarshalThunk.c', + "!ISimpleDOM_dlldata.c", + "!ISimpleDOM_i.c", + "!ISimpleDOM_p.c", + "AccessibleMarshalThunk.c", ] EXPORTS += [ - '!ISimpleDOM.h', - '!ISimpleDOM_i.c', + "!ISimpleDOM.h", + "!ISimpleDOM_i.c", ] -DEFINES['REGISTER_PROXY_DLL'] = True +DEFINES["REGISTER_PROXY_DLL"] = True # The following line is required to preserve compatibility with older versions # of AccessibleMarshal.dll. -DEFINES['PROXY_CLSID'] = 'IID_ISimpleDOMNode' +DEFINES["PROXY_CLSID"] = "IID_ISimpleDOMNode" -DEFFILE = 'AccessibleMarshal.def' +DEFFILE = "AccessibleMarshal.def" OS_LIBS += [ - 'kernel32', - 'rpcrt4', - 'oleaut32', + "kernel32", + "rpcrt4", + "oleaut32", ] -RCINCLUDE = 'AccessibleMarshal.rc' +RCINCLUDE = "AccessibleMarshal.rc" # Suppress warnings from the MIDL generated code. -if CONFIG['CC_TYPE'] == 'clang-cl': +if CONFIG["CC_TYPE"] == "clang-cl": CFLAGS += [ - '-Wno-extern-initializer', - '-Wno-incompatible-pointer-types', - '-Wno-missing-braces', - '-Wno-unused-const-variable', + "-Wno-extern-initializer", + "-Wno-incompatible-pointer-types", + "-Wno-missing-braces", + "-Wno-unused-const-variable", ] diff --git a/accessible/ipc/extension/android/moz.build b/accessible/ipc/extension/android/moz.build index ad4cbd6889a38e..50871e326bce72 100644 --- a/accessible/ipc/extension/android/moz.build +++ b/accessible/ipc/extension/android/moz.build @@ -4,26 +4,26 @@ # License, v. 2.0. If a copy of the MPL was not distributed with this # file, You can obtain one at http://mozilla.org/MPL/2.0/. -if CONFIG['ACCESSIBILITY']: - IPDL_SOURCES += ['PDocAccessiblePlatformExt.ipdl'] +if CONFIG["ACCESSIBILITY"]: + IPDL_SOURCES += ["PDocAccessiblePlatformExt.ipdl"] EXPORTS.mozilla.a11y += [ - 'DocAccessiblePlatformExtChild.h', - 'DocAccessiblePlatformExtParent.h', + "DocAccessiblePlatformExtChild.h", + "DocAccessiblePlatformExtParent.h", ] SOURCES += [ - 'DocAccessiblePlatformExtChild.cpp', - 'DocAccessiblePlatformExtParent.cpp', + "DocAccessiblePlatformExtChild.cpp", + "DocAccessiblePlatformExtParent.cpp", ] LOCAL_INCLUDES += [ - '/accessible/android', - '/accessible/generic', - '/accessible/ipc/other', - '/widget/android', + "/accessible/android", + "/accessible/generic", + "/accessible/ipc/other", + "/widget/android", ] -include('/ipc/chromium/chromium-config.mozbuild') +include("/ipc/chromium/chromium-config.mozbuild") -FINAL_LIBRARY = 'xul' +FINAL_LIBRARY = "xul" diff --git a/accessible/ipc/extension/mac/moz.build b/accessible/ipc/extension/mac/moz.build index b3b75e12474f14..e7a7bf3bf8de6f 100644 --- a/accessible/ipc/extension/mac/moz.build +++ b/accessible/ipc/extension/mac/moz.build @@ -4,25 +4,25 @@ # License, v. 2.0. If a copy of the MPL was not distributed with this # file, You can obtain one at http://mozilla.org/MPL/2.0/. 
-if CONFIG['ACCESSIBILITY']: - IPDL_SOURCES += ['PDocAccessiblePlatformExt.ipdl'] +if CONFIG["ACCESSIBILITY"]: + IPDL_SOURCES += ["PDocAccessiblePlatformExt.ipdl"] EXPORTS.mozilla.a11y += [ - 'DocAccessiblePlatformExtChild.h', - 'DocAccessiblePlatformExtParent.h', + "DocAccessiblePlatformExtChild.h", + "DocAccessiblePlatformExtParent.h", ] SOURCES += [ - 'DocAccessiblePlatformExtChild.cpp', + "DocAccessiblePlatformExtChild.cpp", ] LOCAL_INCLUDES += [ - '/accessible/base', - '/accessible/generic', - '/accessible/ipc/other', - '/accessible/mac', + "/accessible/base", + "/accessible/generic", + "/accessible/ipc/other", + "/accessible/mac", ] -include('/ipc/chromium/chromium-config.mozbuild') +include("/ipc/chromium/chromium-config.mozbuild") -FINAL_LIBRARY = 'xul' +FINAL_LIBRARY = "xul" diff --git a/accessible/ipc/extension/moz.build b/accessible/ipc/extension/moz.build index c532dfc83be829..3c82fd90b400b2 100644 --- a/accessible/ipc/extension/moz.build +++ b/accessible/ipc/extension/moz.build @@ -4,11 +4,11 @@ # License, v. 2.0. If a copy of the MPL was not distributed with this # file, You can obtain one at http://mozilla.org/MPL/2.0/. -toolkit = CONFIG['MOZ_WIDGET_TOOLKIT'] +toolkit = CONFIG["MOZ_WIDGET_TOOLKIT"] -if toolkit == 'android': - DIRS += ['android'] -elif toolkit == 'cocoa': - DIRS += ['mac'] +if toolkit == "android": + DIRS += ["android"] +elif toolkit == "cocoa": + DIRS += ["mac"] else: - DIRS += ['other'] + DIRS += ["other"] diff --git a/accessible/ipc/extension/other/moz.build b/accessible/ipc/extension/other/moz.build index 1a4030e4f45cf8..dbbc98702c4ab5 100644 --- a/accessible/ipc/extension/other/moz.build +++ b/accessible/ipc/extension/other/moz.build @@ -4,14 +4,14 @@ # License, v. 2.0. If a copy of the MPL was not distributed with this # file, You can obtain one at http://mozilla.org/MPL/2.0/. -if CONFIG['ACCESSIBILITY']: - IPDL_SOURCES += ['PDocAccessiblePlatformExt.ipdl'] +if CONFIG["ACCESSIBILITY"]: + IPDL_SOURCES += ["PDocAccessiblePlatformExt.ipdl"] EXPORTS.mozilla.a11y += [ - 'DocAccessiblePlatformExtChild.h', - 'DocAccessiblePlatformExtParent.h', + "DocAccessiblePlatformExtChild.h", + "DocAccessiblePlatformExtParent.h", ] -include('/ipc/chromium/chromium-config.mozbuild') +include("/ipc/chromium/chromium-config.mozbuild") -FINAL_LIBRARY = 'xul' +FINAL_LIBRARY = "xul" diff --git a/accessible/ipc/moz.build b/accessible/ipc/moz.build index 57d7310050264e..b50625e40ee007 100644 --- a/accessible/ipc/moz.build +++ b/accessible/ipc/moz.build @@ -4,65 +4,65 @@ # License, v. 2.0. If a copy of the MPL was not distributed with this # file, You can obtain one at http://mozilla.org/MPL/2.0/. 
-if CONFIG['OS_ARCH'] == 'WINNT': - DIRS += ['win'] +if CONFIG["OS_ARCH"] == "WINNT": + DIRS += ["win"] LOCAL_INCLUDES += [ - '/accessible/ipc/win', - '/accessible/windows/ia2', - '/accessible/windows/msaa', + "/accessible/ipc/win", + "/accessible/windows/ia2", + "/accessible/windows/msaa", ] else: - DIRS += ['other', 'extension'] + DIRS += ["other", "extension"] LOCAL_INCLUDES += [ - '/accessible/ipc/other', + "/accessible/ipc/other", ] - if CONFIG['MOZ_WIDGET_TOOLKIT'] == 'gtk': + if CONFIG["MOZ_WIDGET_TOOLKIT"] == "gtk": LOCAL_INCLUDES += [ - '/accessible/atk', + "/accessible/atk", ] - elif CONFIG['MOZ_WIDGET_TOOLKIT'] == 'cocoa': + elif CONFIG["MOZ_WIDGET_TOOLKIT"] == "cocoa": LOCAL_INCLUDES += [ - '/accessible/mac', + "/accessible/mac", ] - elif CONFIG['MOZ_WIDGET_TOOLKIT'] == 'android': + elif CONFIG["MOZ_WIDGET_TOOLKIT"] == "android": LOCAL_INCLUDES += [ - '/accessible/android', + "/accessible/android", ] else: LOCAL_INCLUDES += [ - '/accessible/other', + "/accessible/other", ] EXPORTS.mozilla.a11y += [ - 'IPCTypes.h', + "IPCTypes.h", ] -if CONFIG['CC_TYPE'] in ('clang', 'gcc'): - CXXFLAGS += ['-Wno-error=shadow'] +if CONFIG["CC_TYPE"] in ("clang", "gcc"): + CXXFLAGS += ["-Wno-error=shadow"] -if CONFIG['ACCESSIBILITY']: +if CONFIG["ACCESSIBILITY"]: EXPORTS.mozilla.a11y += [ - 'DocAccessibleChildBase.h', - 'DocAccessibleParent.h', - 'ProxyAccessibleBase.h', - 'ProxyAccessibleShared.h', + "DocAccessibleChildBase.h", + "DocAccessibleParent.h", + "ProxyAccessibleBase.h", + "ProxyAccessibleShared.h", ] UNIFIED_SOURCES += [ - 'DocAccessibleChildBase.cpp', - 'DocAccessibleParent.cpp', - 'ProxyAccessibleBase.cpp', + "DocAccessibleChildBase.cpp", + "DocAccessibleParent.cpp", + "ProxyAccessibleBase.cpp", ] LOCAL_INCLUDES += [ - '/accessible/base', - '/accessible/generic', - '/accessible/xpcom', + "/accessible/base", + "/accessible/generic", + "/accessible/xpcom", ] -include('/ipc/chromium/chromium-config.mozbuild') +include("/ipc/chromium/chromium-config.mozbuild") -FINAL_LIBRARY = 'xul' +FINAL_LIBRARY = "xul" # Add libFuzzer configuration directives -include('/tools/fuzzing/libfuzzer-config.mozbuild') +include("/tools/fuzzing/libfuzzer-config.mozbuild") diff --git a/accessible/ipc/other/moz.build b/accessible/ipc/other/moz.build index 129b9dffee3d63..db386c6eb3e5e2 100644 --- a/accessible/ipc/other/moz.build +++ b/accessible/ipc/other/moz.build @@ -5,48 +5,48 @@ # file, You can obtain one at http://mozilla.org/MPL/2.0/. 
-if CONFIG['ACCESSIBILITY']: - IPDL_SOURCES += ['PDocAccessible.ipdl'] +if CONFIG["ACCESSIBILITY"]: + IPDL_SOURCES += ["PDocAccessible.ipdl"] EXPORTS.mozilla.a11y += [ - 'DocAccessibleChild.h', - 'ProxyAccessible.h', + "DocAccessibleChild.h", + "ProxyAccessible.h", ] SOURCES += [ - 'DocAccessibleChild.cpp', - 'ProxyAccessible.cpp', + "DocAccessibleChild.cpp", + "ProxyAccessible.cpp", ] LOCAL_INCLUDES += [ - '../../base', - '../../generic', - '../../xpcom', + "../../base", + "../../generic", + "../../xpcom", ] - if CONFIG['MOZ_WIDGET_TOOLKIT'] == 'gtk': + if CONFIG["MOZ_WIDGET_TOOLKIT"] == "gtk": LOCAL_INCLUDES += [ - '/accessible/atk', + "/accessible/atk", ] - elif CONFIG['MOZ_WIDGET_TOOLKIT'] == 'cocoa': + elif CONFIG["MOZ_WIDGET_TOOLKIT"] == "cocoa": LOCAL_INCLUDES += [ - '/accessible/mac', + "/accessible/mac", ] - elif CONFIG['MOZ_WIDGET_TOOLKIT'] == 'android': + elif CONFIG["MOZ_WIDGET_TOOLKIT"] == "android": LOCAL_INCLUDES += [ - '/accessible/android', + "/accessible/android", ] else: LOCAL_INCLUDES += [ - '/accessible/other', + "/accessible/other", ] -include('/ipc/chromium/chromium-config.mozbuild') +include("/ipc/chromium/chromium-config.mozbuild") # Add libFuzzer configuration directives -include('/tools/fuzzing/libfuzzer-config.mozbuild') +include("/tools/fuzzing/libfuzzer-config.mozbuild") -if CONFIG['CC_TYPE'] in ('clang', 'gcc'): - CXXFLAGS += ['-Wno-error=shadow'] +if CONFIG["CC_TYPE"] in ("clang", "gcc"): + CXXFLAGS += ["-Wno-error=shadow"] -FINAL_LIBRARY = 'xul' +FINAL_LIBRARY = "xul" diff --git a/accessible/ipc/win/handler/moz.build b/accessible/ipc/win/handler/moz.build index 4e13b363ca6a12..1c57026012dd81 100644 --- a/accessible/ipc/win/handler/moz.build +++ b/accessible/ipc/win/handler/moz.build @@ -4,16 +4,16 @@ # License, v. 2.0. If a copy of the MPL was not distributed with this # file, You can obtain one at http://mozilla.org/MPL/2.0/. -SharedLibrary('AccessibleHandler') +SharedLibrary("AccessibleHandler") EXPORTS.mozilla.a11y += [ - 'AccessibleHandler.h', - 'HandlerDataCleanup.h', + "AccessibleHandler.h", + "HandlerDataCleanup.h", ] LOCAL_INCLUDES += [ - '/accessible/interfaces/ia2', - '/ipc/mscom/oop', + "/accessible/interfaces/ia2", + "/ipc/mscom/oop", ] # We want to generate distinct UUIDs on a per-channel basis, so we need @@ -21,79 +21,103 @@ LOCAL_INCLUDES += [ # These defines allow us to separate local builds from automated builds, # as well as separate beta from release. 
flags = [] -if CONFIG['MOZ_UPDATE_CHANNEL'] == 'default': - flags += ['-DUSE_LOCAL_UUID'] -elif CONFIG['MOZ_UPDATE_CHANNEL'] == 'beta': - flags += ['-DUSE_BETA_UUID'] - -GeneratedFile('HandlerData.h', 'HandlerData_p.c', 'HandlerData_i.c', 'HandlerData_c.c', - 'HandlerData_dlldata.c', 'HandlerData.tlb', - inputs=['HandlerData.idl'], - script='/build/midl.py', entry_point='midl', - flags=flags + ['-I', TOPOBJDIR, '-I', TOPOBJDIR + '/dist/include', - '-I', TOPSRCDIR + '/other-licenses/ia2', '-I', SRCDIR, - '-acf', SRCDIR + '/HandlerData.acf', - '-dlldata', OBJDIR + '/HandlerData_dlldata.c']) +if CONFIG["MOZ_UPDATE_CHANNEL"] == "default": + flags += ["-DUSE_LOCAL_UUID"] +elif CONFIG["MOZ_UPDATE_CHANNEL"] == "beta": + flags += ["-DUSE_BETA_UUID"] + +GeneratedFile( + "HandlerData.h", + "HandlerData_p.c", + "HandlerData_i.c", + "HandlerData_c.c", + "HandlerData_dlldata.c", + "HandlerData.tlb", + inputs=["HandlerData.idl"], + script="/build/midl.py", + entry_point="midl", + flags=flags + + [ + "-I", + TOPOBJDIR, + "-I", + TOPOBJDIR + "/dist/include", + "-I", + TOPSRCDIR + "/other-licenses/ia2", + "-I", + SRCDIR, + "-acf", + SRCDIR + "/HandlerData.acf", + "-dlldata", + OBJDIR + "/HandlerData_dlldata.c", + ], +) SOURCES += [ - '!HandlerData_c.c', - '!HandlerData_dlldata.c', - '!HandlerData_i.c', - '!HandlerData_p.c', - 'AccessibleHandler.cpp', - 'AccessibleHandlerControl.cpp', - 'HandlerChildEnumerator.cpp', - 'HandlerRelation.cpp', - 'HandlerTextLeaf.cpp', + "!HandlerData_c.c", + "!HandlerData_dlldata.c", + "!HandlerData_i.c", + "!HandlerData_p.c", + "AccessibleHandler.cpp", + "AccessibleHandlerControl.cpp", + "HandlerChildEnumerator.cpp", + "HandlerRelation.cpp", + "HandlerTextLeaf.cpp", ] EXPORTS += [ - '!HandlerData.h', - '!HandlerData_i.c', + "!HandlerData.h", + "!HandlerData_i.c", ] # Give some symbols a unique name in each translation unit, to avoid # collisions caused by https://llvm.org/pr41817. -if CONFIG['CC_TYPE'] == 'clang-cl': - SOURCES['!HandlerData_p.c'].flags += ['-DHandlerData__MIDL_ProcFormatString=HandlerData__MIDL_ProcFormatString__HandlerData_p'] - SOURCES['!HandlerData_p.c'].flags += ['-DHandlerData__MIDL_TypeFormatString=HandlerData__MIDL_TypeFormatString__HandlerData_p'] - for p in ('dlldata', 'c', 'i', 'p'): - SOURCES['!HandlerData_%s.c' % p].flags += ['-DUserMarshalRoutines=UserMarshalRoutines__HandlerData_%s' % p] +if CONFIG["CC_TYPE"] == "clang-cl": + SOURCES["!HandlerData_p.c"].flags += [ + "-DHandlerData__MIDL_ProcFormatString=HandlerData__MIDL_ProcFormatString__HandlerData_p" + ] + SOURCES["!HandlerData_p.c"].flags += [ + "-DHandlerData__MIDL_TypeFormatString=HandlerData__MIDL_TypeFormatString__HandlerData_p" + ] + for p in ("dlldata", "c", "i", "p"): + SOURCES["!HandlerData_%s.c" % p].flags += [ + "-DUserMarshalRoutines=UserMarshalRoutines__HandlerData_%s" % p + ] -DEFFILE = 'AccessibleHandler.def' +DEFFILE = "AccessibleHandler.def" USE_LIBS += [ - 'mscom_oop', + "mscom_oop", ] OS_LIBS += [ - 'rpcrt4', - 'oleacc', + "rpcrt4", + "oleacc", ] -RCINCLUDE = 'AccessibleHandler.rc' +RCINCLUDE = "AccessibleHandler.rc" # Suppress warnings from the MIDL generated code. 
-if CONFIG['CC_TYPE'] == 'clang-cl': +if CONFIG["CC_TYPE"] == "clang-cl": CFLAGS += [ - '-Wno-extern-initializer', - '-Wno-incompatible-pointer-types', - '-Wno-missing-braces', - '-Wno-unused-const-variable', + "-Wno-extern-initializer", + "-Wno-incompatible-pointer-types", + "-Wno-missing-braces", + "-Wno-unused-const-variable", ] # Since we are defining our own COM entry points (DllRegisterServer et al), # but we still want to be able to delegate some work to the generated code, # we add the prefix "Proxy" to all of the generated counterparts. -DEFINES['ENTRY_PREFIX'] = 'Proxy' -DEFINES['REGISTER_PROXY_DLL'] = True -LIBRARY_DEFINES['MOZ_MSCOM_REMARSHAL_NO_HANDLER'] = True +DEFINES["ENTRY_PREFIX"] = "Proxy" +DEFINES["REGISTER_PROXY_DLL"] = True +LIBRARY_DEFINES["MOZ_MSCOM_REMARSHAL_NO_HANDLER"] = True # This DLL may be loaded into other processes, so we need static libs for # Windows 7 and Windows 8. USE_STATIC_LIBS = True -LIBRARY_DEFINES['UNICODE'] = True -LIBRARY_DEFINES['_UNICODE'] = True -LIBRARY_DEFINES['MOZ_NO_MOZALLOC'] = True +LIBRARY_DEFINES["UNICODE"] = True +LIBRARY_DEFINES["_UNICODE"] = True +LIBRARY_DEFINES["MOZ_NO_MOZALLOC"] = True DisableStlWrapping() diff --git a/accessible/ipc/win/moz.build b/accessible/ipc/win/moz.build index acc640b7292c90..ab6b3beb7836e7 100644 --- a/accessible/ipc/win/moz.build +++ b/accessible/ipc/win/moz.build @@ -4,67 +4,69 @@ # License, v. 2.0. If a copy of the MPL was not distributed with this # file, You can obtain one at http://mozilla.org/MPL/2.0/. -if CONFIG['COMPILE_ENVIRONMENT'] and CONFIG['ACCESSIBILITY']: +if CONFIG["COMPILE_ENVIRONMENT"] and CONFIG["ACCESSIBILITY"]: DIRS += [ - 'handler', - 'typelib', + "handler", + "typelib", ] -if CONFIG['ACCESSIBILITY']: - IPDL_SOURCES += ['PDocAccessible.ipdl'] +if CONFIG["ACCESSIBILITY"]: + IPDL_SOURCES += ["PDocAccessible.ipdl"] - if not CONFIG['HAVE_64BIT_BUILD']: + if not CONFIG["HAVE_64BIT_BUILD"]: EXPORTS += [ - 'IAccessible32.manifest', + "IAccessible32.manifest", ] EXPORTS += [ - 'IAccessible64.manifest', + "IAccessible64.manifest", ] EXPORTS.mozilla.a11y += [ - 'COMPtrTypes.h', - 'DocAccessibleChild.h', - 'HandlerProvider.h', - 'PlatformChild.h', - 'ProxyAccessible.h' + "COMPtrTypes.h", + "DocAccessibleChild.h", + "HandlerProvider.h", + "PlatformChild.h", + "ProxyAccessible.h", ] SOURCES += [ - '!./handler/HandlerData_c.c', - 'COMPtrTypes.cpp', - 'DocAccessibleChild.cpp', - 'HandlerProvider.cpp', - 'PlatformChild.cpp', - 'ProxyAccessible.cpp', + "!./handler/HandlerData_c.c", + "COMPtrTypes.cpp", + "DocAccessibleChild.cpp", + "HandlerProvider.cpp", + "PlatformChild.cpp", + "ProxyAccessible.cpp", ] # Give some symbols a unique name in each translation unit, to avoid # collisions caused by https://llvm.org/pr41817. - if CONFIG['CC_TYPE'] == 'clang-cl': - SOURCES['!./handler/HandlerData_c.c'].flags += ['-DUserMarshalRoutines=UserMarshalRoutines__HandlerData_c'] + if CONFIG["CC_TYPE"] == "clang-cl": + SOURCES["!./handler/HandlerData_c.c"].flags += [ + "-DUserMarshalRoutines=UserMarshalRoutines__HandlerData_c" + ] LOCAL_INCLUDES += [ - '/accessible/base', - '/accessible/generic', - '/accessible/windows/ia2', - '/accessible/windows/msaa', - '/accessible/xpcom', + "/accessible/base", + "/accessible/generic", + "/accessible/windows/ia2", + "/accessible/windows/msaa", + "/accessible/xpcom", ] # Suppress warnings from the MIDL generated code. 
- if CONFIG['CC_TYPE'] == 'clang-cl': + if CONFIG["CC_TYPE"] == "clang-cl": CFLAGS += [ - '-Wno-extern-initializer', - '-Wno-incompatible-pointer-types', - '-Wno-missing-braces', - '-Wno-unused-const-variable', + "-Wno-extern-initializer", + "-Wno-incompatible-pointer-types", + "-Wno-missing-braces", + "-Wno-unused-const-variable", ] CXXFLAGS += [ - '-Wno-missing-braces', + "-Wno-missing-braces", ] -include('/ipc/chromium/chromium-config.mozbuild') +include("/ipc/chromium/chromium-config.mozbuild") -FINAL_LIBRARY = 'xul' +FINAL_LIBRARY = "xul" diff --git a/accessible/ipc/win/typelib/moz.build b/accessible/ipc/win/typelib/moz.build index 5d5496b81e7558..2992deda1a5b35 100644 --- a/accessible/ipc/win/typelib/moz.build +++ b/accessible/ipc/win/typelib/moz.build @@ -5,9 +5,12 @@ # file, You can obtain one at http://mozilla.org/MPL/2.0/. FINAL_TARGET_FILES += [ - '!Accessible.tlb', + "!Accessible.tlb", ] -GeneratedFile('Accessible.tlb', - inputs=['Accessible.idl'], - script='/build/midl.py', entry_point='midl') +GeneratedFile( + "Accessible.tlb", + inputs=["Accessible.idl"], + script="/build/midl.py", + entry_point="midl", +) diff --git a/accessible/mac/SelectorMapGen.py b/accessible/mac/SelectorMapGen.py index 65ca1e3a15a6cf..5864120fbc4167 100755 --- a/accessible/mac/SelectorMapGen.py +++ b/accessible/mac/SelectorMapGen.py @@ -28,8 +28,9 @@ def gen_mm(fd, protocol_file): fd.write("#import \n\n") fd.write("namespace mozilla {\nnamespace a11y {\nnamespace mac {\n\n") - sections = re.findall(r"#pragma mark - (\w+)\n(.*?)(?=(?:#pragma mark|@end))", - protocol, re.DOTALL) + sections = re.findall( + r"#pragma mark - (\w+)\n(.*?)(?=(?:#pragma mark|@end))", protocol, re.DOTALL + ) for name, text in sections: write_map(fd, name, text) @@ -38,8 +39,9 @@ def gen_mm(fd, protocol_file): def gen_h(fd, protocol_file): protocol = open(protocol_file).read() - sections = re.findall(r"#pragma mark - (\w+)\n(.*?)(?=(?:#pragma mark|@end))", - protocol, re.DOTALL) + sections = re.findall( + r"#pragma mark - (\w+)\n(.*?)(?=(?:#pragma mark|@end))", protocol, re.DOTALL + ) fd.write("/* THIS FILE IS AUTOGENERATED - DO NOT EDIT */\n\n") fd.write("#ifndef _MacSelectorMap_H_\n") @@ -55,6 +57,7 @@ def gen_h(fd, protocol_file): # For debugging if __name__ == "__main__": import sys + gen_mm(sys.stdout, "accessible/mac/MOXAccessibleProtocol.h") gen_h(sys.stdout, "accessible/mac/MOXAccessibleProtocol.h") diff --git a/accessible/mac/moz.build b/accessible/mac/moz.build index 8f39d28b98f51b..7fafafee198679 100644 --- a/accessible/mac/moz.build +++ b/accessible/mac/moz.build @@ -5,63 +5,69 @@ # file, You can obtain one at http://mozilla.org/MPL/2.0/. 
EXPORTS += [ - 'mozAccessibleProtocol.h', + "mozAccessibleProtocol.h", ] EXPORTS.mozilla.a11y += [ - 'AccessibleWrap.h', - 'HyperTextAccessibleWrap.h', - 'RangeTypes.h', + "AccessibleWrap.h", + "HyperTextAccessibleWrap.h", + "RangeTypes.h", ] UNIFIED_SOURCES += [ - 'AccessibleWrap.mm', - 'DocAccessibleWrap.mm', - 'GeckoTextMarker.mm', - 'HyperTextAccessibleWrap.mm', - 'MacUtils.mm', - 'MOXAccessibleBase.mm', - 'MOXLandmarkAccessibles.mm', - 'MOXMathAccessibles.mm', - 'MOXSearchInfo.mm', - 'MOXTextMarkerDelegate.mm', - 'MOXWebAreaAccessible.mm', - 'mozAccessible.mm', - 'mozActionElements.mm', - 'mozHTMLAccessible.mm', - 'mozRootAccessible.mm', - 'mozSelectableElements.mm', - 'mozTableAccessible.mm', - 'mozTextAccessible.mm', - 'Platform.mm', - 'RootAccessibleWrap.mm', - 'RotorRules.mm', + "AccessibleWrap.mm", + "DocAccessibleWrap.mm", + "GeckoTextMarker.mm", + "HyperTextAccessibleWrap.mm", + "MacUtils.mm", + "MOXAccessibleBase.mm", + "MOXLandmarkAccessibles.mm", + "MOXMathAccessibles.mm", + "MOXSearchInfo.mm", + "MOXTextMarkerDelegate.mm", + "MOXWebAreaAccessible.mm", + "mozAccessible.mm", + "mozActionElements.mm", + "mozHTMLAccessible.mm", + "mozRootAccessible.mm", + "mozSelectableElements.mm", + "mozTableAccessible.mm", + "mozTextAccessible.mm", + "Platform.mm", + "RootAccessibleWrap.mm", + "RotorRules.mm", ] SOURCES += [ - '!MacSelectorMap.mm', + "!MacSelectorMap.mm", ] LOCAL_INCLUDES += [ - '/accessible/base', - '/accessible/generic', - '/accessible/html', - '/accessible/ipc', - '/accessible/ipc/other', - '/accessible/xul', - '/layout/generic', - '/layout/xul', - '/widget', - '/widget/cocoa', + "/accessible/base", + "/accessible/generic", + "/accessible/html", + "/accessible/ipc", + "/accessible/ipc/other", + "/accessible/xul", + "/layout/generic", + "/layout/xul", + "/widget", + "/widget/cocoa", ] -GeneratedFile('MacSelectorMap.h', - script='/accessible/mac/SelectorMapGen.py', entry_point='gen_h', - inputs=['MOXAccessibleProtocol.h']) -GeneratedFile('MacSelectorMap.mm', - script='/accessible/mac/SelectorMapGen.py', entry_point='gen_mm', - inputs=['MOXAccessibleProtocol.h']) +GeneratedFile( + "MacSelectorMap.h", + script="/accessible/mac/SelectorMapGen.py", + entry_point="gen_h", + inputs=["MOXAccessibleProtocol.h"], +) +GeneratedFile( + "MacSelectorMap.mm", + script="/accessible/mac/SelectorMapGen.py", + entry_point="gen_mm", + inputs=["MOXAccessibleProtocol.h"], +) -FINAL_LIBRARY = 'xul' +FINAL_LIBRARY = "xul" -include('/ipc/chromium/chromium-config.mozbuild') +include("/ipc/chromium/chromium-config.mozbuild") diff --git a/accessible/moz.build b/accessible/moz.build index 2fc823a6781f87..f0ddf25cb39612 100644 --- a/accessible/moz.build +++ b/accessible/moz.build @@ -4,46 +4,39 @@ # License, v. 2.0. If a copy of the MPL was not distributed with this # file, You can obtain one at http://mozilla.org/MPL/2.0/. 
-toolkit = CONFIG['MOZ_WIDGET_TOOLKIT'] +toolkit = CONFIG["MOZ_WIDGET_TOOLKIT"] -if toolkit == 'gtk': - DIRS += ['atk'] -elif toolkit == 'windows': - DIRS += ['windows'] -elif toolkit == 'cocoa': - DIRS += ['mac'] -elif toolkit == 'android': - DIRS += ['android'] +if toolkit == "gtk": + DIRS += ["atk"] +elif toolkit == "windows": + DIRS += ["windows"] +elif toolkit == "cocoa": + DIRS += ["mac"] +elif toolkit == "android": + DIRS += ["android"] else: - DIRS += ['other'] + DIRS += ["other"] -DIRS += [ 'aom', - 'base', - 'generic', - 'html', - 'interfaces', - 'ipc', - 'xpcom' -] +DIRS += ["aom", "base", "generic", "html", "interfaces", "ipc", "xpcom"] -if CONFIG['MOZ_XUL']: - DIRS += ['xul'] +if CONFIG["MOZ_XUL"]: + DIRS += ["xul"] -TEST_DIRS += ['tests/mochitest'] +TEST_DIRS += ["tests/mochitest"] BROWSER_CHROME_MANIFESTS += [ - 'tests/browser/bounds/browser.ini', - 'tests/browser/browser.ini', - 'tests/browser/e10s/browser.ini', - 'tests/browser/events/browser.ini', - 'tests/browser/fission/browser.ini', - 'tests/browser/general/browser.ini', - 'tests/browser/hittest/browser.ini', - 'tests/browser/mac/browser.ini', - 'tests/browser/scroll/browser.ini', - 'tests/browser/states/browser.ini', - 'tests/browser/telemetry/browser.ini', - 'tests/browser/tree/browser.ini' + "tests/browser/bounds/browser.ini", + "tests/browser/browser.ini", + "tests/browser/e10s/browser.ini", + "tests/browser/events/browser.ini", + "tests/browser/fission/browser.ini", + "tests/browser/general/browser.ini", + "tests/browser/hittest/browser.ini", + "tests/browser/mac/browser.ini", + "tests/browser/scroll/browser.ini", + "tests/browser/states/browser.ini", + "tests/browser/telemetry/browser.ini", + "tests/browser/tree/browser.ini", ] with Files("**"): diff --git a/accessible/other/moz.build b/accessible/other/moz.build index 4887a4abb69701..1e4fc8cabc3bfe 100644 --- a/accessible/other/moz.build +++ b/accessible/other/moz.build @@ -5,23 +5,23 @@ # file, You can obtain one at http://mozilla.org/MPL/2.0/. EXPORTS.mozilla.a11y += [ - 'AccessibleWrap.h', - 'HyperTextAccessibleWrap.h', + "AccessibleWrap.h", + "HyperTextAccessibleWrap.h", ] SOURCES += [ - 'AccessibleWrap.cpp', - 'Platform.cpp', + "AccessibleWrap.cpp", + "Platform.cpp", ] LOCAL_INCLUDES += [ - '/accessible/base', - '/accessible/generic', - '/accessible/html', - '/accessible/xul', + "/accessible/base", + "/accessible/generic", + "/accessible/html", + "/accessible/xul", ] -FINAL_LIBRARY = 'xul' +FINAL_LIBRARY = "xul" -if CONFIG['CC_TYPE'] in ('clang', 'gcc'): - CXXFLAGS += ['-Wno-error=shadow'] +if CONFIG["CC_TYPE"] in ("clang", "gcc"): + CXXFLAGS += ["-Wno-error=shadow"] diff --git a/accessible/tests/mochitest/moz.build b/accessible/tests/mochitest/moz.build index ad92136301ffde..d3efffb62b9272 100644 --- a/accessible/tests/mochitest/moz.build +++ b/accessible/tests/mochitest/moz.build @@ -5,33 +5,33 @@ # file, You can obtain one at http://mozilla.org/MPL/2.0/. 
A11Y_MANIFESTS += [ - 'a11y.ini', - 'actions/a11y.ini', - 'aom/a11y.ini', - 'attributes/a11y.ini', - 'bounds/a11y.ini', - 'editabletext/a11y.ini', - 'elm/a11y.ini', - 'events/a11y.ini', - 'events/docload/a11y.ini', - 'focus/a11y.ini', - 'hittest/a11y.ini', - 'hyperlink/a11y.ini', - 'hypertext/a11y.ini', - 'name/a11y.ini', - 'pivot/a11y.ini', - 'relations/a11y.ini', - 'role/a11y.ini', - 'scroll/a11y.ini', - 'selectable/a11y.ini', - 'states/a11y.ini', - 'table/a11y.ini', - 'text/a11y.ini', - 'textattrs/a11y.ini', - 'textcaret/a11y.ini', - 'textrange/a11y.ini', - 'textselection/a11y.ini', - 'tree/a11y.ini', - 'treeupdate/a11y.ini', - 'value/a11y.ini', + "a11y.ini", + "actions/a11y.ini", + "aom/a11y.ini", + "attributes/a11y.ini", + "bounds/a11y.ini", + "editabletext/a11y.ini", + "elm/a11y.ini", + "events/a11y.ini", + "events/docload/a11y.ini", + "focus/a11y.ini", + "hittest/a11y.ini", + "hyperlink/a11y.ini", + "hypertext/a11y.ini", + "name/a11y.ini", + "pivot/a11y.ini", + "relations/a11y.ini", + "role/a11y.ini", + "scroll/a11y.ini", + "selectable/a11y.ini", + "states/a11y.ini", + "table/a11y.ini", + "text/a11y.ini", + "textattrs/a11y.ini", + "textcaret/a11y.ini", + "textrange/a11y.ini", + "textselection/a11y.ini", + "tree/a11y.ini", + "treeupdate/a11y.ini", + "value/a11y.ini", ] diff --git a/accessible/windows/ia2/moz.build b/accessible/windows/ia2/moz.build index ed8096178fbe59..7357fe48a87102 100644 --- a/accessible/windows/ia2/moz.build +++ b/accessible/windows/ia2/moz.build @@ -5,54 +5,54 @@ # file, You can obtain one at http://mozilla.org/MPL/2.0/. EXPORTS += [ - 'ia2Accessible.h', - 'ia2AccessibleAction.h', - 'ia2AccessibleComponent.h', - 'ia2AccessibleEditableText.h', - 'ia2AccessibleHyperlink.h', - 'ia2AccessibleHypertext.h', - 'ia2AccessibleText.h', - 'ia2AccessibleValue.h', + "ia2Accessible.h", + "ia2AccessibleAction.h", + "ia2AccessibleComponent.h", + "ia2AccessibleEditableText.h", + "ia2AccessibleHyperlink.h", + "ia2AccessibleHypertext.h", + "ia2AccessibleText.h", + "ia2AccessibleValue.h", ] UNIFIED_SOURCES += [ - 'ia2Accessible.cpp', - 'ia2AccessibleAction.cpp', - 'ia2AccessibleComponent.cpp', - 'ia2AccessibleEditableText.cpp', - 'ia2AccessibleHyperlink.cpp', - 'ia2AccessibleHypertext.cpp', - 'ia2AccessibleImage.cpp', - 'ia2AccessibleRelation.cpp', - 'ia2AccessibleText.cpp', - 'ia2AccessibleValue.cpp', + "ia2Accessible.cpp", + "ia2AccessibleAction.cpp", + "ia2AccessibleComponent.cpp", + "ia2AccessibleEditableText.cpp", + "ia2AccessibleHyperlink.cpp", + "ia2AccessibleHypertext.cpp", + "ia2AccessibleImage.cpp", + "ia2AccessibleRelation.cpp", + "ia2AccessibleText.cpp", + "ia2AccessibleValue.cpp", ] # These files cannot be built in unified mode because they both include # AccessibleTable2_i.c. SOURCES += [ - 'ia2AccessibleTable.cpp', - 'ia2AccessibleTableCell.cpp', + "ia2AccessibleTable.cpp", + "ia2AccessibleTableCell.cpp", ] LOCAL_INCLUDES += [ - '/accessible/base', - '/accessible/generic', - '/accessible/html', - '/accessible/windows', - '/accessible/windows/msaa', - '/accessible/xpcom', - '/accessible/xul', + "/accessible/base", + "/accessible/generic", + "/accessible/html", + "/accessible/windows", + "/accessible/windows/msaa", + "/accessible/xpcom", + "/accessible/xul", ] -FINAL_LIBRARY = 'xul' +FINAL_LIBRARY = "xul" # The Windows MIDL code generator creates things like: # # #endif !_MIDL_USE_GUIDDEF_ # # which clang-cl complains about. MSVC doesn't, so turn this warning off. 
-if CONFIG['CC_TYPE'] == 'clang-cl': - CXXFLAGS += ['-Wno-extra-tokens'] +if CONFIG["CC_TYPE"] == "clang-cl": + CXXFLAGS += ["-Wno-extra-tokens"] -include('/ipc/chromium/chromium-config.mozbuild') +include("/ipc/chromium/chromium-config.mozbuild") diff --git a/accessible/windows/moz.build b/accessible/windows/moz.build index 4a62560a43c3cb..c5c2f9ff2fed7d 100644 --- a/accessible/windows/moz.build +++ b/accessible/windows/moz.build @@ -4,8 +4,8 @@ # License, v. 2.0. If a copy of the MPL was not distributed with this # file, You can obtain one at http://mozilla.org/MPL/2.0/. -DIRS += ['msaa', 'ia2', 'sdn', 'uia'] +DIRS += ["msaa", "ia2", "sdn", "uia"] EXPORTS.mozilla.a11y += [ - 'ProxyWrappers.h', + "ProxyWrappers.h", ] diff --git a/accessible/windows/msaa/moz.build b/accessible/windows/msaa/moz.build index 9b91e0f6cd1b13..5b8163f2fd9407 100644 --- a/accessible/windows/msaa/moz.build +++ b/accessible/windows/msaa/moz.build @@ -5,71 +5,71 @@ # file, You can obtain one at http://mozilla.org/MPL/2.0/. EXPORTS += [ - 'IUnknownImpl.h', + "IUnknownImpl.h", ] EXPORTS.mozilla.a11y += [ - 'AccessibleWrap.h', - 'Compatibility.h', - 'HyperTextAccessibleWrap.h', - 'LazyInstantiator.h', - 'MsaaIdGenerator.h', - 'nsWinUtils.h', + "AccessibleWrap.h", + "Compatibility.h", + "HyperTextAccessibleWrap.h", + "LazyInstantiator.h", + "MsaaIdGenerator.h", + "nsWinUtils.h", ] UNIFIED_SOURCES += [ - 'AccessibleWrap.cpp', - 'ApplicationAccessibleWrap.cpp', - 'ARIAGridAccessibleWrap.cpp', - 'Compatibility.cpp', - 'CompatibilityUIA.cpp', - 'DocAccessibleWrap.cpp', - 'EnumVariant.cpp', - 'GeckoCustom.cpp', - 'HTMLTableAccessibleWrap.cpp', - 'HTMLWin32ObjectAccessible.cpp', - 'HyperTextAccessibleWrap.cpp', - 'ImageAccessibleWrap.cpp', - 'IUnknownImpl.cpp', - 'MsaaIdGenerator.cpp', - 'nsWinUtils.cpp', - 'Platform.cpp', - 'RootAccessibleWrap.cpp', - 'TextLeafAccessibleWrap.cpp', + "AccessibleWrap.cpp", + "ApplicationAccessibleWrap.cpp", + "ARIAGridAccessibleWrap.cpp", + "Compatibility.cpp", + "CompatibilityUIA.cpp", + "DocAccessibleWrap.cpp", + "EnumVariant.cpp", + "GeckoCustom.cpp", + "HTMLTableAccessibleWrap.cpp", + "HTMLWin32ObjectAccessible.cpp", + "HyperTextAccessibleWrap.cpp", + "ImageAccessibleWrap.cpp", + "IUnknownImpl.cpp", + "MsaaIdGenerator.cpp", + "nsWinUtils.cpp", + "Platform.cpp", + "RootAccessibleWrap.cpp", + "TextLeafAccessibleWrap.cpp", ] SOURCES += [ # This file cannot be built in unified mode because it redefines _WIN32_WINNT - 'LazyInstantiator.cpp', + "LazyInstantiator.cpp", # This file cannot be built in unified mode because it includes ISimpleDOMNode_i.c. 
- 'ServiceProvider.cpp', + "ServiceProvider.cpp", ] OS_LIBS += [ - 'ntdll', + "ntdll", ] -if CONFIG['MOZ_XUL']: +if CONFIG["MOZ_XUL"]: UNIFIED_SOURCES += [ - 'XULListboxAccessibleWrap.cpp', - 'XULMenuAccessibleWrap.cpp', - 'XULTreeGridAccessibleWrap.cpp', + "XULListboxAccessibleWrap.cpp", + "XULMenuAccessibleWrap.cpp", + "XULTreeGridAccessibleWrap.cpp", ] LOCAL_INCLUDES += [ - '/accessible/base', - '/accessible/generic', - '/accessible/html', - '/accessible/ipc', - '/accessible/ipc/win', - '/accessible/windows', - '/accessible/windows/ia2', - '/accessible/windows/sdn', - '/accessible/windows/uia', - '/accessible/xpcom', - '/accessible/xul', - '/dom/base', - '/layout/style', + "/accessible/base", + "/accessible/generic", + "/accessible/html", + "/accessible/ipc", + "/accessible/ipc/win", + "/accessible/windows", + "/accessible/windows/ia2", + "/accessible/windows/sdn", + "/accessible/windows/uia", + "/accessible/xpcom", + "/accessible/xul", + "/dom/base", + "/layout/style", ] # The Windows MIDL code generator creates things like: @@ -77,9 +77,9 @@ LOCAL_INCLUDES += [ # #endif !_MIDL_USE_GUIDDEF_ # # which clang-cl complains about. MSVC doesn't, so turn this warning off. -if CONFIG['CC_TYPE'] == 'clang-cl': - CXXFLAGS += ['-Wno-extra-tokens'] +if CONFIG["CC_TYPE"] == "clang-cl": + CXXFLAGS += ["-Wno-extra-tokens"] -include('/ipc/chromium/chromium-config.mozbuild') +include("/ipc/chromium/chromium-config.mozbuild") -FINAL_LIBRARY = 'xul' +FINAL_LIBRARY = "xul" diff --git a/accessible/windows/sdn/moz.build b/accessible/windows/sdn/moz.build index ae0d5e20dcb0c5..8d76b7482ef355 100644 --- a/accessible/windows/sdn/moz.build +++ b/accessible/windows/sdn/moz.build @@ -5,20 +5,20 @@ # file, You can obtain one at http://mozilla.org/MPL/2.0/. UNIFIED_SOURCES += [ - 'sdnAccessible.cpp', - 'sdnDocAccessible.cpp', - 'sdnTextAccessible.cpp', + "sdnAccessible.cpp", + "sdnDocAccessible.cpp", + "sdnTextAccessible.cpp", ] LOCAL_INCLUDES += [ - '/accessible/base', - '/accessible/generic', - '/accessible/html', - '/accessible/windows/msaa', - '/accessible/xpcom', - '/accessible/xul', + "/accessible/base", + "/accessible/generic", + "/accessible/html", + "/accessible/windows/msaa", + "/accessible/xpcom", + "/accessible/xul", ] -include('/ipc/chromium/chromium-config.mozbuild') +include("/ipc/chromium/chromium-config.mozbuild") -FINAL_LIBRARY = 'xul' +FINAL_LIBRARY = "xul" diff --git a/accessible/windows/uia/moz.build b/accessible/windows/uia/moz.build index afc150e11989db..058aacc579efa2 100644 --- a/accessible/windows/uia/moz.build +++ b/accessible/windows/uia/moz.build @@ -5,18 +5,18 @@ # file, You can obtain one at http://mozilla.org/MPL/2.0/. SOURCES += [ - 'uiaRawElmProvider.cpp', + "uiaRawElmProvider.cpp", ] LOCAL_INCLUDES += [ - '/accessible/base', - '/accessible/generic', - '/accessible/html', - '/accessible/windows/msaa', - '/accessible/xpcom', - '/accessible/xul', + "/accessible/base", + "/accessible/generic", + "/accessible/html", + "/accessible/windows/msaa", + "/accessible/xpcom", + "/accessible/xul", ] -include('/ipc/chromium/chromium-config.mozbuild') +include("/ipc/chromium/chromium-config.mozbuild") -FINAL_LIBRARY = 'xul' +FINAL_LIBRARY = "xul" diff --git a/accessible/xpcom/AccEventGen.py b/accessible/xpcom/AccEventGen.py index 633063031dc1cb..ef253bd2a91491 100755 --- a/accessible/xpcom/AccEventGen.py +++ b/accessible/xpcom/AccEventGen.py @@ -14,8 +14,13 @@ # Load the webidl configuration file. 
glbl = {} -exec(open(mozpath.join(buildconfig.topsrcdir, 'dom', 'bindings', 'Bindings.conf')).read(), glbl) -webidlconfig = glbl['DOMInterfaces'] +exec( + open( + mozpath.join(buildconfig.topsrcdir, "dom", "bindings", "Bindings.conf") + ).read(), + glbl, +) +webidlconfig = glbl["DOMInterfaces"] # Instantiate the parser. p = xpidl.IDLParser() @@ -26,13 +31,14 @@ def findIDL(includePath, interfaceFileName): path = mozpath.join(d, interfaceFileName) if os.path.exists(path): return path - raise BaseException("No IDL file found for interface %s " - "in include path %r" - % (interfaceFileName, includePath)) + raise BaseException( + "No IDL file found for interface %s " + "in include path %r" % (interfaceFileName, includePath) + ) def loadEventIDL(parser, includePath, eventname): - eventidl = ("nsIAccessible%s.idl" % eventname) + eventidl = "nsIAccessible%s.idl" % eventname idlFile = findIDL(includePath, eventidl) idl = p.parse(open(idlFile).read(), idlFile) idl.resolve(includePath, p, webidlconfig) @@ -43,7 +49,7 @@ class Configuration: def __init__(self, filename): config = {} exec(open(filename).read(), config) - self.simple_events = config.get('simple_events', []) + self.simple_events = config.get("simple_events", []) def firstCap(str): @@ -51,26 +57,28 @@ def firstCap(str): def writeAttributeParams(a): - return ("%s a%s" % (a.realtype.nativeType('in'), firstCap(a.name))) + return "%s a%s" % (a.realtype.nativeType("in"), firstCap(a.name)) def print_header_file(fd, conf, incdirs): idl_paths = set() fd.write("/* THIS FILE IS AUTOGENERATED - DO NOT EDIT */\n") - fd.write("#ifndef _mozilla_a11y_generated_AccEvents_h_\n" - "#define _mozilla_a11y_generated_AccEvents_h_\n\n") - fd.write("#include \"nscore.h\"\n") - fd.write("#include \"nsCOMPtr.h\"\n") - fd.write("#include \"nsCycleCollectionParticipant.h\"\n") - fd.write("#include \"nsString.h\"\n") + fd.write( + "#ifndef _mozilla_a11y_generated_AccEvents_h_\n" + "#define _mozilla_a11y_generated_AccEvents_h_\n\n" + ) + fd.write('#include "nscore.h"\n') + fd.write('#include "nsCOMPtr.h"\n') + fd.write('#include "nsCycleCollectionParticipant.h"\n') + fd.write('#include "nsString.h"\n') for e in conf.simple_events: - fd.write("#include \"nsIAccessible%s.h\"\n" % e) + fd.write('#include "nsIAccessible%s.h"\n' % e) for e in conf.simple_events: idl, idl_path = loadEventIDL(p, incdirs, e) idl_paths.add(idl_path) for iface in filter(lambda p: p.kind == "interface", idl.productions): - classname = ("xpcAcc%s" % e) + classname = "xpcAcc%s" % e baseinterfaces = interfaces(iface) fd.write("\nclass %s final : public %s\n" % (classname, iface.name)) @@ -114,7 +122,7 @@ def interfaceAttributeTypes(idl): def print_cpp(idl, fd, conf, eventname): for p in idl.productions: - if p.kind == 'interface': + if p.kind == "interface": write_cpp(eventname, p, fd) @@ -135,7 +143,7 @@ def print_cpp_file(fd, conf, incdirs): types.extend(interfaceAttributeTypes(idl)) for c in types: - fd.write("#include \"%s.h\"\n" % c) + fd.write('#include "%s.h"\n' % c) fd.write("\n") for e in conf.simple_events: @@ -147,37 +155,40 @@ def print_cpp_file(fd, conf, incdirs): def attributeVariableTypeAndName(a): - if a.realtype.nativeType('in').endswith('*'): - l = ["nsCOMPtr<%s> m%s;" % (a.realtype.nativeType('in').strip('* '), - firstCap(a.name))] - elif a.realtype.nativeType('in').count("nsAString"): + if a.realtype.nativeType("in").endswith("*"): + l = [ + "nsCOMPtr<%s> m%s;" + % (a.realtype.nativeType("in").strip("* "), firstCap(a.name)) + ] + elif 
a.realtype.nativeType("in").count("nsAString"): l = ["nsString m%s;" % firstCap(a.name)] - elif a.realtype.nativeType('in').count("nsACString"): + elif a.realtype.nativeType("in").count("nsACString"): l = ["nsCString m%s;" % firstCap(a.name)] else: - l = ["%sm%s;" % (a.realtype.nativeType('in'), - firstCap(a.name))] + l = ["%sm%s;" % (a.realtype.nativeType("in"), firstCap(a.name))] return ", ".join(l) def writeAttributeGetter(fd, classname, a): fd.write("NS_IMETHODIMP\n") fd.write("%s::Get%s(" % (classname, firstCap(a.name))) - if a.realtype.nativeType('in').endswith('*'): - fd.write("%s** a%s" % (a.realtype.nativeType('in').strip('* '), firstCap(a.name))) - elif a.realtype.nativeType('in').count("nsAString"): + if a.realtype.nativeType("in").endswith("*"): + fd.write( + "%s** a%s" % (a.realtype.nativeType("in").strip("* "), firstCap(a.name)) + ) + elif a.realtype.nativeType("in").count("nsAString"): fd.write("nsAString& a%s" % firstCap(a.name)) - elif a.realtype.nativeType('in').count("nsACString"): + elif a.realtype.nativeType("in").count("nsACString"): fd.write("nsACString& a%s" % firstCap(a.name)) else: - fd.write("%s*a%s" % (a.realtype.nativeType('in'), firstCap(a.name))) + fd.write("%s*a%s" % (a.realtype.nativeType("in"), firstCap(a.name))) fd.write(")\n") fd.write("{\n") - if a.realtype.nativeType('in').endswith('*'): + if a.realtype.nativeType("in").endswith("*"): fd.write(" NS_IF_ADDREF(*a%s = m%s);\n" % (firstCap(a.name), firstCap(a.name))) - elif a.realtype.nativeType('in').count("nsAString"): + elif a.realtype.nativeType("in").count("nsAString"): fd.write(" a%s = m%s;\n" % (firstCap(a.name), firstCap(a.name))) - elif a.realtype.nativeType('in').count("nsACString"): + elif a.realtype.nativeType("in").count("nsACString"): fd.write(" a%s = m%s;\n" % (firstCap(a.name), firstCap(a.name))) else: fd.write(" *a%s = m%s;\n" % (firstCap(a.name), firstCap(a.name))) @@ -207,7 +218,9 @@ def allAttributes(iface): def write_cpp(eventname, iface, fd): classname = "xpcAcc%s" % eventname attributes = allAttributes(iface) - ccattributes = filter(lambda m: m.realtype.nativeType('in').endswith('*'), attributes) + ccattributes = filter( + lambda m: m.realtype.nativeType("in").endswith("*"), attributes + ) fd.write("NS_IMPL_CYCLE_COLLECTION(%s" % classname) for c in ccattributes: fd.write(", m%s" % firstCap(c.name)) @@ -228,8 +241,8 @@ def write_cpp(eventname, iface, fd): def get_conf(conf_file): conf = Configuration(conf_file) inc_dir = [ - mozpath.join(buildconfig.topsrcdir, 'accessible', 'interfaces'), - mozpath.join(buildconfig.topsrcdir, 'xpcom', 'base'), + mozpath.join(buildconfig.topsrcdir, "accessible", "interfaces"), + mozpath.join(buildconfig.topsrcdir, "xpcom", "base"), ] return conf, inc_dir @@ -238,6 +251,8 @@ def gen_files(fd, conf_file): deps = set() conf, inc_dir = get_conf(conf_file) deps.update(print_header_file(fd, conf, inc_dir)) - with open(os.path.join(os.path.dirname(fd.name), 'xpcAccEvents.cpp'), 'w') as cpp_fd: + with open( + os.path.join(os.path.dirname(fd.name), "xpcAccEvents.cpp"), "w" + ) as cpp_fd: deps.update(print_cpp_file(cpp_fd, conf, inc_dir)) return deps diff --git a/accessible/xpcom/moz.build b/accessible/xpcom/moz.build index 97d906eb72a24d..f45f3abcb2307c 100644 --- a/accessible/xpcom/moz.build +++ b/accessible/xpcom/moz.build @@ -5,76 +5,77 @@ # file, You can obtain one at http://mozilla.org/MPL/2.0/. 
UNIFIED_SOURCES += [ - 'nsAccessibleRelation.cpp', - 'xpcAccessibilityService.cpp', - 'xpcAccessible.cpp', - 'xpcAccessibleApplication.cpp', - 'xpcAccessibleDocument.cpp', - 'xpcAccessibleGeneric.cpp', - 'xpcAccessibleHyperLink.cpp', - 'xpcAccessibleHyperText.cpp', - 'xpcAccessibleImage.cpp', - 'xpcAccessibleSelectable.cpp', - 'xpcAccessibleTable.cpp', - 'xpcAccessibleTableCell.cpp', - 'xpcAccessibleTextRange.cpp', - 'xpcAccessibleValue.cpp', + "nsAccessibleRelation.cpp", + "xpcAccessibilityService.cpp", + "xpcAccessible.cpp", + "xpcAccessibleApplication.cpp", + "xpcAccessibleDocument.cpp", + "xpcAccessibleGeneric.cpp", + "xpcAccessibleHyperLink.cpp", + "xpcAccessibleHyperText.cpp", + "xpcAccessibleImage.cpp", + "xpcAccessibleSelectable.cpp", + "xpcAccessibleTable.cpp", + "xpcAccessibleTableCell.cpp", + "xpcAccessibleTextRange.cpp", + "xpcAccessibleValue.cpp", ] SOURCES += [ - '!xpcAccEvents.cpp', + "!xpcAccEvents.cpp", ] EXPORTS += [ - '!xpcAccEvents.h', - 'xpcAccessibilityService.h', + "!xpcAccEvents.h", + "xpcAccessibilityService.h", ] LOCAL_INCLUDES += [ - '/accessible/base', - '/accessible/generic', + "/accessible/base", + "/accessible/generic", ] -if CONFIG['MOZ_WIDGET_TOOLKIT'] == 'gtk': +if CONFIG["MOZ_WIDGET_TOOLKIT"] == "gtk": LOCAL_INCLUDES += [ - '/accessible/atk', + "/accessible/atk", ] -elif CONFIG['MOZ_WIDGET_TOOLKIT'] == 'windows': +elif CONFIG["MOZ_WIDGET_TOOLKIT"] == "windows": LOCAL_INCLUDES += [ - '/accessible/windows/msaa', + "/accessible/windows/msaa", ] -elif CONFIG['MOZ_WIDGET_TOOLKIT'] == 'cocoa': +elif CONFIG["MOZ_WIDGET_TOOLKIT"] == "cocoa": LOCAL_INCLUDES += [ - '/accessible/ipc', - '/accessible/ipc/other', - '/accessible/mac', - ] - UNIFIED_SOURCES += [ - 'xpcAccessibleMacInterface.mm' + "/accessible/ipc", + "/accessible/ipc/other", + "/accessible/mac", ] + UNIFIED_SOURCES += ["xpcAccessibleMacInterface.mm"] EXPORTS += [ - 'xpcAccessibleMacInterface.h', + "xpcAccessibleMacInterface.h", ] -elif CONFIG['MOZ_WIDGET_TOOLKIT'] == 'android': +elif CONFIG["MOZ_WIDGET_TOOLKIT"] == "android": LOCAL_INCLUDES += [ - '/accessible/android', + "/accessible/android", ] else: LOCAL_INCLUDES += [ - '/accessible/other', + "/accessible/other", ] GeneratedFile( - 'xpcAccEvents.h', 'xpcAccEvents.cpp', - script='AccEventGen.py', entry_point='gen_files', + "xpcAccEvents.h", + "xpcAccEvents.cpp", + script="AccEventGen.py", + entry_point="gen_files", inputs=[ - 'AccEvents.conf', - ]) + "AccEvents.conf", + ], +) -FINAL_LIBRARY = 'xul' +FINAL_LIBRARY = "xul" -include('/ipc/chromium/chromium-config.mozbuild') +include("/ipc/chromium/chromium-config.mozbuild") -if CONFIG['CC_TYPE'] in ('clang', 'gcc'): - CXXFLAGS += ['-Wno-error=shadow'] +if CONFIG["CC_TYPE"] in ("clang", "gcc"): + CXXFLAGS += ["-Wno-error=shadow"] diff --git a/accessible/xul/moz.build b/accessible/xul/moz.build index aa816eef0513f6..72a55cccea083b 100644 --- a/accessible/xul/moz.build +++ b/accessible/xul/moz.build @@ -5,55 +5,55 @@ # file, You can obtain one at http://mozilla.org/MPL/2.0/. 
UNIFIED_SOURCES += [ - 'XULAlertAccessible.cpp', - 'XULComboboxAccessible.cpp', - 'XULElementAccessibles.cpp', - 'XULFormControlAccessible.cpp', - 'XULListboxAccessible.cpp', - 'XULMenuAccessible.cpp', - 'XULSelectControlAccessible.cpp', - 'XULTabAccessible.cpp', - 'XULTreeAccessible.cpp', - 'XULTreeGridAccessible.cpp', + "XULAlertAccessible.cpp", + "XULComboboxAccessible.cpp", + "XULElementAccessibles.cpp", + "XULFormControlAccessible.cpp", + "XULListboxAccessible.cpp", + "XULMenuAccessible.cpp", + "XULSelectControlAccessible.cpp", + "XULTabAccessible.cpp", + "XULTreeAccessible.cpp", + "XULTreeGridAccessible.cpp", ] LOCAL_INCLUDES += [ - '/accessible/base', - '/accessible/generic', - '/accessible/html', - '/accessible/xpcom', - '/dom/base', - '/dom/xul', - '/layout/generic', - '/layout/xul', - '/layout/xul/tree', + "/accessible/base", + "/accessible/generic", + "/accessible/html", + "/accessible/xpcom", + "/dom/base", + "/dom/xul", + "/layout/generic", + "/layout/xul", + "/layout/xul/tree", ] -if CONFIG['MOZ_WIDGET_TOOLKIT'] == 'gtk': +if CONFIG["MOZ_WIDGET_TOOLKIT"] == "gtk": LOCAL_INCLUDES += [ - '/accessible/atk', + "/accessible/atk", ] -elif CONFIG['MOZ_WIDGET_TOOLKIT'] == 'windows': +elif CONFIG["MOZ_WIDGET_TOOLKIT"] == "windows": LOCAL_INCLUDES += [ - '/accessible/windows/ia2', - '/accessible/windows/msaa', + "/accessible/windows/ia2", + "/accessible/windows/msaa", ] -elif CONFIG['MOZ_WIDGET_TOOLKIT'] == 'cocoa': +elif CONFIG["MOZ_WIDGET_TOOLKIT"] == "cocoa": LOCAL_INCLUDES += [ - '/accessible/mac', + "/accessible/mac", ] -elif CONFIG['MOZ_WIDGET_TOOLKIT'] == 'android': +elif CONFIG["MOZ_WIDGET_TOOLKIT"] == "android": LOCAL_INCLUDES += [ - '/accessible/android', + "/accessible/android", ] else: LOCAL_INCLUDES += [ - '/accessible/other', + "/accessible/other", ] -include('/ipc/chromium/chromium-config.mozbuild') +include("/ipc/chromium/chromium-config.mozbuild") -FINAL_LIBRARY = 'xul' +FINAL_LIBRARY = "xul" -if CONFIG['CC_TYPE'] in ('clang', 'gcc'): - CXXFLAGS += ['-Wno-error=shadow'] +if CONFIG["CC_TYPE"] in ("clang", "gcc"): + CXXFLAGS += ["-Wno-error=shadow"] diff --git a/browser/actors/moz.build b/browser/actors/moz.build index f00621df222ebd..1f9f8ef1c032f8 100644 --- a/browser/actors/moz.build +++ b/browser/actors/moz.build @@ -29,61 +29,61 @@ with Files("WebRTCChild.jsm"): BUG_COMPONENT = ("Firefox", "Site Permissions") FINAL_TARGET_FILES.actors += [ - 'AboutNewInstallChild.jsm', - 'AboutNewInstallParent.jsm', - 'AboutNewTabChild.jsm', - 'AboutNewTabParent.jsm', - 'AboutPluginsChild.jsm', - 'AboutPluginsParent.jsm', - 'AboutPrivateBrowsingChild.jsm', - 'AboutPrivateBrowsingParent.jsm', - 'AboutProtectionsChild.jsm', - 'AboutProtectionsParent.jsm', - 'AboutReaderChild.jsm', - 'AboutReaderParent.jsm', - 'AboutTabCrashedChild.jsm', - 'AboutTabCrashedParent.jsm', - 'BlockedSiteChild.jsm', - 'BlockedSiteParent.jsm', - 'BrowserProcessChild.jsm', - 'BrowserTabChild.jsm', - 'BrowserTabParent.jsm', - 'ClickHandlerChild.jsm', - 'ClickHandlerParent.jsm', - 'ContentMetaChild.jsm', - 'ContentMetaParent.jsm', - 'ContentSearchChild.jsm', - 'ContentSearchParent.jsm', - 'ContextMenuChild.jsm', - 'ContextMenuParent.jsm', - 'DecoderDoctorChild.jsm', - 'DecoderDoctorParent.jsm', - 'DOMFullscreenChild.jsm', - 'DOMFullscreenParent.jsm', - 'EncryptedMediaChild.jsm', - 'EncryptedMediaParent.jsm', - 'FormValidationChild.jsm', - 'FormValidationParent.jsm', - 'LightweightThemeChild.jsm', - 'LinkHandlerChild.jsm', - 'LinkHandlerParent.jsm', - 'NetErrorChild.jsm', - 'NetErrorParent.jsm', - 
'PageInfoChild.jsm', - 'PageStyleChild.jsm', - 'PageStyleParent.jsm', - 'PluginChild.jsm', - 'PluginParent.jsm', - 'PointerLockChild.jsm', - 'PointerLockParent.jsm', - 'PromptParent.jsm', - 'RefreshBlockerChild.jsm', - 'RefreshBlockerParent.jsm', - 'RFPHelperChild.jsm', - 'RFPHelperParent.jsm', - 'SearchTelemetryChild.jsm', - 'SearchTelemetryParent.jsm', - 'SwitchDocumentDirectionChild.jsm', - 'WebRTCChild.jsm', - 'WebRTCParent.jsm', + "AboutNewInstallChild.jsm", + "AboutNewInstallParent.jsm", + "AboutNewTabChild.jsm", + "AboutNewTabParent.jsm", + "AboutPluginsChild.jsm", + "AboutPluginsParent.jsm", + "AboutPrivateBrowsingChild.jsm", + "AboutPrivateBrowsingParent.jsm", + "AboutProtectionsChild.jsm", + "AboutProtectionsParent.jsm", + "AboutReaderChild.jsm", + "AboutReaderParent.jsm", + "AboutTabCrashedChild.jsm", + "AboutTabCrashedParent.jsm", + "BlockedSiteChild.jsm", + "BlockedSiteParent.jsm", + "BrowserProcessChild.jsm", + "BrowserTabChild.jsm", + "BrowserTabParent.jsm", + "ClickHandlerChild.jsm", + "ClickHandlerParent.jsm", + "ContentMetaChild.jsm", + "ContentMetaParent.jsm", + "ContentSearchChild.jsm", + "ContentSearchParent.jsm", + "ContextMenuChild.jsm", + "ContextMenuParent.jsm", + "DecoderDoctorChild.jsm", + "DecoderDoctorParent.jsm", + "DOMFullscreenChild.jsm", + "DOMFullscreenParent.jsm", + "EncryptedMediaChild.jsm", + "EncryptedMediaParent.jsm", + "FormValidationChild.jsm", + "FormValidationParent.jsm", + "LightweightThemeChild.jsm", + "LinkHandlerChild.jsm", + "LinkHandlerParent.jsm", + "NetErrorChild.jsm", + "NetErrorParent.jsm", + "PageInfoChild.jsm", + "PageStyleChild.jsm", + "PageStyleParent.jsm", + "PluginChild.jsm", + "PluginParent.jsm", + "PointerLockChild.jsm", + "PointerLockParent.jsm", + "PromptParent.jsm", + "RefreshBlockerChild.jsm", + "RefreshBlockerParent.jsm", + "RFPHelperChild.jsm", + "RFPHelperParent.jsm", + "SearchTelemetryChild.jsm", + "SearchTelemetryParent.jsm", + "SwitchDocumentDirectionChild.jsm", + "WebRTCChild.jsm", + "WebRTCParent.jsm", ] diff --git a/browser/app/macbuild/Contents/moz.build b/browser/app/macbuild/Contents/moz.build index daf51c25cbf552..b8b28926de4600 100644 --- a/browser/app/macbuild/Contents/moz.build +++ b/browser/app/macbuild/Contents/moz.build @@ -6,14 +6,20 @@ defs = [] -for s in ('MOZ_GECKODRIVER', 'MOZ_ASAN', 'MOZ_TSAN', 'MOZ_CRASHREPORTER', - 'MOZ_APP_NAME'): +for s in ( + "MOZ_GECKODRIVER", + "MOZ_ASAN", + "MOZ_TSAN", + "MOZ_CRASHREPORTER", + "MOZ_APP_NAME", +): if CONFIG[s]: - defs.append('-D%s=%s' % (s, '1' if CONFIG[s] is True else CONFIG[s])) + defs.append("-D%s=%s" % (s, "1" if CONFIG[s] is True else CONFIG[s])) GeneratedFile( - 'MacOS-files.txt', - script='/python/mozbuild/mozbuild/action/preprocessor.py', - entry_point='generate', - inputs=['MacOS-files.in'], - flags=defs) + "MacOS-files.txt", + script="/python/mozbuild/mozbuild/action/preprocessor.py", + entry_point="generate", + inputs=["MacOS-files.in"], + flags=defs, +) diff --git a/browser/app/macversion.py b/browser/app/macversion.py index f57b5dc85f0db0..3d9aaaa94ac545 100644 --- a/browser/app/macversion.py +++ b/browser/app/macversion.py @@ -10,17 +10,17 @@ import re o = OptionParser() -o.add_option('--buildid', dest='buildid') -o.add_option('--version', dest='version') +o.add_option("--buildid", dest="buildid") +o.add_option("--version", dest="version") (options, args) = o.parse_args() if not options.buildid: - print('--buildid is required', file=sys.stderr) + print("--buildid is required", file=sys.stderr) sys.exit(1) if not options.version: - 
print('--version is required', file=sys.stderr) + print("--version is required", file=sys.stderr) sys.exit(1) # We want to build a version number that matches the format allowed for @@ -29,18 +29,19 @@ # builds), but also so that newly-built older versions (e.g. beta build) aren't # considered "newer" than previously-built newer versions (e.g. a trunk nightly) -define, MOZ_BUILDID, buildid = io.open( - options.buildid, 'r', encoding='utf-8').read().split() +define, MOZ_BUILDID, buildid = ( + io.open(options.buildid, "r", encoding="utf-8").read().split() +) # extract only the major version (i.e. "14" from "14.0b1") -majorVersion = re.match(r'^(\d+)[^\d].*', options.version).group(1) +majorVersion = re.match(r"^(\d+)[^\d].*", options.version).group(1) # last two digits of the year twodigityear = buildid[2:4] month = buildid[4:6] -if month[0] == '0': +if month[0] == "0": month = month[1] day = buildid[6:8] -if day[0] == '0': +if day[0] == "0": day = day[1] -print('%s.%s.%s' % (majorVersion + twodigityear, month, day)) +print("%s.%s.%s" % (majorVersion + twodigityear, month, day)) diff --git a/browser/app/moz.build b/browser/app/moz.build index d6b25cbd92ab37..9da2da337663a5 100644 --- a/browser/app/moz.build +++ b/browser/app/moz.build @@ -29,90 +29,90 @@ with Files("profile/channel-prefs.js"): with Files("profile/firefox.js"): BUG_COMPONENT = ("Firefox", "General") -if CONFIG['MOZ_MACBUNDLE_NAME']: - DIRS += ['macbuild/Contents'] +if CONFIG["MOZ_MACBUNDLE_NAME"]: + DIRS += ["macbuild/Contents"] -if CONFIG['MOZ_NO_PIE_COMPAT']: - GeckoProgram(CONFIG['MOZ_APP_NAME'] + '-bin') +if CONFIG["MOZ_NO_PIE_COMPAT"]: + GeckoProgram(CONFIG["MOZ_APP_NAME"] + "-bin") - DIRS += ['no-pie'] + DIRS += ["no-pie"] else: - GeckoProgram(CONFIG['MOZ_APP_NAME']) + GeckoProgram(CONFIG["MOZ_APP_NAME"]) SOURCES += [ - 'nsBrowserApp.cpp', + "nsBrowserApp.cpp", ] # Neither channel-prefs.js nor firefox.exe want to end up in dist/bin/browser. DIST_SUBDIR = "" LOCAL_INCLUDES += [ - '!/build', - '/toolkit/xre', - '/xpcom/base', - '/xpcom/build', + "!/build", + "/toolkit/xre", + "/xpcom/base", + "/xpcom/build", ] -if CONFIG['LIBFUZZER']: - USE_LIBS += [ 'fuzzer' ] +if CONFIG["LIBFUZZER"]: + USE_LIBS += ["fuzzer"] LOCAL_INCLUDES += [ - '/tools/fuzzing/libfuzzer', + "/tools/fuzzing/libfuzzer", ] -if CONFIG['ENABLE_GECKODRIVER']: - DEFINES['MOZ_GECKODRIVER'] = True +if CONFIG["ENABLE_GECKODRIVER"]: + DEFINES["MOZ_GECKODRIVER"] = True -if CONFIG['CC_TYPE'] == 'clang-cl': +if CONFIG["CC_TYPE"] == "clang-cl": # Always enter a Windows program through wmain, whether or not we're # a console application. 
- WIN32_EXE_LDFLAGS += ['-ENTRY:wmainCRTStartup'] + WIN32_EXE_LDFLAGS += ["-ENTRY:wmainCRTStartup"] -if CONFIG['OS_ARCH'] == 'WINNT': - RCINCLUDE = 'splash.rc' +if CONFIG["OS_ARCH"] == "WINNT": + RCINCLUDE = "splash.rc" DIRS += [ - 'winlauncher', + "winlauncher", ] USE_LIBS += [ - 'winlauncher', + "winlauncher", ] LOCAL_INCLUDES += [ - '/browser/app/winlauncher', + "/browser/app/winlauncher", ] DELAYLOAD_DLLS += [ - 'oleaut32.dll', - 'ole32.dll', - 'rpcrt4.dll', - 'version.dll', + "oleaut32.dll", + "ole32.dll", + "rpcrt4.dll", + "version.dll", ] - if CONFIG['CC_TYPE'] == 'clang-cl': - libpath_flag = '-LIBPATH:' + if CONFIG["CC_TYPE"] == "clang-cl": + libpath_flag = "-LIBPATH:" else: - libpath_flag = '-L' + libpath_flag = "-L" WIN32_EXE_LDFLAGS += [ - libpath_flag + OBJDIR + '/winlauncher/freestanding', + libpath_flag + OBJDIR + "/winlauncher/freestanding", ] -if CONFIG['MOZ_SANDBOX'] and CONFIG['OS_ARCH'] == 'Darwin': +if CONFIG["MOZ_SANDBOX"] and CONFIG["OS_ARCH"] == "Darwin": USE_LIBS += [ - 'mozsandbox', + "mozsandbox", ] -if CONFIG['MOZ_SANDBOX'] and CONFIG['OS_ARCH'] == 'WINNT': +if CONFIG["MOZ_SANDBOX"] and CONFIG["OS_ARCH"] == "WINNT": # For sandbox includes and the include dependencies those have LOCAL_INCLUDES += [ - '/security/sandbox/chromium', - '/security/sandbox/chromium-shim', + "/security/sandbox/chromium", + "/security/sandbox/chromium-shim", ] USE_LIBS += [ - 'sandbox_s', + "sandbox_s", ] DELAYLOAD_DLLS += [ - 'winmm.dll', - 'user32.dll', + "winmm.dll", + "user32.dll", ] # Control the default heap size. @@ -123,23 +123,29 @@ if CONFIG['MOZ_SANDBOX'] and CONFIG['OS_ARCH'] == 'WINNT': # The heap will grow if need be. # # Set it to 256k. See bug 127069. -if CONFIG['OS_ARCH'] == 'WINNT' and CONFIG['CC_TYPE'] not in ('clang', 'gcc'): - LDFLAGS += ['/HEAP:0x40000'] +if CONFIG["OS_ARCH"] == "WINNT" and CONFIG["CC_TYPE"] not in ("clang", "gcc"): + LDFLAGS += ["/HEAP:0x40000"] DisableStlWrapping() -if CONFIG['HAVE_CLOCK_MONOTONIC']: - OS_LIBS += CONFIG['REALTIME_LIBS'] +if CONFIG["HAVE_CLOCK_MONOTONIC"]: + OS_LIBS += CONFIG["REALTIME_LIBS"] -if CONFIG['MOZ_LINUX_32_SSE2_STARTUP_ERROR']: - DEFINES['MOZ_LINUX_32_SSE2_STARTUP_ERROR'] = True - COMPILE_FLAGS['OS_CXXFLAGS'] = [ - f for f in COMPILE_FLAGS.get('OS_CXXFLAGS', []) - if not f.startswith('-march=') and f not in ('-msse', '-msse2', '-mfpmath=sse') +if CONFIG["MOZ_LINUX_32_SSE2_STARTUP_ERROR"]: + DEFINES["MOZ_LINUX_32_SSE2_STARTUP_ERROR"] = True + COMPILE_FLAGS["OS_CXXFLAGS"] = [ + f + for f in COMPILE_FLAGS.get("OS_CXXFLAGS", []) + if not f.startswith("-march=") and f not in ("-msse", "-msse2", "-mfpmath=sse") ] + [ - '-mno-sse', '-mno-sse2', '-mfpmath=387', + "-mno-sse", + "-mno-sse2", + "-mfpmath=387", ] -for icon in ('firefox', 'document', 'newwindow', 'newtab', 'pbmode'): - DEFINES[icon.upper() + '_ICO'] = '"%s/%s/%s.ico"' % ( - TOPSRCDIR, CONFIG['MOZ_BRANDING_DIRECTORY'], icon) +for icon in ("firefox", "document", "newwindow", "newtab", "pbmode"): + DEFINES[icon.upper() + "_ICO"] = '"%s/%s/%s.ico"' % ( + TOPSRCDIR, + CONFIG["MOZ_BRANDING_DIRECTORY"], + icon, + ) diff --git a/browser/app/no-pie/moz.build b/browser/app/no-pie/moz.build index 4c9c884327c300..74aa89409d19ee 100644 --- a/browser/app/no-pie/moz.build +++ b/browser/app/no-pie/moz.build @@ -4,25 +4,21 @@ # License, v. 2.0. If a copy of the MPL was not distributed with this # file, You can obtain one at http://mozilla.org/MPL/2.0/. 
-Program(CONFIG['MOZ_APP_NAME']) +Program(CONFIG["MOZ_APP_NAME"]) SOURCES += [ - 'NoPie.c', + "NoPie.c", ] # For some reason, LTO messes things up. We don't care anyways. CFLAGS += [ - '-fno-lto', + "-fno-lto", ] # Use OS_LIBS instead of LDFLAGS to "force" the flag to come after -pie # from MOZ_PROGRAM_LDFLAGS. -if CONFIG['CC_TYPE'] == 'clang': +if CONFIG["CC_TYPE"] == "clang": # clang < 5.0 doesn't support -no-pie. - OS_LIBS += [ - '-nopie' - ] + OS_LIBS += ["-nopie"] else: - OS_LIBS += [ - '-no-pie' - ] + OS_LIBS += ["-no-pie"] diff --git a/browser/app/winlauncher/freestanding/gen_ntdll_freestanding_lib.py b/browser/app/winlauncher/freestanding/gen_ntdll_freestanding_lib.py index 6a91f63fbce710..a69261cc3ae309 100644 --- a/browser/app/winlauncher/freestanding/gen_ntdll_freestanding_lib.py +++ b/browser/app/winlauncher/freestanding/gen_ntdll_freestanding_lib.py @@ -20,11 +20,11 @@ def main(output_fd, def_file, llvm_dlltool, *llvm_dlltool_args): try: cmd = [llvm_dlltool] cmd.extend(llvm_dlltool_args) - cmd += ['-d', def_file, '-l', tmp_output] + cmd += ["-d", def_file, "-l", tmp_output] subprocess.check_call(cmd) - with open(tmp_output, 'rb') as tmplib: + with open(tmp_output, "rb") as tmplib: output_fd.write(tmplib.read()) finally: os.remove(tmp_output) diff --git a/browser/app/winlauncher/freestanding/moz.build b/browser/app/winlauncher/freestanding/moz.build index 460f8ec4cb0bbf..5e2dac70ea1ed8 100644 --- a/browser/app/winlauncher/freestanding/moz.build +++ b/browser/app/winlauncher/freestanding/moz.build @@ -4,7 +4,7 @@ # License, v. 2.0. If a copy of the MPL was not distributed with this # file, You can obtain one at http://mozilla.org/MPL/2.0/. -Library('winlauncher-freestanding') +Library("winlauncher-freestanding") FORCE_STATIC_LIB = True @@ -14,43 +14,43 @@ FORCE_STATIC_LIB = True NO_PGO = True UNIFIED_SOURCES += [ - 'DllBlocklist.cpp', - 'FunctionTableResolver.cpp', - 'LoaderPrivateAPI.cpp', - 'ModuleLoadFrame.cpp', + "DllBlocklist.cpp", + "FunctionTableResolver.cpp", + "LoaderPrivateAPI.cpp", + "ModuleLoadFrame.cpp", ] # This library must be compiled in a freestanding environment, as its code must # not assume that it has access to any runtime libraries. -if CONFIG['CC_TYPE'] == 'clang-cl': - CXXFLAGS += ['-Xclang'] +if CONFIG["CC_TYPE"] == "clang-cl": + CXXFLAGS += ["-Xclang"] CXXFLAGS += [ - '-ffreestanding', + "-ffreestanding", ] # Forcibly include Freestanding.h into all source files in this library. 
-if CONFIG['CC_TYPE'] == 'clang-cl': - CXXFLAGS += ['-FI'] +if CONFIG["CC_TYPE"] == "clang-cl": + CXXFLAGS += ["-FI"] else: - CXXFLAGS += ['-include'] + CXXFLAGS += ["-include"] -CXXFLAGS += [ SRCDIR + '/Freestanding.h' ] +CXXFLAGS += [SRCDIR + "/Freestanding.h"] OS_LIBS += [ - 'ntdll', - 'ntdll_freestanding', + "ntdll", + "ntdll_freestanding", ] -if CONFIG['COMPILE_ENVIRONMENT'] and CONFIG['LLVM_DLLTOOL']: +if CONFIG["COMPILE_ENVIRONMENT"] and CONFIG["LLVM_DLLTOOL"]: GeneratedFile( - '%sntdll_freestanding.%s' % (CONFIG['LIB_PREFIX'], - CONFIG['LIB_SUFFIX']), - script='gen_ntdll_freestanding_lib.py', - inputs=['ntdll_freestanding.def'], - flags=[CONFIG['LLVM_DLLTOOL']] + CONFIG['LLVM_DLLTOOL_FLAGS']) + "%sntdll_freestanding.%s" % (CONFIG["LIB_PREFIX"], CONFIG["LIB_SUFFIX"]), + script="gen_ntdll_freestanding_lib.py", + inputs=["ntdll_freestanding.def"], + flags=[CONFIG["LLVM_DLLTOOL"]] + CONFIG["LLVM_DLLTOOL_FLAGS"], + ) DisableStlWrapping() -with Files('**'): - BUG_COMPONENT = ('Firefox', 'Launcher Process') +with Files("**"): + BUG_COMPONENT = ("Firefox", "Launcher Process") diff --git a/browser/app/winlauncher/moz.build b/browser/app/winlauncher/moz.build index 499396b3774b57..585e8835473115 100644 --- a/browser/app/winlauncher/moz.build +++ b/browser/app/winlauncher/moz.build @@ -4,48 +4,48 @@ # License, v. 2.0. If a copy of the MPL was not distributed with this # file, You can obtain one at http://mozilla.org/MPL/2.0/. -Library('winlauncher') +Library("winlauncher") FORCE_STATIC_LIB = True UNIFIED_SOURCES += [ - '/ipc/mscom/ProcessRuntime.cpp', - '/widget/windows/WindowsConsole.cpp', - 'DllBlocklistInit.cpp', - 'ErrorHandler.cpp', - 'LauncherProcessWin.cpp', - 'LaunchUnelevated.cpp', - 'NtLoaderAPI.cpp', + "/ipc/mscom/ProcessRuntime.cpp", + "/widget/windows/WindowsConsole.cpp", + "DllBlocklistInit.cpp", + "ErrorHandler.cpp", + "LauncherProcessWin.cpp", + "LaunchUnelevated.cpp", + "NtLoaderAPI.cpp", ] OS_LIBS += [ - 'oleaut32', - 'ole32', - 'rpcrt4', - 'version', + "oleaut32", + "ole32", + "rpcrt4", + "version", ] DIRS += [ - 'freestanding', + "freestanding", ] USE_LIBS += [ - 'winlauncher-freestanding', + "winlauncher-freestanding", ] TEST_DIRS += [ - 'test', + "test", ] -if CONFIG['MOZ_LAUNCHER_PROCESS']: +if CONFIG["MOZ_LAUNCHER_PROCESS"]: UNIFIED_SOURCES += [ - '/toolkit/xre/LauncherRegistryInfo.cpp', - '/toolkit/xre/WinTokenUtils.cpp', + "/toolkit/xre/LauncherRegistryInfo.cpp", + "/toolkit/xre/WinTokenUtils.cpp", ] - for var in ('MOZ_APP_BASENAME', 'MOZ_APP_VENDOR'): + for var in ("MOZ_APP_BASENAME", "MOZ_APP_VENDOR"): DEFINES[var] = '"%s"' % CONFIG[var] DisableStlWrapping() -with Files('**'): - BUG_COMPONENT = ('Firefox', 'Launcher Process') +with Files("**"): + BUG_COMPONENT = ("Firefox", "Launcher Process") diff --git a/browser/app/winlauncher/test/moz.build b/browser/app/winlauncher/test/moz.build index e53a113279d675..0416be16c05591 100644 --- a/browser/app/winlauncher/test/moz.build +++ b/browser/app/winlauncher/test/moz.build @@ -8,22 +8,22 @@ DisableStlWrapping() GeckoCppUnitTests( [ - 'TestSafeThreadLocal', - 'TestSameBinary', + "TestSafeThreadLocal", + "TestSameBinary", ], - linkage=None + linkage=None, ) LOCAL_INCLUDES += [ - '/browser/app/winlauncher', + "/browser/app/winlauncher", ] OS_LIBS += [ - 'ntdll', + "ntdll", ] -if CONFIG['CC_TYPE'] in ('gcc', 'clang'): +if CONFIG["CC_TYPE"] in ("gcc", "clang"): # This allows us to use wmain as the entry point on mingw LDFLAGS += [ - '-municode', + "-municode", ] diff --git a/browser/base/gen_aboutneterror_codes.py 
b/browser/base/gen_aboutneterror_codes.py index 572a6c821bbf7f..a333d206fa155c 100644 --- a/browser/base/gen_aboutneterror_codes.py +++ b/browser/base/gen_aboutneterror_codes.py @@ -11,8 +11,10 @@ def find_error_ids(filename, known_strings): - with open(filename, 'r', encoding="utf-8") as f: - known_strings += [m.id.name for m in parse(f.read()).body if isinstance(m, Message)] + with open(filename, "r", encoding="utf-8") as f: + known_strings += [ + m.id.name for m in parse(f.read()).body if isinstance(m, Message) + ] def main(output, *filenames): @@ -20,11 +22,11 @@ def main(output, *filenames): for filename in filenames: find_error_ids(filename, known_strings) - output.write('const KNOWN_ERROR_MESSAGE_IDS = new Set([\n') + output.write("const KNOWN_ERROR_MESSAGE_IDS = new Set([\n") for known_string in known_strings: output.write(' "{}",\n'.format(known_string)) - output.write(']);\n') + output.write("]);\n") -if __name__ == '__main__': +if __name__ == "__main__": sys.exit(main(sys.stdout, *sys.argv[1:])) diff --git a/browser/base/moz.build b/browser/base/moz.build index fdfa93d4623a82..393ec02bf30a6f 100644 --- a/browser/base/moz.build +++ b/browser/base/moz.build @@ -7,80 +7,80 @@ with Files("**"): BUG_COMPONENT = ("Firefox", "General") -SPHINX_TREES['sslerrorreport'] = 'content/docs/sslerrorreport' -SPHINX_TREES['tabbrowser'] = 'content/docs/tabbrowser' +SPHINX_TREES["sslerrorreport"] = "content/docs/sslerrorreport" +SPHINX_TREES["tabbrowser"] = "content/docs/tabbrowser" -with Files('content/docs/sslerrorreport/**'): - SCHEDULES.exclusive = ['docs'] +with Files("content/docs/sslerrorreport/**"): + SCHEDULES.exclusive = ["docs"] MOCHITEST_CHROME_MANIFESTS += [ - 'content/test/chrome/chrome.ini', + "content/test/chrome/chrome.ini", ] BROWSER_CHROME_MANIFESTS += [ - 'content/test/about/browser.ini', - 'content/test/alerts/browser.ini', - 'content/test/backforward/browser.ini', - 'content/test/caps/browser.ini', - 'content/test/captivePortal/browser.ini', - 'content/test/contextMenu/browser.ini', - 'content/test/favicons/browser.ini', - 'content/test/forms/browser.ini', - 'content/test/fullscreen/browser.ini', - 'content/test/general/browser.ini', - 'content/test/historySwipeAnimation/browser.ini', - 'content/test/keyboard/browser.ini', - 'content/test/menubar/browser.ini', - 'content/test/metaTags/browser.ini', - 'content/test/outOfProcess/browser.ini', - 'content/test/pageActions/browser.ini', - 'content/test/pageinfo/browser.ini', - 'content/test/pageStyle/browser.ini', - 'content/test/performance/browser.ini', - 'content/test/performance/hidpi/browser.ini', - 'content/test/performance/io/browser.ini', - 'content/test/performance/lowdpi/browser.ini', - 'content/test/permissions/browser.ini', - 'content/test/plugins/browser.ini', - 'content/test/popupNotifications/browser.ini', - 'content/test/popups/browser.ini', - 'content/test/protectionsUI/browser.ini', - 'content/test/referrer/browser.ini', - 'content/test/sanitize/browser.ini', - 'content/test/sidebar/browser.ini', - 'content/test/siteIdentity/browser.ini', - 'content/test/static/browser.ini', - 'content/test/statuspanel/browser.ini', - 'content/test/sync/browser.ini', - 'content/test/tabcrashed/browser.ini', - 'content/test/tabdialogs/browser.ini', - 'content/test/tabMediaIndicator/browser.ini', - 'content/test/tabPrompts/browser.ini', - 'content/test/tabs/browser.ini', - 'content/test/touch/browser.ini', - 'content/test/webextensions/browser.ini', - 'content/test/webrtc/browser.ini', - 
'content/test/webrtc/legacyIndicator/browser.ini', - 'content/test/zoom/browser.ini', + "content/test/about/browser.ini", + "content/test/alerts/browser.ini", + "content/test/backforward/browser.ini", + "content/test/caps/browser.ini", + "content/test/captivePortal/browser.ini", + "content/test/contextMenu/browser.ini", + "content/test/favicons/browser.ini", + "content/test/forms/browser.ini", + "content/test/fullscreen/browser.ini", + "content/test/general/browser.ini", + "content/test/historySwipeAnimation/browser.ini", + "content/test/keyboard/browser.ini", + "content/test/menubar/browser.ini", + "content/test/metaTags/browser.ini", + "content/test/outOfProcess/browser.ini", + "content/test/pageActions/browser.ini", + "content/test/pageinfo/browser.ini", + "content/test/pageStyle/browser.ini", + "content/test/performance/browser.ini", + "content/test/performance/hidpi/browser.ini", + "content/test/performance/io/browser.ini", + "content/test/performance/lowdpi/browser.ini", + "content/test/permissions/browser.ini", + "content/test/plugins/browser.ini", + "content/test/popupNotifications/browser.ini", + "content/test/popups/browser.ini", + "content/test/protectionsUI/browser.ini", + "content/test/referrer/browser.ini", + "content/test/sanitize/browser.ini", + "content/test/sidebar/browser.ini", + "content/test/siteIdentity/browser.ini", + "content/test/static/browser.ini", + "content/test/statuspanel/browser.ini", + "content/test/sync/browser.ini", + "content/test/tabcrashed/browser.ini", + "content/test/tabdialogs/browser.ini", + "content/test/tabMediaIndicator/browser.ini", + "content/test/tabPrompts/browser.ini", + "content/test/tabs/browser.ini", + "content/test/touch/browser.ini", + "content/test/webextensions/browser.ini", + "content/test/webrtc/browser.ini", + "content/test/webrtc/legacyIndicator/browser.ini", + "content/test/zoom/browser.ini", ] -PERFTESTS_MANIFESTS += [ - 'content/test/perftest.ini' -] - -DEFINES['MOZ_APP_VERSION'] = CONFIG['MOZ_APP_VERSION'] -DEFINES['MOZ_APP_VERSION_DISPLAY'] = CONFIG['MOZ_APP_VERSION_DISPLAY'] +PERFTESTS_MANIFESTS += ["content/test/perftest.ini"] -DEFINES['APP_LICENSE_BLOCK'] = '%s/content/overrides/app-license.html' % SRCDIR +DEFINES["MOZ_APP_VERSION"] = CONFIG["MOZ_APP_VERSION"] +DEFINES["MOZ_APP_VERSION_DISPLAY"] = CONFIG["MOZ_APP_VERSION_DISPLAY"] -if CONFIG['MOZ_WIDGET_TOOLKIT'] in ('windows', 'gtk', 'cocoa'): - DEFINES['CONTEXT_COPY_IMAGE_CONTENTS'] = 1 +DEFINES["APP_LICENSE_BLOCK"] = "%s/content/overrides/app-license.html" % SRCDIR -if CONFIG['MOZ_WIDGET_TOOLKIT'] in ('windows', 'gtk'): - DEFINES['MENUBAR_CAN_AUTOHIDE'] = 1 +if CONFIG["MOZ_WIDGET_TOOLKIT"] in ("windows", "gtk", "cocoa"): + DEFINES["CONTEXT_COPY_IMAGE_CONTENTS"] = 1 -JAR_MANIFESTS += ['jar.mn'] +if CONFIG["MOZ_WIDGET_TOOLKIT"] in ("windows", "gtk"): + DEFINES["MENUBAR_CAN_AUTOHIDE"] = 1 -GeneratedFile('content/aboutNetErrorCodes.js', script='gen_aboutneterror_codes.py', - inputs=['/browser/locales/en-US/browser/nsserrors.ftl']) +JAR_MANIFESTS += ["jar.mn"] +GeneratedFile( + "content/aboutNetErrorCodes.js", + script="gen_aboutneterror_codes.py", + inputs=["/browser/locales/en-US/browser/nsserrors.ftl"], +) diff --git a/browser/branding/aurora/content/moz.build b/browser/branding/aurora/content/moz.build index eb4454d28f88bf..d988c0ff9b162c 100644 --- a/browser/branding/aurora/content/moz.build +++ b/browser/branding/aurora/content/moz.build @@ -4,4 +4,4 @@ # License, v. 2.0. 
If a copy of the MPL was not distributed with this # file, You can obtain one at http://mozilla.org/MPL/2.0/. -JAR_MANIFESTS += ['jar.mn'] \ No newline at end of file +JAR_MANIFESTS += ["jar.mn"] diff --git a/browser/branding/aurora/locales/moz.build b/browser/branding/aurora/locales/moz.build index 8bad13124d5a4a..fff7035065b0ae 100644 --- a/browser/branding/aurora/locales/moz.build +++ b/browser/branding/aurora/locales/moz.build @@ -4,6 +4,6 @@ # License, v. 2.0. If a copy of the MPL was not distributed with this # file, You can obtain one at http://mozilla.org/MPL/2.0/. -DEFINES['MOZ_DISTRIBUTION_ID_UNQUOTED'] = CONFIG['MOZ_DISTRIBUTION_ID'] +DEFINES["MOZ_DISTRIBUTION_ID_UNQUOTED"] = CONFIG["MOZ_DISTRIBUTION_ID"] -JAR_MANIFESTS += ['jar.mn'] \ No newline at end of file +JAR_MANIFESTS += ["jar.mn"] diff --git a/browser/branding/aurora/moz.build b/browser/branding/aurora/moz.build index 9045cee11bb858..dd081ac44496d1 100644 --- a/browser/branding/aurora/moz.build +++ b/browser/branding/aurora/moz.build @@ -4,10 +4,10 @@ # License, v. 2.0. If a copy of the MPL was not distributed with this # file, You can obtain one at http://mozilla.org/MPL/2.0/. -DIRS += ['content', 'locales'] +DIRS += ["content", "locales"] -DIST_SUBDIR = 'browser' -export('DIST_SUBDIR') +DIST_SUBDIR = "browser" +export("DIST_SUBDIR") -include('../branding-common.mozbuild') +include("../branding-common.mozbuild") FirefoxBranding() diff --git a/browser/branding/nightly/content/moz.build b/browser/branding/nightly/content/moz.build index eb4454d28f88bf..d988c0ff9b162c 100644 --- a/browser/branding/nightly/content/moz.build +++ b/browser/branding/nightly/content/moz.build @@ -4,4 +4,4 @@ # License, v. 2.0. If a copy of the MPL was not distributed with this # file, You can obtain one at http://mozilla.org/MPL/2.0/. -JAR_MANIFESTS += ['jar.mn'] \ No newline at end of file +JAR_MANIFESTS += ["jar.mn"] diff --git a/browser/branding/nightly/locales/moz.build b/browser/branding/nightly/locales/moz.build index 8bad13124d5a4a..fff7035065b0ae 100644 --- a/browser/branding/nightly/locales/moz.build +++ b/browser/branding/nightly/locales/moz.build @@ -4,6 +4,6 @@ # License, v. 2.0. If a copy of the MPL was not distributed with this # file, You can obtain one at http://mozilla.org/MPL/2.0/. -DEFINES['MOZ_DISTRIBUTION_ID_UNQUOTED'] = CONFIG['MOZ_DISTRIBUTION_ID'] +DEFINES["MOZ_DISTRIBUTION_ID_UNQUOTED"] = CONFIG["MOZ_DISTRIBUTION_ID"] -JAR_MANIFESTS += ['jar.mn'] \ No newline at end of file +JAR_MANIFESTS += ["jar.mn"] diff --git a/browser/branding/nightly/moz.build b/browser/branding/nightly/moz.build index 9045cee11bb858..dd081ac44496d1 100644 --- a/browser/branding/nightly/moz.build +++ b/browser/branding/nightly/moz.build @@ -4,10 +4,10 @@ # License, v. 2.0. If a copy of the MPL was not distributed with this # file, You can obtain one at http://mozilla.org/MPL/2.0/. -DIRS += ['content', 'locales'] +DIRS += ["content", "locales"] -DIST_SUBDIR = 'browser' -export('DIST_SUBDIR') +DIST_SUBDIR = "browser" +export("DIST_SUBDIR") -include('../branding-common.mozbuild') +include("../branding-common.mozbuild") FirefoxBranding() diff --git a/browser/branding/official/content/moz.build b/browser/branding/official/content/moz.build index eb4454d28f88bf..d988c0ff9b162c 100644 --- a/browser/branding/official/content/moz.build +++ b/browser/branding/official/content/moz.build @@ -4,4 +4,4 @@ # License, v. 2.0. If a copy of the MPL was not distributed with this # file, You can obtain one at http://mozilla.org/MPL/2.0/. 
-JAR_MANIFESTS += ['jar.mn'] \ No newline at end of file +JAR_MANIFESTS += ["jar.mn"] diff --git a/browser/branding/official/locales/moz.build b/browser/branding/official/locales/moz.build index eb4454d28f88bf..d988c0ff9b162c 100644 --- a/browser/branding/official/locales/moz.build +++ b/browser/branding/official/locales/moz.build @@ -4,4 +4,4 @@ # License, v. 2.0. If a copy of the MPL was not distributed with this # file, You can obtain one at http://mozilla.org/MPL/2.0/. -JAR_MANIFESTS += ['jar.mn'] \ No newline at end of file +JAR_MANIFESTS += ["jar.mn"] diff --git a/browser/branding/official/moz.build b/browser/branding/official/moz.build index 9045cee11bb858..dd081ac44496d1 100644 --- a/browser/branding/official/moz.build +++ b/browser/branding/official/moz.build @@ -4,10 +4,10 @@ # License, v. 2.0. If a copy of the MPL was not distributed with this # file, You can obtain one at http://mozilla.org/MPL/2.0/. -DIRS += ['content', 'locales'] +DIRS += ["content", "locales"] -DIST_SUBDIR = 'browser' -export('DIST_SUBDIR') +DIST_SUBDIR = "browser" +export("DIST_SUBDIR") -include('../branding-common.mozbuild') +include("../branding-common.mozbuild") FirefoxBranding() diff --git a/browser/branding/unofficial/content/moz.build b/browser/branding/unofficial/content/moz.build index eb4454d28f88bf..d988c0ff9b162c 100644 --- a/browser/branding/unofficial/content/moz.build +++ b/browser/branding/unofficial/content/moz.build @@ -4,4 +4,4 @@ # License, v. 2.0. If a copy of the MPL was not distributed with this # file, You can obtain one at http://mozilla.org/MPL/2.0/. -JAR_MANIFESTS += ['jar.mn'] \ No newline at end of file +JAR_MANIFESTS += ["jar.mn"] diff --git a/browser/branding/unofficial/locales/moz.build b/browser/branding/unofficial/locales/moz.build index 8bad13124d5a4a..fff7035065b0ae 100644 --- a/browser/branding/unofficial/locales/moz.build +++ b/browser/branding/unofficial/locales/moz.build @@ -4,6 +4,6 @@ # License, v. 2.0. If a copy of the MPL was not distributed with this # file, You can obtain one at http://mozilla.org/MPL/2.0/. -DEFINES['MOZ_DISTRIBUTION_ID_UNQUOTED'] = CONFIG['MOZ_DISTRIBUTION_ID'] +DEFINES["MOZ_DISTRIBUTION_ID_UNQUOTED"] = CONFIG["MOZ_DISTRIBUTION_ID"] -JAR_MANIFESTS += ['jar.mn'] \ No newline at end of file +JAR_MANIFESTS += ["jar.mn"] diff --git a/browser/branding/unofficial/moz.build b/browser/branding/unofficial/moz.build index 9045cee11bb858..dd081ac44496d1 100644 --- a/browser/branding/unofficial/moz.build +++ b/browser/branding/unofficial/moz.build @@ -4,10 +4,10 @@ # License, v. 2.0. If a copy of the MPL was not distributed with this # file, You can obtain one at http://mozilla.org/MPL/2.0/. 
-DIRS += ['content', 'locales'] +DIRS += ["content", "locales"] -DIST_SUBDIR = 'browser' -export('DIST_SUBDIR') +DIST_SUBDIR = "browser" +export("DIST_SUBDIR") -include('../branding-common.mozbuild') +include("../branding-common.mozbuild") FirefoxBranding() diff --git a/browser/components/about/moz.build b/browser/components/about/moz.build index a178cd21d2e84e..9be34f57b927d6 100644 --- a/browser/components/about/moz.build +++ b/browser/components/about/moz.build @@ -8,25 +8,25 @@ with Files("**"): BUG_COMPONENT = ("Firefox", "General") EXPORTS.mozilla.browser += [ - 'AboutRedirector.h', + "AboutRedirector.h", ] -XPCSHELL_TESTS_MANIFESTS += ['test/unit/xpcshell.ini'] +XPCSHELL_TESTS_MANIFESTS += ["test/unit/xpcshell.ini"] SOURCES += [ - 'AboutRedirector.cpp', + "AboutRedirector.cpp", ] XPCOM_MANIFESTS += [ - 'components.conf', + "components.conf", ] -FINAL_LIBRARY = 'browsercomps' +FINAL_LIBRARY = "browsercomps" LOCAL_INCLUDES += [ - '../build', - '/dom/base', - '/ipc/chromium/src', + "../build", + "/dom/base", + "/ipc/chromium/src", ] -include('/ipc/chromium/chromium-config.mozbuild') +include("/ipc/chromium/chromium-config.mozbuild") diff --git a/browser/components/aboutconfig/moz.build b/browser/components/aboutconfig/moz.build index 7ff4b7ef8522e6..47182555bb720e 100644 --- a/browser/components/aboutconfig/moz.build +++ b/browser/components/aboutconfig/moz.build @@ -4,6 +4,6 @@ # License, v. 2.0. If a copy of the MPL was not distributed with this # file, You can obtain one at http://mozilla.org/MPL/2.0/. -BROWSER_CHROME_MANIFESTS += ['test/browser/browser.ini'] +BROWSER_CHROME_MANIFESTS += ["test/browser/browser.ini"] -JAR_MANIFESTS += ['jar.mn'] +JAR_MANIFESTS += ["jar.mn"] diff --git a/browser/components/aboutlogins/moz.build b/browser/components/aboutlogins/moz.build index d912a6f407b78e..ef815d60998662 100644 --- a/browser/components/aboutlogins/moz.build +++ b/browser/components/aboutlogins/moz.build @@ -4,20 +4,20 @@ # License, v. 2.0. If a copy of the MPL was not distributed with this # file, You can obtain one at http://mozilla.org/MPL/2.0/. 
-JAR_MANIFESTS += ['jar.mn'] +JAR_MANIFESTS += ["jar.mn"] -with Files('**'): - BUG_COMPONENT = ('Firefox', 'about:logins') +with Files("**"): + BUG_COMPONENT = ("Firefox", "about:logins") EXTRA_JS_MODULES += [ - 'LoginBreaches.jsm', + "LoginBreaches.jsm", ] FINAL_TARGET_FILES.actors += [ - 'AboutLoginsChild.jsm', - 'AboutLoginsParent.jsm', + "AboutLoginsChild.jsm", + "AboutLoginsParent.jsm", ] -BROWSER_CHROME_MANIFESTS += ['tests/browser/browser.ini'] -MOCHITEST_CHROME_MANIFESTS += ['tests/chrome/chrome.ini'] -XPCSHELL_TESTS_MANIFESTS += ['tests/unit/xpcshell.ini'] +BROWSER_CHROME_MANIFESTS += ["tests/browser/browser.ini"] +MOCHITEST_CHROME_MANIFESTS += ["tests/chrome/chrome.ini"] +XPCSHELL_TESTS_MANIFESTS += ["tests/unit/xpcshell.ini"] diff --git a/browser/components/attribution/moz.build b/browser/components/attribution/moz.build index 1cd9952495ae9b..9d8a560eda802f 100644 --- a/browser/components/attribution/moz.build +++ b/browser/components/attribution/moz.build @@ -7,31 +7,31 @@ with Files("**"): BUG_COMPONENT = ("Toolkit", "Telemetry") -XPCSHELL_TESTS_MANIFESTS += ['test/xpcshell/xpcshell.ini'] +XPCSHELL_TESTS_MANIFESTS += ["test/xpcshell/xpcshell.ini"] -BROWSER_CHROME_MANIFESTS += ['test/browser/browser.ini'] +BROWSER_CHROME_MANIFESTS += ["test/browser/browser.ini"] EXTRA_JS_MODULES += [ - 'AttributionCode.jsm', + "AttributionCode.jsm", ] -if CONFIG['MOZ_WIDGET_TOOLKIT'] == 'cocoa': +if CONFIG["MOZ_WIDGET_TOOLKIT"] == "cocoa": XPIDL_SOURCES += [ - 'nsIMacAttribution.idl', + "nsIMacAttribution.idl", ] - XPIDL_MODULE = 'attribution' + XPIDL_MODULE = "attribution" EXPORTS += [ - 'nsMacAttribution.h', + "nsMacAttribution.h", ] SOURCES += [ - 'nsMacAttribution.cpp', + "nsMacAttribution.cpp", ] - FINAL_LIBRARY = 'browsercomps' + FINAL_LIBRARY = "browsercomps" EXTRA_JS_MODULES += [ - 'MacAttribution.jsm', + "MacAttribution.jsm", ] diff --git a/browser/components/build/moz.build b/browser/components/build/moz.build index 170f181bf871f0..901c6ddfed8dcf 100644 --- a/browser/components/build/moz.build +++ b/browser/components/build/moz.build @@ -8,15 +8,15 @@ with Files("**"): BUG_COMPONENT = ("Firefox Build System", "General") XPCOM_MANIFESTS += [ - 'components.conf', + "components.conf", ] -Library('browsercomps') -FINAL_LIBRARY = 'xul' +Library("browsercomps") +FINAL_LIBRARY = "xul" LOCAL_INCLUDES += [ - '../about', - '../migration', - '../sessionstore', - '../shell', + "../about", + "../migration", + "../sessionstore", + "../shell", ] diff --git a/browser/components/contextualidentity/moz.build b/browser/components/contextualidentity/moz.build index 9364d1f2e5e3bb..4101066ad3c888 100644 --- a/browser/components/contextualidentity/moz.build +++ b/browser/components/contextualidentity/moz.build @@ -5,10 +5,10 @@ # file, You can obtain one at http://mozilla.org/MPL/2.0/. BROWSER_CHROME_MANIFESTS += [ - 'test/browser/browser.ini', + "test/browser/browser.ini", ] -JAR_MANIFESTS += ['jar.mn'] +JAR_MANIFESTS += ["jar.mn"] -with Files('**'): - BUG_COMPONENT = ('Core', 'DOM: Security') +with Files("**"): + BUG_COMPONENT = ("Core", "DOM: Security") diff --git a/browser/components/customizableui/content/moz.build b/browser/components/customizableui/content/moz.build index eb4454d28f88bf..d988c0ff9b162c 100644 --- a/browser/components/customizableui/content/moz.build +++ b/browser/components/customizableui/content/moz.build @@ -4,4 +4,4 @@ # License, v. 2.0. If a copy of the MPL was not distributed with this # file, You can obtain one at http://mozilla.org/MPL/2.0/. 
-JAR_MANIFESTS += ['jar.mn'] \ No newline at end of file +JAR_MANIFESTS += ["jar.mn"] diff --git a/browser/components/customizableui/moz.build b/browser/components/customizableui/moz.build index 98c4b06ad3135e..e5cc2498e7c3c8 100644 --- a/browser/components/customizableui/moz.build +++ b/browser/components/customizableui/moz.build @@ -5,23 +5,23 @@ # file, You can obtain one at http://mozilla.org/MPL/2.0/. DIRS += [ - 'content', + "content", ] -BROWSER_CHROME_MANIFESTS += ['test/browser.ini'] +BROWSER_CHROME_MANIFESTS += ["test/browser.ini"] TESTING_JS_MODULES += [ - 'test/CustomizableUITestUtils.jsm', + "test/CustomizableUITestUtils.jsm", ] EXTRA_JS_MODULES += [ - 'CustomizableUI.jsm', - 'CustomizableWidgets.jsm', - 'CustomizeMode.jsm', - 'DragPositionManager.jsm', - 'PanelMultiView.jsm', - 'SearchWidgetTracker.jsm', + "CustomizableUI.jsm", + "CustomizableWidgets.jsm", + "CustomizeMode.jsm", + "DragPositionManager.jsm", + "PanelMultiView.jsm", + "SearchWidgetTracker.jsm", ] -with Files('**'): - BUG_COMPONENT = ('Firefox', 'Toolbars and Customization') +with Files("**"): + BUG_COMPONENT = ("Firefox", "Toolbars and Customization") diff --git a/browser/components/doh/moz.build b/browser/components/doh/moz.build index c3de603a2f71b8..0cdc2dff92617d 100644 --- a/browser/components/doh/moz.build +++ b/browser/components/doh/moz.build @@ -4,15 +4,15 @@ # License, v. 2.0. If a copy of the MPL was not distributed with this # file, You can obtain one at http://mozilla.org/MPL/2.0/. -with Files('**'): - BUG_COMPONENT = ('Firefox', 'Security') +with Files("**"): + BUG_COMPONENT = ("Firefox", "Security") EXTRA_JS_MODULES += [ - 'DoHConfig.jsm', - 'DoHController.jsm', - 'DoHHeuristics.jsm', - 'TRRPerformance.jsm', + "DoHConfig.jsm", + "DoHController.jsm", + "DoHHeuristics.jsm", + "TRRPerformance.jsm", ] -XPCSHELL_TESTS_MANIFESTS += ['test/unit/xpcshell.ini'] -BROWSER_CHROME_MANIFESTS += ['test/browser/browser.ini'] +XPCSHELL_TESTS_MANIFESTS += ["test/unit/xpcshell.ini"] +BROWSER_CHROME_MANIFESTS += ["test/browser/browser.ini"] diff --git a/browser/components/downloads/moz.build b/browser/components/downloads/moz.build index 8ec54f33a1dd62..b98c027fd89774 100644 --- a/browser/components/downloads/moz.build +++ b/browser/components/downloads/moz.build @@ -4,27 +4,27 @@ # License, v. 2.0. If a copy of the MPL was not distributed with this # file, You can obtain one at http://mozilla.org/MPL/2.0/. 
-with Files('*'): - BUG_COMPONENT = ('Firefox', 'Downloads Panel') +with Files("*"): + BUG_COMPONENT = ("Firefox", "Downloads Panel") -BROWSER_CHROME_MANIFESTS += ['test/browser/browser.ini'] +BROWSER_CHROME_MANIFESTS += ["test/browser/browser.ini"] -JAR_MANIFESTS += ['jar.mn'] +JAR_MANIFESTS += ["jar.mn"] EXTRA_JS_MODULES += [ - 'DownloadsCommon.jsm', - 'DownloadsSubview.jsm', - 'DownloadsTaskbar.jsm', - 'DownloadsViewableInternally.jsm', - 'DownloadsViewUI.jsm', + "DownloadsCommon.jsm", + "DownloadsSubview.jsm", + "DownloadsTaskbar.jsm", + "DownloadsViewableInternally.jsm", + "DownloadsViewUI.jsm", ] -toolkit = CONFIG['MOZ_WIDGET_TOOLKIT'] +toolkit = CONFIG["MOZ_WIDGET_TOOLKIT"] -if toolkit == 'cocoa': - EXTRA_JS_MODULES += ['DownloadsMacFinderProgress.jsm'] +if toolkit == "cocoa": + EXTRA_JS_MODULES += ["DownloadsMacFinderProgress.jsm"] -with Files('**'): - BUG_COMPONENT = ('Firefox', 'Downloads Panel') +with Files("**"): + BUG_COMPONENT = ("Firefox", "Downloads Panel") -XPCSHELL_TESTS_MANIFESTS += ['test/unit/xpcshell.ini'] +XPCSHELL_TESTS_MANIFESTS += ["test/unit/xpcshell.ini"] diff --git a/browser/components/enterprisepolicies/helpers/moz.build b/browser/components/enterprisepolicies/helpers/moz.build index 947d92c31d84e9..648745cd3fbf02 100644 --- a/browser/components/enterprisepolicies/helpers/moz.build +++ b/browser/components/enterprisepolicies/helpers/moz.build @@ -8,7 +8,7 @@ with Files("**"): BUG_COMPONENT = ("Firefox", "Enterprise Policies") EXTRA_JS_MODULES.policies += [ - 'BookmarksPolicies.jsm', - 'ProxyPolicies.jsm', - 'WebsiteFilter.jsm', + "BookmarksPolicies.jsm", + "ProxyPolicies.jsm", + "WebsiteFilter.jsm", ] diff --git a/browser/components/enterprisepolicies/moz.build b/browser/components/enterprisepolicies/moz.build index 165bcb0eb97d39..e7ae4b19c5b746 100644 --- a/browser/components/enterprisepolicies/moz.build +++ b/browser/components/enterprisepolicies/moz.build @@ -7,21 +7,19 @@ with Files("**"): BUG_COMPONENT = ("Firefox", "Enterprise Policies") -SPHINX_TREES['docs'] = 'docs' +SPHINX_TREES["docs"] = "docs" DIRS += [ - 'helpers', - 'schemas', + "helpers", + "schemas", ] -TEST_DIRS += [ - 'tests' -] +TEST_DIRS += ["tests"] EXTRA_JS_MODULES.policies += [ - 'Policies.jsm', + "Policies.jsm", ] -FINAL_LIBRARY = 'browsercomps' +FINAL_LIBRARY = "browsercomps" -JAR_MANIFESTS += ['jar.mn'] +JAR_MANIFESTS += ["jar.mn"] diff --git a/browser/components/enterprisepolicies/schemas/moz.build b/browser/components/enterprisepolicies/schemas/moz.build index 54c07f17316945..19e17705e798c9 100644 --- a/browser/components/enterprisepolicies/schemas/moz.build +++ b/browser/components/enterprisepolicies/schemas/moz.build @@ -8,5 +8,5 @@ with Files("**"): BUG_COMPONENT = ("Firefox", "Enterprise Policies") EXTRA_PP_JS_MODULES.policies += [ - 'schema.jsm', + "schema.jsm", ] diff --git a/browser/components/enterprisepolicies/tests/moz.build b/browser/components/enterprisepolicies/tests/moz.build index 023442ca994962..abb21770236d8f 100644 --- a/browser/components/enterprisepolicies/tests/moz.build +++ b/browser/components/enterprisepolicies/tests/moz.build @@ -5,14 +5,14 @@ # file, You can obtain one at http://mozilla.org/MPL/2.0/. 
BROWSER_CHROME_MANIFESTS += [ - 'browser/browser.ini', - 'browser/disable_app_update/browser.ini', - 'browser/disable_default_bookmarks/browser.ini', - 'browser/disable_developer_tools/browser.ini', - 'browser/disable_forget_button/browser.ini', - 'browser/disable_fxscreenshots/browser.ini', - 'browser/hardware_acceleration/browser.ini', - 'browser/managedbookmarks/browser.ini', + "browser/browser.ini", + "browser/disable_app_update/browser.ini", + "browser/disable_default_bookmarks/browser.ini", + "browser/disable_developer_tools/browser.ini", + "browser/disable_forget_button/browser.ini", + "browser/disable_fxscreenshots/browser.ini", + "browser/hardware_acceleration/browser.ini", + "browser/managedbookmarks/browser.ini", ] -XPCSHELL_TESTS_MANIFESTS += ['xpcshell/xpcshell.ini'] +XPCSHELL_TESTS_MANIFESTS += ["xpcshell/xpcshell.ini"] diff --git a/browser/components/extensions/moz.build b/browser/components/extensions/moz.build index e25ba29dcbc757..5c1ac44fc56917 100644 --- a/browser/components/extensions/moz.build +++ b/browser/components/extensions/moz.build @@ -7,25 +7,25 @@ with Files("**"): BUG_COMPONENT = ("WebExtensions", "Untriaged") -JAR_MANIFESTS += ['jar.mn'] +JAR_MANIFESTS += ["jar.mn"] EXTRA_COMPONENTS += [ - 'extensions-browser.manifest', + "extensions-browser.manifest", ] EXTRA_JS_MODULES += [ - 'ExtensionControlledPopup.jsm', - 'ExtensionPopups.jsm', + "ExtensionControlledPopup.jsm", + "ExtensionPopups.jsm", ] -DIRS += ['schemas'] +DIRS += ["schemas"] BROWSER_CHROME_MANIFESTS += [ - 'test/browser/browser-private.ini', - 'test/browser/browser.ini', + "test/browser/browser-private.ini", + "test/browser/browser.ini", ] -MOCHITEST_MANIFESTS += ['test/mochitest/mochitest.ini'] +MOCHITEST_MANIFESTS += ["test/mochitest/mochitest.ini"] XPCSHELL_TESTS_MANIFESTS += [ - 'test/xpcshell/xpcshell.ini', + "test/xpcshell/xpcshell.ini", ] diff --git a/browser/components/extensions/schemas/moz.build b/browser/components/extensions/schemas/moz.build index aac3a838c4c239..d988c0ff9b162c 100644 --- a/browser/components/extensions/schemas/moz.build +++ b/browser/components/extensions/schemas/moz.build @@ -4,4 +4,4 @@ # License, v. 2.0. If a copy of the MPL was not distributed with this # file, You can obtain one at http://mozilla.org/MPL/2.0/. -JAR_MANIFESTS += ['jar.mn'] +JAR_MANIFESTS += ["jar.mn"] diff --git a/browser/components/fxmonitor/moz.build b/browser/components/fxmonitor/moz.build index 9ecbb6d3682979..1315cd4f5b072a 100644 --- a/browser/components/fxmonitor/moz.build +++ b/browser/components/fxmonitor/moz.build @@ -4,14 +4,13 @@ # License, v. 2.0. If a copy of the MPL was not distributed with this # file, You can obtain one at http://mozilla.org/MPL/2.0/. -with Files('**'): - BUG_COMPONENT = ('Firefox', 'Firefox Monitor') +with Files("**"): + BUG_COMPONENT = ("Firefox", "Firefox Monitor") EXTRA_JS_MODULES += [ - 'FirefoxMonitor.jsm', + "FirefoxMonitor.jsm", ] -JAR_MANIFESTS += ['jar.mn'] - -BROWSER_CHROME_MANIFESTS += ['test/browser/browser.ini'] +JAR_MANIFESTS += ["jar.mn"] +BROWSER_CHROME_MANIFESTS += ["test/browser/browser.ini"] diff --git a/browser/components/installerprefs/moz.build b/browser/components/installerprefs/moz.build index 064c580224cde4..853f2d888a806c 100644 --- a/browser/components/installerprefs/moz.build +++ b/browser/components/installerprefs/moz.build @@ -4,15 +4,15 @@ # License, v. 2.0. If a copy of the MPL was not distributed with this # file, You can obtain one at http://mozilla.org/MPL/2.0/. 
-with Files('**'): - BUG_COMPONENT = ('Firefox', 'Installer') +with Files("**"): + BUG_COMPONENT = ("Firefox", "Installer") -XPCSHELL_TESTS_MANIFESTS += ['test/unit/xpcshell.ini'] +XPCSHELL_TESTS_MANIFESTS += ["test/unit/xpcshell.ini"] EXTRA_JS_MODULES += [ - 'InstallerPrefs.jsm', + "InstallerPrefs.jsm", ] XPCOM_MANIFESTS += [ - 'components.conf', + "components.conf", ] diff --git a/browser/components/ion/moz.build b/browser/components/ion/moz.build index 44c5110bbde1b6..24736258a6f293 100644 --- a/browser/components/ion/moz.build +++ b/browser/components/ion/moz.build @@ -4,14 +4,14 @@ # License, v. 2.0. If a copy of the MPL was not distributed with this # file, You can obtain one at http://mozilla.org/MPL/2.0/. -BROWSER_CHROME_MANIFESTS += ['test/browser/browser.ini'] +BROWSER_CHROME_MANIFESTS += ["test/browser/browser.ini"] -JAR_MANIFESTS += ['jar.mn'] +JAR_MANIFESTS += ["jar.mn"] -with Files('**'): - BUG_COMPONENT = ('Firefox', 'General') +with Files("**"): + BUG_COMPONENT = ("Firefox", "General") TESTING_JS_MODULES += [ - 'schemas/IonContentSchema.json', - 'schemas/IonStudyAddonsSchema.json', + "schemas/IonContentSchema.json", + "schemas/IonStudyAddonsSchema.json", ] diff --git a/browser/components/migration/moz.build b/browser/components/migration/moz.build index e56a3329dc17b4..778c7f938bf299 100644 --- a/browser/components/migration/moz.build +++ b/browser/components/migration/moz.build @@ -4,64 +4,64 @@ # License, v. 2.0. If a copy of the MPL was not distributed with this # file, You can obtain one at http://mozilla.org/MPL/2.0/. -XPCSHELL_TESTS_MANIFESTS += ['tests/unit/xpcshell.ini'] +XPCSHELL_TESTS_MANIFESTS += ["tests/unit/xpcshell.ini"] -MARIONETTE_UNIT_MANIFESTS += ['tests/marionette/manifest.ini'] +MARIONETTE_UNIT_MANIFESTS += ["tests/marionette/manifest.ini"] -JAR_MANIFESTS += ['jar.mn'] +JAR_MANIFESTS += ["jar.mn"] XPIDL_SOURCES += [ - 'nsIBrowserProfileMigrator.idl', + "nsIBrowserProfileMigrator.idl", ] -XPIDL_MODULE = 'migration' +XPIDL_MODULE = "migration" EXTRA_JS_MODULES += [ - 'ChromeMigrationUtils.jsm', - 'ChromeProfileMigrator.jsm', - 'FirefoxProfileMigrator.jsm', - 'MigrationUtils.jsm', - 'ProfileMigrator.jsm', + "ChromeMigrationUtils.jsm", + "ChromeProfileMigrator.jsm", + "FirefoxProfileMigrator.jsm", + "MigrationUtils.jsm", + "ProfileMigrator.jsm", ] -if CONFIG['OS_ARCH'] == 'WINNT': - if CONFIG['ENABLE_TESTS']: +if CONFIG["OS_ARCH"] == "WINNT": + if CONFIG["ENABLE_TESTS"]: DIRS += [ - 'tests/unit/insertIEHistory', + "tests/unit/insertIEHistory", ] SOURCES += [ - 'nsIEHistoryEnumerator.cpp', + "nsIEHistoryEnumerator.cpp", ] EXTRA_JS_MODULES += [ - '360seProfileMigrator.jsm', - 'ChromeWindowsLoginCrypto.jsm', - 'EdgeProfileMigrator.jsm', - 'ESEDBReader.jsm', - 'IEProfileMigrator.jsm', - 'MSMigrationUtils.jsm', + "360seProfileMigrator.jsm", + "ChromeWindowsLoginCrypto.jsm", + "EdgeProfileMigrator.jsm", + "ESEDBReader.jsm", + "IEProfileMigrator.jsm", + "MSMigrationUtils.jsm", ] -if CONFIG['MOZ_WIDGET_TOOLKIT'] == 'cocoa': +if CONFIG["MOZ_WIDGET_TOOLKIT"] == "cocoa": EXPORTS += [ - 'nsKeychainMigrationUtils.h', + "nsKeychainMigrationUtils.h", ] EXTRA_JS_MODULES += [ - 'ChromeMacOSLoginCrypto.jsm', - 'SafariProfileMigrator.jsm', + "ChromeMacOSLoginCrypto.jsm", + "SafariProfileMigrator.jsm", ] SOURCES += [ - 'nsKeychainMigrationUtils.mm', + "nsKeychainMigrationUtils.mm", ] XPIDL_SOURCES += [ - 'nsIKeychainMigrationUtils.idl', + "nsIKeychainMigrationUtils.idl", ] XPCOM_MANIFESTS += [ - 'components.conf', + "components.conf", ] -FINAL_LIBRARY = 'browsercomps' 
+FINAL_LIBRARY = "browsercomps" -with Files('**'): - BUG_COMPONENT = ('Firefox', 'Migration') +with Files("**"): + BUG_COMPONENT = ("Firefox", "Migration") diff --git a/browser/components/migration/tests/unit/insertIEHistory/moz.build b/browser/components/migration/tests/unit/insertIEHistory/moz.build index 76ac757fb221e3..33c261c74688e2 100644 --- a/browser/components/migration/tests/unit/insertIEHistory/moz.build +++ b/browser/components/migration/tests/unit/insertIEHistory/moz.build @@ -4,14 +4,14 @@ # License, v. 2.0. If a copy of the MPL was not distributed with this # file, You can obtain one at http://mozilla.org/MPL/2.0/. -FINAL_TARGET = '_tests/xpcshell/browser/components/migration/tests/unit' +FINAL_TARGET = "_tests/xpcshell/browser/components/migration/tests/unit" -Program('InsertIEHistory') +Program("InsertIEHistory") OS_LIBS += [ - 'ole32', + "ole32", ] SOURCES += [ - 'InsertIEHistory.cpp', + "InsertIEHistory.cpp", ] NO_PGO = True diff --git a/browser/components/moz.build b/browser/components/moz.build index d9906d053f6b68..ba617de9d82f2e 100644 --- a/browser/components/moz.build +++ b/browser/components/moz.build @@ -22,88 +22,86 @@ with Files("tests/unit/test_distribution.js"): with Files("safebrowsing/**"): BUG_COMPONENT = ("Toolkit", "Safe Browsing") -with Files('controlcenter/**'): - BUG_COMPONENT = ('Firefox', 'General') +with Files("controlcenter/**"): + BUG_COMPONENT = ("Firefox", "General") DIRS += [ - 'about', - 'aboutconfig', - 'aboutlogins', - 'attribution', - 'contextualidentity', - 'customizableui', - 'doh', - 'downloads', - 'enterprisepolicies', - 'extensions', - 'fxmonitor', - 'migration', - 'newtab', - 'originattributes', - 'ion', - 'places', - 'pocket', - 'preferences', - 'privatebrowsing', - 'prompts', - 'protections', - 'protocolhandler', - 'resistfingerprinting', - 'search', - 'sessionstore', - 'shell', - 'ssb', - 'syncedtabs', - 'uitour', - 'urlbar', - 'translation', + "about", + "aboutconfig", + "aboutlogins", + "attribution", + "contextualidentity", + "customizableui", + "doh", + "downloads", + "enterprisepolicies", + "extensions", + "fxmonitor", + "migration", + "newtab", + "originattributes", + "ion", + "places", + "pocket", + "preferences", + "privatebrowsing", + "prompts", + "protections", + "protocolhandler", + "resistfingerprinting", + "search", + "sessionstore", + "shell", + "ssb", + "syncedtabs", + "uitour", + "urlbar", + "translation", ] -DIRS += ['build'] +DIRS += ["build"] -if CONFIG['NIGHTLY_BUILD']: +if CONFIG["NIGHTLY_BUILD"]: DIRS += [ - 'payments', + "payments", ] -if CONFIG['MOZ_WIDGET_TOOLKIT'] == 'cocoa': - DIRS += ['touchbar'] -elif CONFIG['MOZ_WIDGET_TOOLKIT'] == 'windows': - DIRS += ['installerprefs'] +if CONFIG["MOZ_WIDGET_TOOLKIT"] == "cocoa": + DIRS += ["touchbar"] +elif CONFIG["MOZ_WIDGET_TOOLKIT"] == "windows": + DIRS += ["installerprefs"] XPIDL_SOURCES += [ - 'nsIBrowserHandler.idl', + "nsIBrowserHandler.idl", ] -XPIDL_MODULE = 'browsercompsbase' +XPIDL_MODULE = "browsercompsbase" XPCOM_MANIFESTS += [ - 'components.conf', + "components.conf", ] EXTRA_COMPONENTS += [ - 'BrowserComponents.manifest', - 'tests/startupRecorder.js', - 'tests/testComponents.manifest', + "BrowserComponents.manifest", + "tests/startupRecorder.js", + "tests/testComponents.manifest", ] EXTRA_JS_MODULES += [ - 'BrowserContentHandler.jsm', - 'BrowserGlue.jsm', - 'distribution.js', + "BrowserContentHandler.jsm", + "BrowserGlue.jsm", + "distribution.js", ] BROWSER_CHROME_MANIFESTS += [ - 'safebrowsing/content/test/browser.ini', - 
'tests/browser/browser.ini', + "safebrowsing/content/test/browser.ini", + "tests/browser/browser.ini", ] -if CONFIG['MOZ_UPDATER']: +if CONFIG["MOZ_UPDATER"]: BROWSER_CHROME_MANIFESTS += [ - 'tests/browser/whats_new_page/browser.ini', + "tests/browser/whats_new_page/browser.ini", ] -XPCSHELL_TESTS_MANIFESTS += [ - 'tests/unit/xpcshell.ini' -] +XPCSHELL_TESTS_MANIFESTS += ["tests/unit/xpcshell.ini"] diff --git a/browser/components/newtab/moz.build b/browser/components/newtab/moz.build index 8b2d7476c40c30..73a6d512841eda 100644 --- a/browser/components/newtab/moz.build +++ b/browser/components/newtab/moz.build @@ -8,36 +8,36 @@ with Files("**"): BUG_COMPONENT = ("Firefox", "New Tab Page") BROWSER_CHROME_MANIFESTS += [ - 'test/browser/abouthomecache/browser.ini', - 'test/browser/browser.ini', + "test/browser/abouthomecache/browser.ini", + "test/browser/browser.ini", ] -SPHINX_TREES['docs'] = 'docs' -SPHINX_TREES['content-src/asrouter/docs'] = 'content-src/asrouter/docs' +SPHINX_TREES["docs"] = "docs" +SPHINX_TREES["content-src/asrouter/docs"] = "content-src/asrouter/docs" XPCSHELL_TESTS_MANIFESTS += [ - 'test/xpcshell/xpcshell.ini', + "test/xpcshell/xpcshell.ini", ] XPIDL_SOURCES += [ - 'nsIAboutNewTabService.idl', + "nsIAboutNewTabService.idl", ] -XPIDL_MODULE = 'browser-newtab' +XPIDL_MODULE = "browser-newtab" EXTRA_JS_MODULES += [ - 'AboutNewTabService.jsm', + "AboutNewTabService.jsm", ] FINAL_TARGET_FILES.actors += [ - 'aboutwelcome/AboutWelcomeChild.jsm', - 'aboutwelcome/AboutWelcomeParent.jsm', - 'actors/ASRouterChild.jsm', - 'actors/ASRouterParent.jsm', + "aboutwelcome/AboutWelcomeChild.jsm", + "aboutwelcome/AboutWelcomeParent.jsm", + "actors/ASRouterChild.jsm", + "actors/ASRouterParent.jsm", ] XPCOM_MANIFESTS += [ - 'components.conf', + "components.conf", ] -JAR_MANIFESTS += ['jar.mn'] +JAR_MANIFESTS += ["jar.mn"] diff --git a/browser/components/originattributes/moz.build b/browser/components/originattributes/moz.build index 502e5f2b1d6cd8..707d61ce1a1259 100644 --- a/browser/components/originattributes/moz.build +++ b/browser/components/originattributes/moz.build @@ -5,12 +5,10 @@ # file, You can obtain one at http://mozilla.org/MPL/2.0/. BROWSER_CHROME_MANIFESTS += [ - 'test/browser/browser.ini', + "test/browser/browser.ini", ] -MOCHITEST_MANIFESTS += [ - 'test/mochitest/mochitest.ini' -] +MOCHITEST_MANIFESTS += ["test/mochitest/mochitest.ini"] -with Files('**'): - BUG_COMPONENT = ('Core', 'DOM: Security') +with Files("**"): + BUG_COMPONENT = ("Core", "DOM: Security") diff --git a/browser/components/payments/moz.build b/browser/components/payments/moz.build index 1b4604c754dbaa..6c764f85fa99f1 100644 --- a/browser/components/payments/moz.build +++ b/browser/components/payments/moz.build @@ -4,33 +4,33 @@ # License, v. 2.0. If a copy of the MPL was not distributed with this # file, You can obtain one at http://mozilla.org/MPL/2.0/. 
-BROWSER_CHROME_MANIFESTS += ['test/browser/browser.ini'] +BROWSER_CHROME_MANIFESTS += ["test/browser/browser.ini"] -with Files('**'): - BUG_COMPONENT = ('Firefox', 'WebPayments UI') +with Files("**"): + BUG_COMPONENT = ("Firefox", "WebPayments UI") EXTRA_JS_MODULES += [ - 'PaymentUIService.jsm', + "PaymentUIService.jsm", ] XPCOM_MANIFESTS += [ - 'components.conf', + "components.conf", ] -JAR_MANIFESTS += ['jar.mn'] +JAR_MANIFESTS += ["jar.mn"] MOCHITEST_MANIFESTS += [ - 'test/mochitest/formautofill/mochitest.ini', - 'test/mochitest/mochitest.ini', + "test/mochitest/formautofill/mochitest.ini", + "test/mochitest/mochitest.ini", ] -SPHINX_TREES['docs'] = 'docs' +SPHINX_TREES["docs"] = "docs" -with Files('docs/**'): - SCHEDULES.exclusive = ['docs'] +with Files("docs/**"): + SCHEDULES.exclusive = ["docs"] TESTING_JS_MODULES += [ - 'test/PaymentTestUtils.jsm', + "test/PaymentTestUtils.jsm", ] -XPCSHELL_TESTS_MANIFESTS += ['test/unit/xpcshell.ini'] +XPCSHELL_TESTS_MANIFESTS += ["test/unit/xpcshell.ini"] diff --git a/browser/components/payments/server.py b/browser/components/payments/server.py index 9b85bd0b572f89..7cb8380260a0d8 100644 --- a/browser/components/payments/server.py +++ b/browser/components/payments/server.py @@ -11,14 +11,13 @@ class RequestHandler(SimpleHTTPRequestHandler, object): def translate_path(self, path): # Map autofill paths to their own directory autofillPath = "/formautofill" - if (path.startswith(autofillPath)): - path = "browser/extensions/formautofill/content" + \ - path[len(autofillPath):] + if path.startswith(autofillPath): + path = "browser/extensions/formautofill/content" + path[len(autofillPath) :] else: path = "browser/components/payments/res" + path return super(RequestHandler, self).translate_path(path) -if __name__ == '__main__': +if __name__ == "__main__": BaseHTTPServer.test(RequestHandler, BaseHTTPServer.HTTPServer) diff --git a/browser/components/places/moz.build b/browser/components/places/moz.build index e6f88b318c9d0d..ee7920749860ff 100644 --- a/browser/components/places/moz.build +++ b/browser/components/places/moz.build @@ -4,15 +4,15 @@ # License, v. 2.0. If a copy of the MPL was not distributed with this # file, You can obtain one at http://mozilla.org/MPL/2.0/. 
-XPCSHELL_TESTS_MANIFESTS += ['tests/unit/xpcshell.ini'] -MOCHITEST_CHROME_MANIFESTS += ['tests/chrome/chrome.ini'] -BROWSER_CHROME_MANIFESTS += ['tests/browser/browser.ini'] +XPCSHELL_TESTS_MANIFESTS += ["tests/unit/xpcshell.ini"] +MOCHITEST_CHROME_MANIFESTS += ["tests/chrome/chrome.ini"] +BROWSER_CHROME_MANIFESTS += ["tests/browser/browser.ini"] -JAR_MANIFESTS += ['jar.mn'] +JAR_MANIFESTS += ["jar.mn"] EXTRA_JS_MODULES += [ - 'PlacesUIUtils.jsm', + "PlacesUIUtils.jsm", ] -with Files('**'): - BUG_COMPONENT = ('Firefox', 'Bookmarks & History') +with Files("**"): + BUG_COMPONENT = ("Firefox", "Bookmarks & History") diff --git a/browser/components/pocket/moz.build b/browser/components/pocket/moz.build index f6e00831eee201..fb823da11c7dfa 100644 --- a/browser/components/pocket/moz.build +++ b/browser/components/pocket/moz.build @@ -7,6 +7,6 @@ with Files("**"): BUG_COMPONENT = ("Firefox", "Pocket") -BROWSER_CHROME_MANIFESTS += ['test/browser.ini', 'test/unit/browser.ini'] +BROWSER_CHROME_MANIFESTS += ["test/browser.ini", "test/unit/browser.ini"] -JAR_MANIFESTS += ['jar.mn'] +JAR_MANIFESTS += ["jar.mn"] diff --git a/browser/components/preferences/dialogs/moz.build b/browser/components/preferences/dialogs/moz.build index 5045224be3c27f..603c56050521d9 100644 --- a/browser/components/preferences/dialogs/moz.build +++ b/browser/components/preferences/dialogs/moz.build @@ -4,10 +4,10 @@ # License, v. 2.0. If a copy of the MPL was not distributed with this # file, You can obtain one at http://mozilla.org/MPL/2.0/. -for var in ('MOZ_APP_NAME', 'MOZ_MACBUNDLE_NAME'): +for var in ("MOZ_APP_NAME", "MOZ_MACBUNDLE_NAME"): DEFINES[var] = CONFIG[var] -if CONFIG['MOZ_WIDGET_TOOLKIT'] in ('windows', 'gtk', 'cocoa'): - DEFINES['HAVE_SHELL_SERVICE'] = 1 +if CONFIG["MOZ_WIDGET_TOOLKIT"] in ("windows", "gtk", "cocoa"): + DEFINES["HAVE_SHELL_SERVICE"] = 1 -JAR_MANIFESTS += ['jar.mn'] +JAR_MANIFESTS += ["jar.mn"] diff --git a/browser/components/preferences/moz.build b/browser/components/preferences/moz.build index 76c619ccea19c4..67163934c72fdc 100644 --- a/browser/components/preferences/moz.build +++ b/browser/components/preferences/moz.build @@ -4,22 +4,17 @@ # License, v. 2.0. If a copy of the MPL was not distributed with this # file, You can obtain one at http://mozilla.org/MPL/2.0/. -DIRS += [ - 'dialogs' -] +DIRS += ["dialogs"] -BROWSER_CHROME_MANIFESTS += [ - 'tests/browser.ini', - 'tests/siteData/browser.ini' -] +BROWSER_CHROME_MANIFESTS += ["tests/browser.ini", "tests/siteData/browser.ini"] -for var in ('MOZ_APP_NAME', 'MOZ_MACBUNDLE_NAME'): +for var in ("MOZ_APP_NAME", "MOZ_MACBUNDLE_NAME"): DEFINES[var] = CONFIG[var] -if CONFIG['MOZ_WIDGET_TOOLKIT'] in ('windows', 'gtk', 'cocoa'): - DEFINES['HAVE_SHELL_SERVICE'] = 1 +if CONFIG["MOZ_WIDGET_TOOLKIT"] in ("windows", "gtk", "cocoa"): + DEFINES["HAVE_SHELL_SERVICE"] = 1 -JAR_MANIFESTS += ['jar.mn'] +JAR_MANIFESTS += ["jar.mn"] -with Files('**'): - BUG_COMPONENT = ('Firefox', 'Preferences') +with Files("**"): + BUG_COMPONENT = ("Firefox", "Preferences") diff --git a/browser/components/privatebrowsing/moz.build b/browser/components/privatebrowsing/moz.build index 486737a7f3c46d..7078f7e4db1715 100644 --- a/browser/components/privatebrowsing/moz.build +++ b/browser/components/privatebrowsing/moz.build @@ -5,10 +5,10 @@ # file, You can obtain one at http://mozilla.org/MPL/2.0/. 
BROWSER_CHROME_MANIFESTS += [ - 'test/browser/browser.ini', + "test/browser/browser.ini", ] -JAR_MANIFESTS += ['jar.mn'] +JAR_MANIFESTS += ["jar.mn"] -with Files('**'): - BUG_COMPONENT = ('Firefox', 'Private Browsing') +with Files("**"): + BUG_COMPONENT = ("Firefox", "Private Browsing") diff --git a/browser/components/prompts/moz.build b/browser/components/prompts/moz.build index 21d5d2418e0ac7..4c6d33af909506 100644 --- a/browser/components/prompts/moz.build +++ b/browser/components/prompts/moz.build @@ -6,9 +6,9 @@ with Files("**"): BUG_COMPONENT = ("Toolkit", "Notifications and Alerts") EXTRA_JS_MODULES += [ - 'PromptCollection.jsm', + "PromptCollection.jsm", ] XPCOM_MANIFESTS += [ - 'components.conf', + "components.conf", ] diff --git a/browser/components/protections/moz.build b/browser/components/protections/moz.build index 8cb4f4bca5705f..57bd0a917fa9ba 100644 --- a/browser/components/protections/moz.build +++ b/browser/components/protections/moz.build @@ -4,9 +4,9 @@ # License, v. 2.0. If a copy of the MPL was not distributed with this # file, You can obtain one at http://mozilla.org/MPL/2.0/. -BROWSER_CHROME_MANIFESTS += ['test/browser/browser.ini'] +BROWSER_CHROME_MANIFESTS += ["test/browser/browser.ini"] -JAR_MANIFESTS += ['jar.mn'] +JAR_MANIFESTS += ["jar.mn"] -with Files('**'): - BUG_COMPONENT = ('Firefox', 'Protections UI') +with Files("**"): + BUG_COMPONENT = ("Firefox", "Protections UI") diff --git a/browser/components/protocolhandler/moz.build b/browser/components/protocolhandler/moz.build index b06afbfa9c8d12..6e23e023cf0e00 100644 --- a/browser/components/protocolhandler/moz.build +++ b/browser/components/protocolhandler/moz.build @@ -4,15 +4,15 @@ # License, v. 2.0. If a copy of the MPL was not distributed with this # file, You can obtain one at http://mozilla.org/MPL/2.0/. -BROWSER_CHROME_MANIFESTS += ['test/browser/browser.ini'] +BROWSER_CHROME_MANIFESTS += ["test/browser/browser.ini"] EXTRA_JS_MODULES += [ - 'WebProtocolHandlerRegistrar.jsm', + "WebProtocolHandlerRegistrar.jsm", ] XPCOM_MANIFESTS += [ - 'components.conf', + "components.conf", ] -with Files('**'): - BUG_COMPONENT = ('Firefox', 'General') +with Files("**"): + BUG_COMPONENT = ("Firefox", "General") diff --git a/browser/components/resistfingerprinting/moz.build b/browser/components/resistfingerprinting/moz.build index 953cc416a024db..c99c2f8f94b6d2 100644 --- a/browser/components/resistfingerprinting/moz.build +++ b/browser/components/resistfingerprinting/moz.build @@ -8,13 +8,13 @@ with Files("**"): BUG_COMPONENT = ("Core", "Security") BROWSER_CHROME_MANIFESTS += [ - 'test/browser/browser.ini', + "test/browser/browser.ini", ] MOCHITEST_MANIFESTS += [ - 'test/mochitest/mochitest.ini', + "test/mochitest/mochitest.ini", ] MOCHITEST_CHROME_MANIFESTS += [ - 'test/chrome/chrome.ini', -] \ No newline at end of file + "test/chrome/chrome.ini", +] diff --git a/browser/components/search/moz.build b/browser/components/search/moz.build index 18653c878cdc3b..efb73b82fe902a 100644 --- a/browser/components/search/moz.build +++ b/browser/components/search/moz.build @@ -5,21 +5,21 @@ # file, You can obtain one at http://mozilla.org/MPL/2.0/. 
EXTRA_JS_MODULES += [ - 'SearchOneOffs.jsm', - 'SearchTelemetry.jsm', - 'SearchUIUtils.jsm', + "SearchOneOffs.jsm", + "SearchTelemetry.jsm", + "SearchUIUtils.jsm", ] BROWSER_CHROME_MANIFESTS += [ - 'test/browser/browser.ini', - 'test/browser/google_codes/browser.ini', + "test/browser/browser.ini", + "test/browser/google_codes/browser.ini", ] -MARIONETTE_LAYOUT_MANIFESTS += ['test/marionette/manifest.ini'] +MARIONETTE_LAYOUT_MANIFESTS += ["test/marionette/manifest.ini"] -XPCSHELL_TESTS_MANIFESTS += ['test/unit/xpcshell.ini'] +XPCSHELL_TESTS_MANIFESTS += ["test/unit/xpcshell.ini"] -JAR_MANIFESTS += ['jar.mn'] +JAR_MANIFESTS += ["jar.mn"] -with Files('**'): - BUG_COMPONENT = ('Firefox', 'Search') +with Files("**"): + BUG_COMPONENT = ("Firefox", "Search") diff --git a/browser/components/search/test/marionette/test_engines_on_restart.py b/browser/components/search/test/marionette/test_engines_on_restart.py index 6a657245df32e1..5f9bfe63340028 100644 --- a/browser/components/search/test/marionette/test_engines_on_restart.py +++ b/browser/components/search/test/marionette/test_engines_on_restart.py @@ -10,12 +10,13 @@ class TestEnginesOnRestart(MarionetteTestCase): - def setUp(self): super(TestEnginesOnRestart, self).setUp() - self.marionette.enforce_gecko_prefs({ - 'browser.search.log': True, - }) + self.marionette.enforce_gecko_prefs( + { + "browser.search.log": True, + } + ) def get_default_search_engine(self): """Retrieve the identifier of the default search engine.""" diff --git a/browser/components/sessionstore/moz.build b/browser/components/sessionstore/moz.build index 07006d8d06b28d..cdbdba2b319777 100644 --- a/browser/components/sessionstore/moz.build +++ b/browser/components/sessionstore/moz.build @@ -4,31 +4,31 @@ # License, v. 2.0. If a copy of the MPL was not distributed with this # file, You can obtain one at http://mozilla.org/MPL/2.0/. -XPCSHELL_TESTS_MANIFESTS += ['test/unit/xpcshell.ini'] -BROWSER_CHROME_MANIFESTS += ['test/browser.ini'] +XPCSHELL_TESTS_MANIFESTS += ["test/unit/xpcshell.ini"] +BROWSER_CHROME_MANIFESTS += ["test/browser.ini"] -JAR_MANIFESTS += ['jar.mn'] +JAR_MANIFESTS += ["jar.mn"] EXTRA_JS_MODULES.sessionstore = [ - 'ContentRestore.jsm', - 'ContentSessionStore.jsm', - 'GlobalState.jsm', - 'RecentlyClosedTabsAndWindowsMenuUtils.jsm', - 'RunState.jsm', - 'SessionCookies.jsm', - 'SessionFile.jsm', - 'SessionMigration.jsm', - 'SessionSaver.jsm', - 'SessionStartup.jsm', - 'SessionStore.jsm', - 'SessionWorker.js', - 'SessionWorker.jsm', - 'StartupPerformance.jsm', - 'TabAttributes.jsm', - 'TabState.jsm', - 'TabStateCache.jsm', - 'TabStateFlusher.jsm', + "ContentRestore.jsm", + "ContentSessionStore.jsm", + "GlobalState.jsm", + "RecentlyClosedTabsAndWindowsMenuUtils.jsm", + "RunState.jsm", + "SessionCookies.jsm", + "SessionFile.jsm", + "SessionMigration.jsm", + "SessionSaver.jsm", + "SessionStartup.jsm", + "SessionStore.jsm", + "SessionWorker.js", + "SessionWorker.jsm", + "StartupPerformance.jsm", + "TabAttributes.jsm", + "TabState.jsm", + "TabStateCache.jsm", + "TabStateFlusher.jsm", ] -with Files('**'): - BUG_COMPONENT = ('Firefox', 'Session Restore') +with Files("**"): + BUG_COMPONENT = ("Firefox", "Session Restore") diff --git a/browser/components/shell/moz.build b/browser/components/shell/moz.build index fb4dea0b4277ee..eec5f0a879c409 100644 --- a/browser/components/shell/moz.build +++ b/browser/components/shell/moz.build @@ -5,77 +5,75 @@ # file, You can obtain one at http://mozilla.org/MPL/2.0/. 
# For BinaryPath::GetLong for Windows -LOCAL_INCLUDES += [ - '/xpcom/build' -] +LOCAL_INCLUDES += ["/xpcom/build"] -BROWSER_CHROME_MANIFESTS += ['test/browser.ini'] -MOCHITEST_CHROME_MANIFESTS += ['test/chrome.ini'] -XPCSHELL_TESTS_MANIFESTS += ['test/unit/xpcshell.ini'] +BROWSER_CHROME_MANIFESTS += ["test/browser.ini"] +MOCHITEST_CHROME_MANIFESTS += ["test/chrome.ini"] +XPCSHELL_TESTS_MANIFESTS += ["test/unit/xpcshell.ini"] -JAR_MANIFESTS += ['jar.mn'] +JAR_MANIFESTS += ["jar.mn"] XPIDL_SOURCES += [ - 'nsIShellService.idl', + "nsIShellService.idl", ] -if CONFIG['MOZ_WIDGET_TOOLKIT'] == 'cocoa': +if CONFIG["MOZ_WIDGET_TOOLKIT"] == "cocoa": XPIDL_SOURCES += [ - 'nsIMacShellService.idl', + "nsIMacShellService.idl", ] SOURCES += [ - 'nsMacShellService.cpp', + "nsMacShellService.cpp", ] LOCAL_INCLUDES += [ # For CocoaFileUtils - '/xpcom/io' + "/xpcom/io" ] -elif CONFIG['MOZ_WIDGET_TOOLKIT'] == 'gtk': +elif CONFIG["MOZ_WIDGET_TOOLKIT"] == "gtk": XPIDL_SOURCES += [ - 'nsIGNOMEShellService.idl', + "nsIGNOMEShellService.idl", ] SOURCES += [ - 'nsGNOMEShellService.cpp', + "nsGNOMEShellService.cpp", ] - if CONFIG['MOZ_ENABLE_DBUS']: + if CONFIG["MOZ_ENABLE_DBUS"]: SOURCES += [ - 'nsGNOMEShellDBusHelper.cpp', - 'nsGNOMEShellSearchProvider.cpp', + "nsGNOMEShellDBusHelper.cpp", + "nsGNOMEShellSearchProvider.cpp", ] - include('/ipc/chromium/chromium-config.mozbuild') + include("/ipc/chromium/chromium-config.mozbuild") -elif CONFIG['OS_ARCH'] == 'WINNT': +elif CONFIG["OS_ARCH"] == "WINNT": XPIDL_SOURCES += [ - 'nsIWindowsShellService.idl', + "nsIWindowsShellService.idl", ] SOURCES += [ - 'nsWindowsShellService.cpp', - 'WindowsDefaultBrowser.cpp', + "nsWindowsShellService.cpp", + "WindowsDefaultBrowser.cpp", ] LOCAL_INCLUDES += [ - '../../../other-licenses/nsis/Contrib/CityHash/cityhash', + "../../../other-licenses/nsis/Contrib/CityHash/cityhash", ] -XPIDL_MODULE = 'shellservice' +XPIDL_MODULE = "shellservice" if SOURCES: - FINAL_LIBRARY = 'browsercomps' + FINAL_LIBRARY = "browsercomps" EXTRA_JS_MODULES += [ - 'HeadlessShell.jsm', - 'ScreenshotChild.jsm', - 'ShellService.jsm', + "HeadlessShell.jsm", + "ScreenshotChild.jsm", + "ShellService.jsm", ] -for var in ('MOZ_APP_NAME', 'MOZ_APP_VERSION'): +for var in ("MOZ_APP_NAME", "MOZ_APP_VERSION"): DEFINES[var] = '"%s"' % CONFIG[var] -CXXFLAGS += CONFIG['TK_CFLAGS'] -if CONFIG['MOZ_ENABLE_DBUS']: - CXXFLAGS += CONFIG['MOZ_DBUS_GLIB_CFLAGS'] +CXXFLAGS += CONFIG["TK_CFLAGS"] +if CONFIG["MOZ_ENABLE_DBUS"]: + CXXFLAGS += CONFIG["MOZ_DBUS_GLIB_CFLAGS"] -with Files('**'): - BUG_COMPONENT = ('Firefox', 'Shell Integration') +with Files("**"): + BUG_COMPONENT = ("Firefox", "Shell Integration") diff --git a/browser/components/shell/test/mac_desktop_image.py b/browser/components/shell/test/mac_desktop_image.py index 0d3c9cb15957f5..ca7bc0b2972663 100755 --- a/browser/components/shell/test/mac_desktop_image.py +++ b/browser/components/shell/test/mac_desktop_image.py @@ -34,24 +34,40 @@ def main(): - parser = argparse.ArgumentParser(description="Utility to print, set, or " + - "check the path to image being used as " + - "the desktop background image. By " + - "default, prints the path to the " + - "current desktop background image.") - parser.add_argument("-v", "--verbose", action="store_true", - help="print verbose debugging information", - default=False) + parser = argparse.ArgumentParser( + description="Utility to print, set, or " + + "check the path to image being used as " + + "the desktop background image. 
By " + + "default, prints the path to the " + + "current desktop background image." + ) + parser.add_argument( + "-v", + "--verbose", + action="store_true", + help="print verbose debugging information", + default=False, + ) group = parser.add_mutually_exclusive_group() - group.add_argument("-s", "--set-background-image", - dest='newBackgroundImagePath', required=False, - help="path to the new background image to set. A zero " + - "exit code indicates no errors occurred.", default=None) - group.add_argument("-c", "--check-background-image", - dest='checkBackgroundImagePath', required=False, - help="check if the provided background image path " + - "matches the provided path. A zero exit code " + - "indicates the paths match.", default=None) + group.add_argument( + "-s", + "--set-background-image", + dest="newBackgroundImagePath", + required=False, + help="path to the new background image to set. A zero " + + "exit code indicates no errors occurred.", + default=None, + ) + group.add_argument( + "-c", + "--check-background-image", + dest="checkBackgroundImagePath", + required=False, + help="check if the provided background image path " + + "matches the provided path. A zero exit code " + + "indicates the paths match.", + default=None, + ) args = parser.parse_args() # Using logging for verbose output @@ -59,12 +75,14 @@ def main(): logging.basicConfig(level=logging.DEBUG) else: logging.basicConfig(level=logging.CRITICAL) - logger = logging.getLogger('desktopImage') + logger = logging.getLogger("desktopImage") # Print what we're going to do if args.checkBackgroundImagePath is not None: - logger.debug("checking provided desktop image %s matches current " - "image" % args.checkBackgroundImagePath) + logger.debug( + "checking provided desktop image %s matches current " + "image" % args.checkBackgroundImagePath + ) elif args.newBackgroundImagePath is not None: logger.debug("setting image to %s " % args.newBackgroundImagePath) else: @@ -121,7 +139,8 @@ def main(): status = False (status, error) = ws.setDesktopImageURL_forScreen_options_error_( - newImageURL, focussedScreen, None, None) + newImageURL, focussedScreen, None, None + ) if not status: raise RuntimeError("setDesktopImageURL error") @@ -145,7 +164,7 @@ def getCurrentDesktopImageURL(focussedScreen, workspace, logger): return imageURL -if __name__ == '__main__': +if __name__ == "__main__": if not main(): sys.exit(1) else: diff --git a/browser/components/ssb/moz.build b/browser/components/ssb/moz.build index b799f00f053cf8..d686475dcb43c6 100644 --- a/browser/components/ssb/moz.build +++ b/browser/components/ssb/moz.build @@ -4,28 +4,28 @@ # License, v. 2.0. If a copy of the MPL was not distributed with this # file, You can obtain one at http://mozilla.org/MPL/2.0/. 
-JAR_MANIFESTS += ['content/jar.mn'] -BROWSER_CHROME_MANIFESTS += ['tests/browser/browser.ini'] -XPCSHELL_TESTS_MANIFESTS += ['tests/xpcshell/xpcshell.ini'] +JAR_MANIFESTS += ["content/jar.mn"] +BROWSER_CHROME_MANIFESTS += ["tests/browser/browser.ini"] +XPCSHELL_TESTS_MANIFESTS += ["tests/xpcshell/xpcshell.ini"] XPCOM_MANIFESTS += [ - 'components.conf', + "components.conf", ] EXTRA_JS_MODULES += [ - 'SiteSpecificBrowserService.jsm', + "SiteSpecificBrowserService.jsm", ] EXTRA_JS_MODULES.ssb += [ - 'ImageTools.jsm', + "ImageTools.jsm", ] FINAL_TARGET_FILES.actors += [ - 'SiteSpecificBrowserChild.jsm', - 'SiteSpecificBrowserParent.jsm', + "SiteSpecificBrowserChild.jsm", + "SiteSpecificBrowserParent.jsm", ] -if CONFIG['MOZ_WIDGET_TOOLKIT'] == 'windows': +if CONFIG["MOZ_WIDGET_TOOLKIT"] == "windows": EXTRA_JS_MODULES.ssb += [ - 'WindowsSupport.jsm', + "WindowsSupport.jsm", ] diff --git a/browser/components/syncedtabs/moz.build b/browser/components/syncedtabs/moz.build index cdee0c283a811f..710477a6458afc 100644 --- a/browser/components/syncedtabs/moz.build +++ b/browser/components/syncedtabs/moz.build @@ -2,23 +2,22 @@ # License, v. 2.0. If a copy of the MPL was not distributed with this # file, You can obtain one at http://mozilla.org/MPL/2.0/. -JAR_MANIFESTS += ['jar.mn'] +JAR_MANIFESTS += ["jar.mn"] -BROWSER_CHROME_MANIFESTS += ['test/browser/browser.ini'] +BROWSER_CHROME_MANIFESTS += ["test/browser/browser.ini"] -XPCSHELL_TESTS_MANIFESTS += ['test/xpcshell/xpcshell.ini'] +XPCSHELL_TESTS_MANIFESTS += ["test/xpcshell/xpcshell.ini"] EXTRA_JS_MODULES.syncedtabs += [ - 'EventEmitter.jsm', - 'SyncedTabsDeckComponent.js', - 'SyncedTabsDeckStore.js', - 'SyncedTabsDeckView.js', - 'SyncedTabsListStore.js', - 'TabListComponent.js', - 'TabListView.js', - 'util.js', + "EventEmitter.jsm", + "SyncedTabsDeckComponent.js", + "SyncedTabsDeckStore.js", + "SyncedTabsDeckView.js", + "SyncedTabsListStore.js", + "TabListComponent.js", + "TabListView.js", + "util.js", ] -with Files('**'): - BUG_COMPONENT = ('Firefox', 'Sync') - +with Files("**"): + BUG_COMPONENT = ("Firefox", "Sync") diff --git a/browser/components/touchbar/moz.build b/browser/components/touchbar/moz.build index f9099cfad16842..25b669b0338599 100644 --- a/browser/components/touchbar/moz.build +++ b/browser/components/touchbar/moz.build @@ -2,14 +2,14 @@ # License, v. 2.0. If a copy of the MPL was not distributed with this # file, You can obtain one at http://mozilla.org/MPL/2.0/. -with Files('**'): - BUG_COMPONENT = ('Core', 'Widget: Cocoa') +with Files("**"): + BUG_COMPONENT = ("Core", "Widget: Cocoa") -BROWSER_CHROME_MANIFESTS += ['tests/browser/browser.ini'] +BROWSER_CHROME_MANIFESTS += ["tests/browser/browser.ini"] EXTRA_COMPONENTS += [ - 'MacTouchBar.js', - 'MacTouchBar.manifest', + "MacTouchBar.js", + "MacTouchBar.manifest", ] -SPHINX_TREES['/browser/touchbar'] = 'docs' +SPHINX_TREES["/browser/touchbar"] = "docs" diff --git a/browser/components/translation/content/moz.build b/browser/components/translation/content/moz.build index eb4454d28f88bf..d988c0ff9b162c 100644 --- a/browser/components/translation/content/moz.build +++ b/browser/components/translation/content/moz.build @@ -4,4 +4,4 @@ # License, v. 2.0. If a copy of the MPL was not distributed with this # file, You can obtain one at http://mozilla.org/MPL/2.0/. 
-JAR_MANIFESTS += ['jar.mn'] \ No newline at end of file +JAR_MANIFESTS += ["jar.mn"] diff --git a/browser/components/translation/moz.build b/browser/components/translation/moz.build index 5d4c61eb15c921..99217d9e828fba 100644 --- a/browser/components/translation/moz.build +++ b/browser/components/translation/moz.build @@ -3,28 +3,24 @@ # file, You can obtain one at http://mozilla.org/MPL/2.0/. DIRS += [ - 'content', + "content", ] with Files("**"): BUG_COMPONENT = ("Firefox", "Translation") EXTRA_JS_MODULES.translation = [ - 'BingTranslator.jsm', - 'cld2/cld-worker.js', - 'cld2/cld-worker.js.mem', - 'GoogleTranslator.jsm', - 'LanguageDetector.jsm', - 'TranslationChild.jsm', - 'TranslationDocument.jsm', - 'TranslationParent.jsm', - 'YandexTranslator.jsm' + "BingTranslator.jsm", + "cld2/cld-worker.js", + "cld2/cld-worker.js.mem", + "GoogleTranslator.jsm", + "LanguageDetector.jsm", + "TranslationChild.jsm", + "TranslationDocument.jsm", + "TranslationParent.jsm", + "YandexTranslator.jsm", ] -BROWSER_CHROME_MANIFESTS += [ - 'test/browser.ini' -] +BROWSER_CHROME_MANIFESTS += ["test/browser.ini"] -XPCSHELL_TESTS_MANIFESTS += [ - 'test/unit/xpcshell.ini' -] +XPCSHELL_TESTS_MANIFESTS += ["test/unit/xpcshell.ini"] diff --git a/browser/components/uitour/moz.build b/browser/components/uitour/moz.build index 5a2b3e8aeb432c..e98ce45ae8b3c3 100644 --- a/browser/components/uitour/moz.build +++ b/browser/components/uitour/moz.build @@ -2,17 +2,13 @@ # License, v. 2.0. If a copy of the MPL was not distributed with this # file, You can obtain one at http://mozilla.org/MPL/2.0/. -EXTRA_JS_MODULES += [ - 'UITour.jsm', - 'UITourChild.jsm', - 'UITourParent.jsm' -] +EXTRA_JS_MODULES += ["UITour.jsm", "UITourChild.jsm", "UITourParent.jsm"] BROWSER_CHROME_MANIFESTS += [ - 'test/browser.ini', + "test/browser.ini", ] -SPHINX_TREES['docs'] = 'docs' +SPHINX_TREES["docs"] = "docs" -with Files('**'): - BUG_COMPONENT = ('Firefox', 'Tours') +with Files("**"): + BUG_COMPONENT = ("Firefox", "Tours") diff --git a/browser/components/urlbar/moz.build b/browser/components/urlbar/moz.build index 8da3bc9e5f5ab3..cdade1db67fc4c 100644 --- a/browser/components/urlbar/moz.build +++ b/browser/components/urlbar/moz.build @@ -6,42 +6,42 @@ with Files("**"): BUG_COMPONENT = ("Firefox", "Address Bar") EXTRA_JS_MODULES += [ - 'UrlbarController.jsm', - 'UrlbarEventBufferer.jsm', - 'UrlbarInput.jsm', - 'UrlbarMuxerUnifiedComplete.jsm', - 'UrlbarPrefs.jsm', - 'UrlbarProviderAutofill.jsm', - 'UrlbarProviderExtension.jsm', - 'UrlbarProviderHeuristicFallback.jsm', - 'UrlbarProviderInterventions.jsm', - 'UrlbarProviderOmnibox.jsm', - 'UrlbarProviderOpenTabs.jsm', - 'UrlbarProviderPrivateSearch.jsm', - 'UrlbarProviderSearchSuggestions.jsm', - 'UrlbarProviderSearchTips.jsm', - 'UrlbarProvidersManager.jsm', - 'UrlbarProviderTabToSearch.jsm', - 'UrlbarProviderTokenAliasEngines.jsm', - 'UrlbarProviderTopSites.jsm', - 'UrlbarProviderUnifiedComplete.jsm', - 'UrlbarResult.jsm', - 'UrlbarSearchOneOffs.jsm', - 'UrlbarSearchUtils.jsm', - 'UrlbarTokenizer.jsm', - 'UrlbarUtils.jsm', - 'UrlbarValueFormatter.jsm', - 'UrlbarView.jsm', + "UrlbarController.jsm", + "UrlbarEventBufferer.jsm", + "UrlbarInput.jsm", + "UrlbarMuxerUnifiedComplete.jsm", + "UrlbarPrefs.jsm", + "UrlbarProviderAutofill.jsm", + "UrlbarProviderExtension.jsm", + "UrlbarProviderHeuristicFallback.jsm", + "UrlbarProviderInterventions.jsm", + "UrlbarProviderOmnibox.jsm", + "UrlbarProviderOpenTabs.jsm", + "UrlbarProviderPrivateSearch.jsm", + "UrlbarProviderSearchSuggestions.jsm", + 
"UrlbarProviderSearchTips.jsm", + "UrlbarProvidersManager.jsm", + "UrlbarProviderTabToSearch.jsm", + "UrlbarProviderTokenAliasEngines.jsm", + "UrlbarProviderTopSites.jsm", + "UrlbarProviderUnifiedComplete.jsm", + "UrlbarResult.jsm", + "UrlbarSearchOneOffs.jsm", + "UrlbarSearchUtils.jsm", + "UrlbarTokenizer.jsm", + "UrlbarUtils.jsm", + "UrlbarValueFormatter.jsm", + "UrlbarView.jsm", ] TESTING_JS_MODULES += [ - 'tests/UrlbarTestUtils.jsm', + "tests/UrlbarTestUtils.jsm", ] BROWSER_CHROME_MANIFESTS += [ - 'tests/browser-tips/browser.ini', - 'tests/browser/browser.ini', - 'tests/ext/browser/browser.ini', + "tests/browser-tips/browser.ini", + "tests/browser/browser.ini", + "tests/ext/browser/browser.ini", ] -XPCSHELL_TESTS_MANIFESTS += ['tests/unit/xpcshell.ini'] +XPCSHELL_TESTS_MANIFESTS += ["tests/unit/xpcshell.ini"] -SPHINX_TREES['/browser/urlbar'] = 'docs' +SPHINX_TREES["/browser/urlbar"] = "docs" diff --git a/browser/extensions/doh-rollout/moz.build b/browser/extensions/doh-rollout/moz.build index b3a78fc1be5af5..bce8283117de84 100644 --- a/browser/extensions/doh-rollout/moz.build +++ b/browser/extensions/doh-rollout/moz.build @@ -4,13 +4,11 @@ # License, v. 2.0. If a copy of the MPL was not distributed with this # file, You can obtain one at http://mozilla.org/MPL/2.0/. -DEFINES['MOZ_APP_VERSION'] = CONFIG['MOZ_APP_VERSION'] -DEFINES['MOZ_APP_MAXVERSION'] = CONFIG['MOZ_APP_MAXVERSION'] +DEFINES["MOZ_APP_VERSION"] = CONFIG["MOZ_APP_VERSION"] +DEFINES["MOZ_APP_MAXVERSION"] = CONFIG["MOZ_APP_MAXVERSION"] -FINAL_TARGET_FILES.features['doh-rollout@mozilla.org'] += [ - 'manifest.json' -] +FINAL_TARGET_FILES.features["doh-rollout@mozilla.org"] += ["manifest.json"] -with Files('**'): - BUG_COMPONENT = ('Firefox', 'Security') +with Files("**"): + BUG_COMPONENT = ("Firefox", "Security") diff --git a/browser/extensions/formautofill/locales/moz.build b/browser/extensions/formautofill/locales/moz.build index aac3a838c4c239..d988c0ff9b162c 100644 --- a/browser/extensions/formautofill/locales/moz.build +++ b/browser/extensions/formautofill/locales/moz.build @@ -4,4 +4,4 @@ # License, v. 2.0. If a copy of the MPL was not distributed with this # file, You can obtain one at http://mozilla.org/MPL/2.0/. -JAR_MANIFESTS += ['jar.mn'] +JAR_MANIFESTS += ["jar.mn"] diff --git a/browser/extensions/formautofill/moz.build b/browser/extensions/formautofill/moz.build index 46e92f430988bb..ef46740c3c93c1 100644 --- a/browser/extensions/formautofill/moz.build +++ b/browser/extensions/formautofill/moz.build @@ -4,50 +4,50 @@ # License, v. 2.0. If a copy of the MPL was not distributed with this # file, You can obtain one at http://mozilla.org/MPL/2.0/. 
-DEFINES['MOZ_APP_VERSION'] = CONFIG['MOZ_APP_VERSION'] -DEFINES['MOZ_APP_MAXVERSION'] = CONFIG['MOZ_APP_MAXVERSION'] +DEFINES["MOZ_APP_VERSION"] = CONFIG["MOZ_APP_VERSION"] +DEFINES["MOZ_APP_MAXVERSION"] = CONFIG["MOZ_APP_MAXVERSION"] -DIRS += ['locales'] +DIRS += ["locales"] -FINAL_TARGET_FILES.features['formautofill@mozilla.org'] += [ - 'api.js', - 'background.js', - 'manifest.json', - 'schema.json', +FINAL_TARGET_FILES.features["formautofill@mozilla.org"] += [ + "api.js", + "background.js", + "manifest.json", + "schema.json", ] -if CONFIG['MOZ_WIDGET_TOOLKIT'] == 'gtk': - FINAL_TARGET_FILES.features['formautofill@mozilla.org'].chrome.content.skin += [ - 'skin/linux/autocomplete-item.css', - 'skin/linux/editDialog.css', +if CONFIG["MOZ_WIDGET_TOOLKIT"] == "gtk": + FINAL_TARGET_FILES.features["formautofill@mozilla.org"].chrome.content.skin += [ + "skin/linux/autocomplete-item.css", + "skin/linux/editDialog.css", ] -elif CONFIG['MOZ_WIDGET_TOOLKIT'] == 'cocoa': - FINAL_TARGET_FILES.features['formautofill@mozilla.org'].chrome.content.skin += [ - 'skin/osx/autocomplete-item.css', - 'skin/osx/editDialog.css', +elif CONFIG["MOZ_WIDGET_TOOLKIT"] == "cocoa": + FINAL_TARGET_FILES.features["formautofill@mozilla.org"].chrome.content.skin += [ + "skin/osx/autocomplete-item.css", + "skin/osx/editDialog.css", ] -elif CONFIG['MOZ_WIDGET_TOOLKIT'] == 'windows': - FINAL_TARGET_FILES.features['formautofill@mozilla.org'].chrome.content.skin += [ - 'skin/windows/autocomplete-item.css', - 'skin/windows/editDialog.css', +elif CONFIG["MOZ_WIDGET_TOOLKIT"] == "windows": + FINAL_TARGET_FILES.features["formautofill@mozilla.org"].chrome.content.skin += [ + "skin/windows/autocomplete-item.css", + "skin/windows/editDialog.css", ] BROWSER_CHROME_MANIFESTS += [ - 'test/browser/browser.ini', - 'test/browser/creditCard/browser.ini', - 'test/browser/focus-leak/browser.ini', + "test/browser/browser.ini", + "test/browser/creditCard/browser.ini", + "test/browser/focus-leak/browser.ini", ] -XPCSHELL_TESTS_MANIFESTS += ['test/unit/xpcshell.ini'] +XPCSHELL_TESTS_MANIFESTS += ["test/unit/xpcshell.ini"] MOCHITEST_MANIFESTS += [ - 'test/mochitest/creditCard/mochitest.ini', - 'test/mochitest/mochitest.ini', + "test/mochitest/creditCard/mochitest.ini", + "test/mochitest/mochitest.ini", ] -JAR_MANIFESTS += ['jar.mn'] +JAR_MANIFESTS += ["jar.mn"] -SPHINX_TREES['docs'] = 'docs' +SPHINX_TREES["docs"] = "docs" -with Files('**'): - BUG_COMPONENT = ('Toolkit', 'Form Autofill') +with Files("**"): + BUG_COMPONENT = ("Toolkit", "Form Autofill") diff --git a/browser/extensions/moz.build b/browser/extensions/moz.build index f71ece63c42da3..0eb3c53e76c2e0 100644 --- a/browser/extensions/moz.build +++ b/browser/extensions/moz.build @@ -4,10 +4,4 @@ # License, v. 2.0. If a copy of the MPL was not distributed with this # file, You can obtain one at http://mozilla.org/MPL/2.0/. -DIRS += [ - 'doh-rollout', - 'formautofill', - 'screenshots', - 'webcompat', - 'report-site-issue' -] +DIRS += ["doh-rollout", "formautofill", "screenshots", "webcompat", "report-site-issue"] diff --git a/browser/extensions/report-site-issue/locales/moz.build b/browser/extensions/report-site-issue/locales/moz.build index eb4454d28f88bf..d988c0ff9b162c 100644 --- a/browser/extensions/report-site-issue/locales/moz.build +++ b/browser/extensions/report-site-issue/locales/moz.build @@ -4,4 +4,4 @@ # License, v. 2.0. If a copy of the MPL was not distributed with this # file, You can obtain one at http://mozilla.org/MPL/2.0/. 
-JAR_MANIFESTS += ['jar.mn'] \ No newline at end of file +JAR_MANIFESTS += ["jar.mn"] diff --git a/browser/extensions/report-site-issue/moz.build b/browser/extensions/report-site-issue/moz.build index 466c38f723bd1e..e3e92a33198fbc 100644 --- a/browser/extensions/report-site-issue/moz.build +++ b/browser/extensions/report-site-issue/moz.build @@ -4,38 +4,38 @@ # License, v. 2.0. If a copy of the MPL was not distributed with this # file, You can obtain one at http://mozilla.org/MPL/2.0/. -DEFINES['MOZ_APP_VERSION'] = CONFIG['MOZ_APP_VERSION'] -DEFINES['MOZ_APP_MAXVERSION'] = CONFIG['MOZ_APP_MAXVERSION'] +DEFINES["MOZ_APP_VERSION"] = CONFIG["MOZ_APP_VERSION"] +DEFINES["MOZ_APP_MAXVERSION"] = CONFIG["MOZ_APP_MAXVERSION"] -DIRS += ['locales'] +DIRS += ["locales"] -FINAL_TARGET_FILES.features['webcompat-reporter@mozilla.org'] += [ - 'background.js', - 'manifest.json' +FINAL_TARGET_FILES.features["webcompat-reporter@mozilla.org"] += [ + "background.js", + "manifest.json", ] -FINAL_TARGET_FILES.features['webcompat-reporter@mozilla.org'].experimentalAPIs += [ - 'experimentalAPIs/aboutConfigPrefs.js', - 'experimentalAPIs/aboutConfigPrefs.json', - 'experimentalAPIs/browserInfo.js', - 'experimentalAPIs/browserInfo.json', - 'experimentalAPIs/l10n.js', - 'experimentalAPIs/l10n.json', - 'experimentalAPIs/pageActionExtras.js', - 'experimentalAPIs/pageActionExtras.json', - 'experimentalAPIs/tabExtras.js', - 'experimentalAPIs/tabExtras.json' +FINAL_TARGET_FILES.features["webcompat-reporter@mozilla.org"].experimentalAPIs += [ + "experimentalAPIs/aboutConfigPrefs.js", + "experimentalAPIs/aboutConfigPrefs.json", + "experimentalAPIs/browserInfo.js", + "experimentalAPIs/browserInfo.json", + "experimentalAPIs/l10n.js", + "experimentalAPIs/l10n.json", + "experimentalAPIs/pageActionExtras.js", + "experimentalAPIs/pageActionExtras.json", + "experimentalAPIs/tabExtras.js", + "experimentalAPIs/tabExtras.json", ] -FINAL_TARGET_FILES.features['webcompat-reporter@mozilla.org'].experimentalAPIs.actors += [ - 'experimentalAPIs/actors/tabExtrasActor.jsm' -] +FINAL_TARGET_FILES.features[ + "webcompat-reporter@mozilla.org" +].experimentalAPIs.actors += ["experimentalAPIs/actors/tabExtrasActor.jsm"] -FINAL_TARGET_FILES.features['webcompat-reporter@mozilla.org'].icons += [ - 'icons/lightbulb.svg' +FINAL_TARGET_FILES.features["webcompat-reporter@mozilla.org"].icons += [ + "icons/lightbulb.svg" ] -BROWSER_CHROME_MANIFESTS += ['test/browser/browser.ini'] +BROWSER_CHROME_MANIFESTS += ["test/browser/browser.ini"] -with Files('**'): - BUG_COMPONENT = ('Web Compatibility', 'Tooling & Investigations') +with Files("**"): + BUG_COMPONENT = ("Web Compatibility", "Tooling & Investigations") diff --git a/browser/extensions/screenshots/moz.build b/browser/extensions/screenshots/moz.build index 10f01f690c4dea..3a1b161357282e 100644 --- a/browser/extensions/screenshots/moz.build +++ b/browser/extensions/screenshots/moz.build @@ -9,76 +9,73 @@ with Files("**"): # This file list is automatically generated by Screenshots' export scripts. 
# AUTOMATIC INSERTION START -FINAL_TARGET_FILES.features['screenshots@mozilla.org'] += [ - 'assertIsBlankDocument.js', - 'assertIsTrusted.js', - 'blank.html', - 'blobConverters.js', - 'catcher.js', - 'clipboard.js', - 'domainFromUrl.js', - 'log.js', - 'makeUuid.js', - 'manifest.json', - 'moz.build', - 'randomString.js', - 'sitehelper.js' +FINAL_TARGET_FILES.features["screenshots@mozilla.org"] += [ + "assertIsBlankDocument.js", + "assertIsTrusted.js", + "blank.html", + "blobConverters.js", + "catcher.js", + "clipboard.js", + "domainFromUrl.js", + "log.js", + "makeUuid.js", + "manifest.json", + "moz.build", + "randomString.js", + "sitehelper.js", ] -FINAL_TARGET_FILES.features['screenshots@mozilla.org']["background"] += [ - 'background/analytics.js', - 'background/auth.js', - 'background/communication.js', - 'background/deviceInfo.js', - 'background/main.js', - 'background/selectorLoader.js', - 'background/senderror.js', - 'background/startBackground.js', - 'background/takeshot.js' +FINAL_TARGET_FILES.features["screenshots@mozilla.org"]["background"] += [ + "background/analytics.js", + "background/auth.js", + "background/communication.js", + "background/deviceInfo.js", + "background/main.js", + "background/selectorLoader.js", + "background/senderror.js", + "background/startBackground.js", + "background/takeshot.js", ] -FINAL_TARGET_FILES.features['screenshots@mozilla.org']["build"] += [ - 'build/buildSettings.js', - 'build/inlineSelectionCss.js', - 'build/raven.js', - 'build/selection.js', - 'build/shot.js', - 'build/thumbnailGenerator.js' +FINAL_TARGET_FILES.features["screenshots@mozilla.org"]["build"] += [ + "build/buildSettings.js", + "build/inlineSelectionCss.js", + "build/raven.js", + "build/selection.js", + "build/shot.js", + "build/thumbnailGenerator.js", ] -FINAL_TARGET_FILES.features['screenshots@mozilla.org']["experiments"]["screenshots"] += [ - 'experiments/screenshots/api.js', - 'experiments/screenshots/schema.json' -] +FINAL_TARGET_FILES.features["screenshots@mozilla.org"]["experiments"][ + "screenshots" +] += ["experiments/screenshots/api.js", "experiments/screenshots/schema.json"] -FINAL_TARGET_FILES.features['screenshots@mozilla.org']["icons"] += [ - 'icons/cancel.svg', - 'icons/cloud.svg', - 'icons/copied-notification.svg', - 'icons/copy.svg', - 'icons/download-white.svg', - 'icons/download.svg', - 'icons/help-16.svg', - 'icons/icon-highlight-32-v2.svg', - 'icons/icon-v2.svg', - 'icons/icon-welcome-face-without-eyes.svg', - 'icons/menu-fullpage.svg', - 'icons/menu-myshot-white.svg', - 'icons/menu-myshot.svg', - 'icons/menu-visible.svg', +FINAL_TARGET_FILES.features["screenshots@mozilla.org"]["icons"] += [ + "icons/cancel.svg", + "icons/cloud.svg", + "icons/copied-notification.svg", + "icons/copy.svg", + "icons/download-white.svg", + "icons/download.svg", + "icons/help-16.svg", + "icons/icon-highlight-32-v2.svg", + "icons/icon-v2.svg", + "icons/icon-welcome-face-without-eyes.svg", + "icons/menu-fullpage.svg", + "icons/menu-myshot-white.svg", + "icons/menu-myshot.svg", + "icons/menu-visible.svg", ] -FINAL_TARGET_FILES.features['screenshots@mozilla.org']["selector"] += [ - 'selector/callBackground.js', - 'selector/documentMetadata.js', - 'selector/shooter.js', - 'selector/ui.js', - 'selector/uicontrol.js', - 'selector/util.js' +FINAL_TARGET_FILES.features["screenshots@mozilla.org"]["selector"] += [ + "selector/callBackground.js", + "selector/documentMetadata.js", + "selector/shooter.js", + "selector/ui.js", + "selector/uicontrol.js", + "selector/util.js", ] # AUTOMATIC 
INSERTION END -BROWSER_CHROME_MANIFESTS += [ - 'test/browser/browser.ini' -] +BROWSER_CHROME_MANIFESTS += ["test/browser/browser.ini"] diff --git a/browser/extensions/webcompat/moz.build b/browser/extensions/webcompat/moz.build index fccc7aac1bb314..87320a05c25b64 100644 --- a/browser/extensions/webcompat/moz.build +++ b/browser/extensions/webcompat/moz.build @@ -4,120 +4,120 @@ # License, v. 2.0. If a copy of the MPL was not distributed with this # file, You can obtain one at http://mozilla.org/MPL/2.0/. -DEFINES['MOZ_APP_VERSION'] = CONFIG['MOZ_APP_VERSION'] -DEFINES['MOZ_APP_MAXVERSION'] = CONFIG['MOZ_APP_MAXVERSION'] +DEFINES["MOZ_APP_VERSION"] = CONFIG["MOZ_APP_VERSION"] +DEFINES["MOZ_APP_MAXVERSION"] = CONFIG["MOZ_APP_MAXVERSION"] -FINAL_TARGET_FILES.features['webcompat@mozilla.org'] += [ - 'manifest.json', - 'run.js', +FINAL_TARGET_FILES.features["webcompat@mozilla.org"] += [ + "manifest.json", + "run.js", ] -FINAL_TARGET_FILES.features['webcompat@mozilla.org']['about-compat'] += [ - 'about-compat/aboutCompat.css', - 'about-compat/aboutCompat.html', - 'about-compat/aboutCompat.js', - 'about-compat/AboutCompat.jsm', - 'about-compat/aboutPage.js', - 'about-compat/aboutPage.json', - 'about-compat/aboutPageProcessScript.js', +FINAL_TARGET_FILES.features["webcompat@mozilla.org"]["about-compat"] += [ + "about-compat/aboutCompat.css", + "about-compat/aboutCompat.html", + "about-compat/aboutCompat.js", + "about-compat/AboutCompat.jsm", + "about-compat/aboutPage.js", + "about-compat/aboutPage.json", + "about-compat/aboutPageProcessScript.js", ] -FINAL_TARGET_FILES.features['webcompat@mozilla.org']['data'] += [ - 'data/injections.js', - 'data/picture_in_picture_overrides.js', - 'data/shims.js', - 'data/ua_overrides.js', +FINAL_TARGET_FILES.features["webcompat@mozilla.org"]["data"] += [ + "data/injections.js", + "data/picture_in_picture_overrides.js", + "data/shims.js", + "data/ua_overrides.js", ] -FINAL_TARGET_FILES.features['webcompat@mozilla.org']['experiment-apis'] += [ - 'experiment-apis/aboutConfigPrefs.js', - 'experiment-apis/aboutConfigPrefs.json', - 'experiment-apis/appConstants.js', - 'experiment-apis/appConstants.json', - 'experiment-apis/experiments.js', - 'experiment-apis/experiments.json', - 'experiment-apis/matchPatterns.js', - 'experiment-apis/matchPatterns.json', - 'experiment-apis/pictureInPicture.js', - 'experiment-apis/pictureInPicture.json', - 'experiment-apis/sharedPreferences.js', - 'experiment-apis/sharedPreferences.json', - 'experiment-apis/systemManufacturer.js', - 'experiment-apis/systemManufacturer.json', - 'experiment-apis/trackingProtection.js', - 'experiment-apis/trackingProtection.json', +FINAL_TARGET_FILES.features["webcompat@mozilla.org"]["experiment-apis"] += [ + "experiment-apis/aboutConfigPrefs.js", + "experiment-apis/aboutConfigPrefs.json", + "experiment-apis/appConstants.js", + "experiment-apis/appConstants.json", + "experiment-apis/experiments.js", + "experiment-apis/experiments.json", + "experiment-apis/matchPatterns.js", + "experiment-apis/matchPatterns.json", + "experiment-apis/pictureInPicture.js", + "experiment-apis/pictureInPicture.json", + "experiment-apis/sharedPreferences.js", + "experiment-apis/sharedPreferences.json", + "experiment-apis/systemManufacturer.js", + "experiment-apis/systemManufacturer.json", + "experiment-apis/trackingProtection.js", + "experiment-apis/trackingProtection.json", ] -FINAL_TARGET_FILES.features['webcompat@mozilla.org']['injections']['css'] += [ - 'injections/css/bug0000000-testbed-css-injection.css', - 
'injections/css/bug1561371-mail.google.com-allow-horizontal-scrolling.css', - 'injections/css/bug1570119-teamcoco.com-scrollbar-width.css', - 'injections/css/bug1570328-developer-apple.com-transform-scale.css', - 'injections/css/bug1575000-apply.lloydsbank.co.uk-radio-buttons-fix.css', - 'injections/css/bug1577297-kitkat.com.au-slider-width-fix.css', - 'injections/css/bug1605611-maps.google.com-directions-time.css', - 'injections/css/bug1610016-gaana.com-input-position-fix.css', - 'injections/css/bug1610344-directv.com.co-hide-unsupported-message.css', - 'injections/css/bug1632019-everyman.co-gallery-width-fix.css', - 'injections/css/bug1644830-missingmail.usps.com-checkboxes-not-visible.css', - 'injections/css/bug1645064-s-kanava.fi-invisible-charts.css', - 'injections/css/bug1651917-teletrader.com.body-transform-origin.css', - 'injections/css/bug1653075-livescience.com-scrollbar-width.css', - 'injections/css/bug1654865-sports.ndtv.com-float-fix.css', - 'injections/css/bug1654877-preev.com-moz-appearance-fix.css', - 'injections/css/bug1654907-reactine.ca-hide-unsupported.css', - 'injections/css/bug1655049-dev.to-unclickable-button-fix.css', - 'injections/css/bug1666771-zilow-map-overdraw.css', +FINAL_TARGET_FILES.features["webcompat@mozilla.org"]["injections"]["css"] += [ + "injections/css/bug0000000-testbed-css-injection.css", + "injections/css/bug1561371-mail.google.com-allow-horizontal-scrolling.css", + "injections/css/bug1570119-teamcoco.com-scrollbar-width.css", + "injections/css/bug1570328-developer-apple.com-transform-scale.css", + "injections/css/bug1575000-apply.lloydsbank.co.uk-radio-buttons-fix.css", + "injections/css/bug1577297-kitkat.com.au-slider-width-fix.css", + "injections/css/bug1605611-maps.google.com-directions-time.css", + "injections/css/bug1610016-gaana.com-input-position-fix.css", + "injections/css/bug1610344-directv.com.co-hide-unsupported-message.css", + "injections/css/bug1632019-everyman.co-gallery-width-fix.css", + "injections/css/bug1644830-missingmail.usps.com-checkboxes-not-visible.css", + "injections/css/bug1645064-s-kanava.fi-invisible-charts.css", + "injections/css/bug1651917-teletrader.com.body-transform-origin.css", + "injections/css/bug1653075-livescience.com-scrollbar-width.css", + "injections/css/bug1654865-sports.ndtv.com-float-fix.css", + "injections/css/bug1654877-preev.com-moz-appearance-fix.css", + "injections/css/bug1654907-reactine.ca-hide-unsupported.css", + "injections/css/bug1655049-dev.to-unclickable-button-fix.css", + "injections/css/bug1666771-zilow-map-overdraw.css", ] -FINAL_TARGET_FILES.features['webcompat@mozilla.org']['injections']['js'] += [ - 'injections/js/bug0000000-testbed-js-injection.js', - 'injections/js/bug1452707-window.controllers-shim-ib.absa.co.za.js', - 'injections/js/bug1457335-histography.io-ua-change.js', - 'injections/js/bug1472075-bankofamerica.com-ua-change.js', - 'injections/js/bug1570856-medium.com-menu-isTier1.js', - 'injections/js/bug1579159-m.tailieu.vn-pdfjs-worker-disable.js', - 'injections/js/bug1605611-maps.google.com-directions-time.js', - 'injections/js/bug1610358-pcloud.com-appVersion-change.js', +FINAL_TARGET_FILES.features["webcompat@mozilla.org"]["injections"]["js"] += [ + "injections/js/bug0000000-testbed-js-injection.js", + "injections/js/bug1452707-window.controllers-shim-ib.absa.co.za.js", + "injections/js/bug1457335-histography.io-ua-change.js", + "injections/js/bug1472075-bankofamerica.com-ua-change.js", + "injections/js/bug1570856-medium.com-menu-isTier1.js", + 
"injections/js/bug1579159-m.tailieu.vn-pdfjs-worker-disable.js", + "injections/js/bug1605611-maps.google.com-directions-time.js", + "injections/js/bug1610358-pcloud.com-appVersion-change.js", ] -FINAL_TARGET_FILES.features['webcompat@mozilla.org']['shims'] += [ - 'shims/adsafeprotected-ima.js', - 'shims/bmauth.js', - 'shims/eluminate.js', - 'shims/empty-script.js', - 'shims/facebook-sdk.js', - 'shims/google-analytics-ecommerce-plugin.js', - 'shims/google-analytics-legacy.js', - 'shims/google-analytics-tag-manager.js', - 'shims/google-analytics.js', - 'shims/google-publisher-tags.js', - 'shims/live-test-shim.js', - 'shims/mochitest-shim-1.js', - 'shims/mochitest-shim-2.js', - 'shims/mochitest-shim-3.js', - 'shims/rambler-authenticator.js', - 'shims/rich-relevance.js', +FINAL_TARGET_FILES.features["webcompat@mozilla.org"]["shims"] += [ + "shims/adsafeprotected-ima.js", + "shims/bmauth.js", + "shims/eluminate.js", + "shims/empty-script.js", + "shims/facebook-sdk.js", + "shims/google-analytics-ecommerce-plugin.js", + "shims/google-analytics-legacy.js", + "shims/google-analytics-tag-manager.js", + "shims/google-analytics.js", + "shims/google-publisher-tags.js", + "shims/live-test-shim.js", + "shims/mochitest-shim-1.js", + "shims/mochitest-shim-2.js", + "shims/mochitest-shim-3.js", + "shims/rambler-authenticator.js", + "shims/rich-relevance.js", ] -FINAL_TARGET_FILES.features['webcompat@mozilla.org']['lib'] += [ - 'lib/about_compat_broker.js', - 'lib/custom_functions.js', - 'lib/injections.js', - 'lib/intervention_helpers.js', - 'lib/messaging_helper.js', - 'lib/module_shim.js', - 'lib/picture_in_picture_overrides.js', - 'lib/shim_messaging_helper.js', - 'lib/shims.js', - 'lib/ua_overrides.js', +FINAL_TARGET_FILES.features["webcompat@mozilla.org"]["lib"] += [ + "lib/about_compat_broker.js", + "lib/custom_functions.js", + "lib/injections.js", + "lib/intervention_helpers.js", + "lib/messaging_helper.js", + "lib/module_shim.js", + "lib/picture_in_picture_overrides.js", + "lib/shim_messaging_helper.js", + "lib/shims.js", + "lib/ua_overrides.js", ] XPCOM_MANIFESTS += [ - 'components.conf', + "components.conf", ] -BROWSER_CHROME_MANIFESTS += ['tests/browser/browser.ini'] +BROWSER_CHROME_MANIFESTS += ["tests/browser/browser.ini"] -with Files('**'): - BUG_COMPONENT = ('Web Compatibility', 'Tooling & Investigations') +with Files("**"): + BUG_COMPONENT = ("Web Compatibility", "Tooling & Investigations") diff --git a/browser/fonts/moz.build b/browser/fonts/moz.build index 308dcabacececf..7b9cb08f48e6ba 100644 --- a/browser/fonts/moz.build +++ b/browser/fonts/moz.build @@ -4,8 +4,6 @@ # License, v. 2.0. If a copy of the MPL was not distributed with this # file, You can obtain one at http://mozilla.org/MPL/2.0/. -if CONFIG['MOZ_WIDGET_TOOLKIT'] in ('windows', 'gtk'): - DIST_SUBDIR = '' - FINAL_TARGET_FILES.fonts += [ - 'TwemojiMozilla.ttf' - ] +if CONFIG["MOZ_WIDGET_TOOLKIT"] in ("windows", "gtk"): + DIST_SUBDIR = "" + FINAL_TARGET_FILES.fonts += ["TwemojiMozilla.ttf"] diff --git a/browser/fxr/moz.build b/browser/fxr/moz.build index aac3a838c4c239..d988c0ff9b162c 100644 --- a/browser/fxr/moz.build +++ b/browser/fxr/moz.build @@ -4,4 +4,4 @@ # License, v. 2.0. If a copy of the MPL was not distributed with this # file, You can obtain one at http://mozilla.org/MPL/2.0/. 
-JAR_MANIFESTS += ['jar.mn'] +JAR_MANIFESTS += ["jar.mn"] diff --git a/browser/installer/windows/moz.build b/browser/installer/windows/moz.build index f0987ef9c3f4e2..30d4c23bba093c 100644 --- a/browser/installer/windows/moz.build +++ b/browser/installer/windows/moz.build @@ -4,13 +4,13 @@ # License, v. 2.0. If a copy of the MPL was not distributed with this # file, You can obtain one at http://mozilla.org/MPL/2.0/. -DEFINES['APP_VERSION'] = CONFIG['MOZ_APP_VERSION'] +DEFINES["APP_VERSION"] = CONFIG["MOZ_APP_VERSION"] -DEFINES['MOZ_APP_NAME'] = CONFIG['MOZ_APP_NAME'] -DEFINES['MOZ_APP_DISPLAYNAME'] = CONFIG['MOZ_APP_DISPLAYNAME'] -DEFINES['MOZILLA_VERSION'] = CONFIG['MOZILLA_VERSION'] +DEFINES["MOZ_APP_NAME"] = CONFIG["MOZ_APP_NAME"] +DEFINES["MOZ_APP_DISPLAYNAME"] = CONFIG["MOZ_APP_DISPLAYNAME"] +DEFINES["MOZILLA_VERSION"] = CONFIG["MOZILLA_VERSION"] -if CONFIG['MOZ_DEFAULT_BROWSER_AGENT']: - DEFINES['MOZ_DEFAULT_BROWSER_AGENT'] = CONFIG['MOZ_DEFAULT_BROWSER_AGENT'] +if CONFIG["MOZ_DEFAULT_BROWSER_AGENT"]: + DEFINES["MOZ_DEFAULT_BROWSER_AGENT"] = CONFIG["MOZ_DEFAULT_BROWSER_AGENT"] -SPHINX_TREES['installer'] = 'docs' +SPHINX_TREES["installer"] = "docs" diff --git a/browser/locales/filter.py b/browser/locales/filter.py index 1fc9b14e7c870a..bb2e992e2e72ed 100644 --- a/browser/locales/filter.py +++ b/browser/locales/filter.py @@ -7,17 +7,25 @@ def test(mod, path, entity=None): import re + # ignore anything but Firefox - if mod not in ("netwerk", "dom", "toolkit", "security/manager", - "devtools/client", "devtools/shared", "devtools/startup", - "browser", - "browser/extensions/formautofill", - "browser/extensions/fxmonitor", - "browser/extensions/report-site-issue", - "extensions/spellcheck", - "other-licenses/branding/firefox", - "browser/branding/official", - "services/sync"): + if mod not in ( + "netwerk", + "dom", + "toolkit", + "security/manager", + "devtools/client", + "devtools/shared", + "devtools/startup", + "browser", + "browser/extensions/formautofill", + "browser/extensions/fxmonitor", + "browser/extensions/report-site-issue", + "extensions/spellcheck", + "other-licenses/branding/firefox", + "browser/branding/official", + "services/sync", + ): return "ignore" if mod not in ("browser", "extensions/spellcheck"): # we only have exceptions for browser and extensions/spellcheck @@ -35,9 +43,13 @@ def test(mod, path, entity=None): if mod == "browser" and path == "chrome/browser-region/region.properties": # only region.properties exceptions remain, compare all others - return ("ignore" - if (re.match(r"browser\.contentHandlers\.types\.[0-5]", entity) or - re.match(r"gecko\.handlerService\.schemes\.", entity) or - re.match(r"gecko\.handlerService\.defaultHandlersVersion", entity)) - else "error") + return ( + "ignore" + if ( + re.match(r"browser\.contentHandlers\.types\.[0-5]", entity) + or re.match(r"gecko\.handlerService\.schemes\.", entity) + or re.match(r"gecko\.handlerService\.defaultHandlersVersion", entity) + ) + else "error" + ) return "error" diff --git a/browser/locales/generate_bookmarks.py b/browser/locales/generate_bookmarks.py index 583a4dc49883b7..1b2002fcf9ca03 100644 --- a/browser/locales/generate_bookmarks.py +++ b/browser/locales/generate_bookmarks.py @@ -15,7 +15,7 @@ def main(output, bookmarks_html_in, bookmarks_inc, locale=None): if not locale: - raise ValueError('locale must be specified!') + raise ValueError("locale must be specified!") CONFIG = buildconfig.substs @@ -25,19 +25,19 @@ def main(output, bookmarks_html_in, bookmarks_inc, locale=None): # anything but 
#define), so it's safe to restrict the set of defines to # what's used in mozilla-central directly. defines = {} - defines['AB_CD'] = locale - if defines['AB_CD'] == 'ja-JP-mac': - defines['AB_CD'] = 'ja' + defines["AB_CD"] = locale + if defines["AB_CD"] == "ja-JP-mac": + defines["AB_CD"] = "ja" - defines['BOOKMARKS_INCLUDE_PATH'] = bookmarks_inc + defines["BOOKMARKS_INCLUDE_PATH"] = bookmarks_inc - for var in ('NIGHTLY_BUILD',): + for var in ("NIGHTLY_BUILD",): if var in CONFIG: defines[var] = CONFIG[var] - includes = preprocessor.preprocess(includes=[bookmarks_html_in], - defines=defines, - output=output) + includes = preprocessor.preprocess( + includes=[bookmarks_html_in], defines=defines, output=output + ) return includes diff --git a/browser/locales/generate_ini.py b/browser/locales/generate_ini.py index e7488788e7deaa..2f2724dd96cb86 100644 --- a/browser/locales/generate_ini.py +++ b/browser/locales/generate_ini.py @@ -14,15 +14,16 @@ def main(output, ini, ini_append=None, locale=None): - fixup_re = re.compile('^(Info|Title)Text=') + fixup_re = re.compile("^(Info|Title)Text=") # Input INI is always utf-8. - with codecs.open(ini, 'rb', 'utf_8') as f: + with codecs.open(ini, "rb", "utf_8") as f: for line in f: - line = fixup_re.sub(r'\1=', line) - line = line.replace('%MOZ_APP_DISPLAYNAME%', - buildconfig.substs['MOZ_APP_DISPLAYNAME']) + line = fixup_re.sub(r"\1=", line) + line = line.replace( + "%MOZ_APP_DISPLAYNAME%", buildconfig.substs["MOZ_APP_DISPLAYNAME"] + ) output.write(line) - if ini_append and buildconfig.substs['OS_TARGET'] == 'WINNT': + if ini_append and buildconfig.substs["OS_TARGET"] == "WINNT": # Also append the contents of `ini_append`. - with codecs.open(ini_append, 'rb', 'utf_8') as f: + with codecs.open(ini_append, "rb", "utf_8") as f: shutil.copyfileobj(f, output) diff --git a/browser/locales/moz.build b/browser/locales/moz.build index 90ae99d83fa7c3..e1c27c75499d0c 100644 --- a/browser/locales/moz.build +++ b/browser/locales/moz.build @@ -4,35 +4,35 @@ # License, v. 2.0. If a copy of the MPL was not distributed with this # file, You can obtain one at http://mozilla.org/MPL/2.0/. -JAR_MANIFESTS += ['jar.mn'] +JAR_MANIFESTS += ["jar.mn"] # If DIST_SUBDIR ever gets unset in browser this path might be wrong due to PREF_DIR changing. -LOCALIZED_PP_FILES.defaults.preferences += ['en-US/firefox-l10n.js'] +LOCALIZED_PP_FILES.defaults.preferences += ["en-US/firefox-l10n.js"] -if CONFIG['MOZ_CRASHREPORTER']: - LOCALIZED_FILES += ['en-US/crashreporter/crashreporter-override.ini'] +if CONFIG["MOZ_CRASHREPORTER"]: + LOCALIZED_FILES += ["en-US/crashreporter/crashreporter-override.ini"] -if CONFIG['MOZ_UPDATER']: - LOCALIZED_GENERATED_FILES += ['updater.ini'] - updater = LOCALIZED_GENERATED_FILES['updater.ini'] - updater.script = 'generate_ini.py' +if CONFIG["MOZ_UPDATER"]: + LOCALIZED_GENERATED_FILES += ["updater.ini"] + updater = LOCALIZED_GENERATED_FILES["updater.ini"] + updater.script = "generate_ini.py" updater.inputs = [ - 'en-US/updater/updater.ini', - '../installer/windows/nsis/updater_append.ini', + "en-US/updater/updater.ini", + "../installer/windows/nsis/updater_append.ini", ] # Yes, this is weird, but what can you do? This file doesn't want to be in the DIST_SUBDIR, # but we can't really move it to a different directory until we change how locale repacks # work. 
- LOCALIZED_FILES['..'] += ['!updater.ini'] + LOCALIZED_FILES[".."] += ["!updater.ini"] -LOCALIZED_GENERATED_FILES += ['bookmarks.html'] -bookmarks = LOCALIZED_GENERATED_FILES['bookmarks.html'] -bookmarks.script = 'generate_bookmarks.py' +LOCALIZED_GENERATED_FILES += ["bookmarks.html"] +bookmarks = LOCALIZED_GENERATED_FILES["bookmarks.html"] +bookmarks.script = "generate_bookmarks.py" bookmarks.inputs = [ # This input will not be considered for localization. - 'generic/profile/bookmarks.html.in', + "generic/profile/bookmarks.html.in", # The `locales/en-US/` will be rewritten to the locale-specific path. - 'en-US/profile/bookmarks.inc', + "en-US/profile/bookmarks.inc", ] with Files("**"): diff --git a/browser/modules/moz.build b/browser/modules/moz.build index f1884addb6392e..06b809334b5434 100644 --- a/browser/modules/moz.build +++ b/browser/modules/moz.build @@ -43,11 +43,11 @@ with Files("AboutNewTab.jsm"): with Files("AsanReporter.jsm"): BUG_COMPONENT = ("Firefox Build System", "General") -with Files('AsyncTabSwitcher.jsm'): - BUG_COMPONENT = ('Firefox', 'Tabbed Browser') +with Files("AsyncTabSwitcher.jsm"): + BUG_COMPONENT = ("Firefox", "Tabbed Browser") -with Files('NewTabPagePreloading.jsm'): - BUG_COMPONENT = ('Firefox', 'Tabbed Browser') +with Files("NewTabPagePreloading.jsm"): + BUG_COMPONENT = ("Firefox", "Tabbed Browser") with Files("BrowserWindowTracker.jsm"): BUG_COMPONENT = ("Firefox", "General") @@ -77,7 +77,7 @@ with Files("PartnerLinkAttribution.jsm"): BUG_COMPONENT = ("Firefox", "Search") with Files("PermissionUI.jsm"): - BUG_COMPONENT = ("Firefox", "Site Permissions") + BUG_COMPONENT = ("Firefox", "Site Permissions") with Files("ProcessHangMonitor.jsm"): BUG_COMPONENT = ("Core", "DOM: Content Processes") @@ -119,51 +119,51 @@ with Files("ZoomUI.jsm"): BUG_COMPONENT = ("Firefox", "Toolbars and Customization") BROWSER_CHROME_MANIFESTS += [ - 'test/browser/browser.ini', - 'test/browser/formValidation/browser.ini', + "test/browser/browser.ini", + "test/browser/formValidation/browser.ini", ] -XPCSHELL_TESTS_MANIFESTS += ['test/unit/xpcshell.ini'] +XPCSHELL_TESTS_MANIFESTS += ["test/unit/xpcshell.ini"] EXTRA_JS_MODULES += [ - 'AboutNewTab.jsm', - 'AppUpdater.jsm', - 'AsyncTabSwitcher.jsm', - 'BrowserUsageTelemetry.jsm', - 'BrowserWindowTracker.jsm', - 'ContentCrashHandlers.jsm', - 'Discovery.jsm', - 'EveryWindow.jsm', - 'ExtensionsUI.jsm', - 'FaviconLoader.jsm', - 'HomePage.jsm', - 'LaterRun.jsm', - 'NewTabPagePreloading.jsm', - 'OpenInTabsUtils.jsm', - 'PageActions.jsm', - 'PartnerLinkAttribution.jsm', - 'PermissionUI.jsm', - 'PingCentre.jsm', - 'ProcessHangMonitor.jsm', - 'Sanitizer.jsm', - 'SelectionChangedMenulist.jsm', - 'SiteDataManager.jsm', - 'SitePermissions.jsm', - 'TabsList.jsm', - 'TabUnloader.jsm', - 'ThemeVariableMap.jsm', - 'TransientPrefs.jsm', - 'webrtcUI.jsm', - 'ZoomUI.jsm', + "AboutNewTab.jsm", + "AppUpdater.jsm", + "AsyncTabSwitcher.jsm", + "BrowserUsageTelemetry.jsm", + "BrowserWindowTracker.jsm", + "ContentCrashHandlers.jsm", + "Discovery.jsm", + "EveryWindow.jsm", + "ExtensionsUI.jsm", + "FaviconLoader.jsm", + "HomePage.jsm", + "LaterRun.jsm", + "NewTabPagePreloading.jsm", + "OpenInTabsUtils.jsm", + "PageActions.jsm", + "PartnerLinkAttribution.jsm", + "PermissionUI.jsm", + "PingCentre.jsm", + "ProcessHangMonitor.jsm", + "Sanitizer.jsm", + "SelectionChangedMenulist.jsm", + "SiteDataManager.jsm", + "SitePermissions.jsm", + "TabsList.jsm", + "TabUnloader.jsm", + "ThemeVariableMap.jsm", + "TransientPrefs.jsm", + "webrtcUI.jsm", + "ZoomUI.jsm", ] -if 
CONFIG['MOZ_ASAN_REPORTER']: +if CONFIG["MOZ_ASAN_REPORTER"]: EXTRA_JS_MODULES += [ - 'AsanReporter.jsm', + "AsanReporter.jsm", ] -if CONFIG['MOZ_WIDGET_TOOLKIT'] == 'windows': +if CONFIG["MOZ_WIDGET_TOOLKIT"] == "windows": EXTRA_JS_MODULES += [ - 'Windows8WindowFrameColor.jsm', - 'WindowsJumpLists.jsm', - 'WindowsPreviewPerTab.jsm', + "Windows8WindowFrameColor.jsm", + "WindowsJumpLists.jsm", + "WindowsPreviewPerTab.jsm", ] diff --git a/browser/moz.build b/browser/moz.build index 8cbf30a938ec88..ca88ad0a94f733 100644 --- a/browser/moz.build +++ b/browser/moz.build @@ -4,62 +4,62 @@ # License, v. 2.0. If a copy of the MPL was not distributed with this # file, You can obtain one at http://mozilla.org/MPL/2.0/. -CONFIGURE_SUBST_FILES += ['installer/Makefile'] +CONFIGURE_SUBST_FILES += ["installer/Makefile"] -SPHINX_TREES['/browser'] = 'docs' +SPHINX_TREES["/browser"] = "docs" EXTRA_COMPONENTS += [ - 'l10n-registry.manifest', + "l10n-registry.manifest", ] DIRS += [ - 'actors', - 'base', - 'components', - 'fonts', - 'fxr', - 'locales', - 'modules', - 'themes', - 'extensions', + "actors", + "base", + "components", + "fonts", + "fxr", + "locales", + "modules", + "themes", + "extensions", ] DIRS += [ - 'app', + "app", ] -if CONFIG['MAKENSISU']: - DIRS += ['installer/windows'] +if CONFIG["MAKENSISU"]: + DIRS += ["installer/windows"] TEST_DIRS += [ - 'tools/mozscreenshots', + "tools/mozscreenshots", ] -DIST_SUBDIR = 'browser' -export('DIST_SUBDIR') +DIST_SUBDIR = "browser" +export("DIST_SUBDIR") # These defines are read in firefox.js -DEFINES['APP_VERSION'] = CONFIG['MOZ_APP_VERSION'] +DEFINES["APP_VERSION"] = CONFIG["MOZ_APP_VERSION"] -for cdm in CONFIG['MOZ_EME_MODULES']: - DEFINES['MOZ_%s_EME' % cdm.upper()] = True +for cdm in CONFIG["MOZ_EME_MODULES"]: + DEFINES["MOZ_%s_EME" % cdm.upper()] = True -if CONFIG['MOZ_GPSD']: - DEFINES['MOZ_GPSD'] = True +if CONFIG["MOZ_GPSD"]: + DEFINES["MOZ_GPSD"] = True # These files are specified in this moz.build to pick up DIST_SUBDIR as set in # this directory, which is un-set in browser/app. JS_PREFERENCE_PP_FILES += [ - 'app/profile/firefox.js', + "app/profile/firefox.js", ] -FINAL_TARGET_FILES.defaults += ['app/permissions'] +FINAL_TARGET_FILES.defaults += ["app/permissions"] with Files("**"): BUG_COMPONENT = ("Firefox", "General") - SCHEDULES.exclusive = ['linux', 'macosx', 'windows'] + SCHEDULES.exclusive = ["linux", "macosx", "windows"] -with Files('docs/**'): - SCHEDULES.exclusive = ['docs'] +with Files("docs/**"): + SCHEDULES.exclusive = ["docs"] with Files("Makefile.in"): BUG_COMPONENT = ("Firefox Build System", "General") @@ -103,5 +103,5 @@ with Files("installer/**"): with Files("tools/**"): BUG_COMPONENT = ("Firefox", "General") -with Files('l10n-registry.manifest'): - BUG_COMPONENT = ('Core', 'Localization') +with Files("l10n-registry.manifest"): + BUG_COMPONENT = ("Core", "Localization") diff --git a/browser/moz.configure b/browser/moz.configure index a251050feb9b03..8653bcbb165da8 100644 --- a/browser/moz.configure +++ b/browser/moz.configure @@ -4,18 +4,16 @@ # License, v. 2.0. If a copy of the MPL was not distributed with this # file, You can obtain one at http://mozilla.org/MPL/2.0/. 
-imply_option('MOZ_PLACES', True) -imply_option('MOZ_SERVICES_HEALTHREPORT', True) -imply_option('MOZ_SERVICES_SYNC', True) -imply_option('MOZ_DEDICATED_PROFILES', True) -imply_option('MOZ_BLOCK_PROFILE_DOWNGRADE', True) -imply_option('MOZ_NORMANDY', True) +imply_option("MOZ_PLACES", True) +imply_option("MOZ_SERVICES_HEALTHREPORT", True) +imply_option("MOZ_SERVICES_SYNC", True) +imply_option("MOZ_DEDICATED_PROFILES", True) +imply_option("MOZ_BLOCK_PROFILE_DOWNGRADE", True) +imply_option("MOZ_NORMANDY", True) with only_when(target_is_linux & compile_environment): - option(env='MOZ_NO_PIE_COMPAT', - help='Enable non-PIE wrapper') + option(env="MOZ_NO_PIE_COMPAT", help="Enable non-PIE wrapper") - set_config('MOZ_NO_PIE_COMPAT', - depends_if('MOZ_NO_PIE_COMPAT')(lambda _: True)) + set_config("MOZ_NO_PIE_COMPAT", depends_if("MOZ_NO_PIE_COMPAT")(lambda _: True)) -include('../toolkit/moz.configure') +include("../toolkit/moz.configure") diff --git a/browser/themes/addons/moz.build b/browser/themes/addons/moz.build index aac3a838c4c239..d988c0ff9b162c 100644 --- a/browser/themes/addons/moz.build +++ b/browser/themes/addons/moz.build @@ -4,4 +4,4 @@ # License, v. 2.0. If a copy of the MPL was not distributed with this # file, You can obtain one at http://mozilla.org/MPL/2.0/. -JAR_MANIFESTS += ['jar.mn'] +JAR_MANIFESTS += ["jar.mn"] diff --git a/browser/themes/linux/moz.build b/browser/themes/linux/moz.build index 5f26f477c3daf3..20e4e3af75f523 100644 --- a/browser/themes/linux/moz.build +++ b/browser/themes/linux/moz.build @@ -4,7 +4,6 @@ # License, v. 2.0. If a copy of the MPL was not distributed with this # file, You can obtain one at http://mozilla.org/MPL/2.0/. -JAR_MANIFESTS += ['jar.mn'] - -DEFINES['MENUBAR_CAN_AUTOHIDE'] = 1 +JAR_MANIFESTS += ["jar.mn"] +DEFINES["MENUBAR_CAN_AUTOHIDE"] = 1 diff --git a/browser/themes/moz.build b/browser/themes/moz.build index 9ca868304245d4..0e706766c5027e 100644 --- a/browser/themes/moz.build +++ b/browser/themes/moz.build @@ -7,24 +7,24 @@ with Files("**"): BUG_COMPONENT = ("Firefox", "Theme") -toolkit = CONFIG['MOZ_WIDGET_TOOLKIT'] +toolkit = CONFIG["MOZ_WIDGET_TOOLKIT"] -if toolkit == 'cocoa': - DIRS += ['osx'] -elif toolkit == 'gtk': - DIRS += ['linux'] +if toolkit == "cocoa": + DIRS += ["osx"] +elif toolkit == "gtk": + DIRS += ["linux"] else: - DIRS += ['windows'] + DIRS += ["windows"] DIRS += [ - 'addons', + "addons", ] -with Files('osx/**'): - SCHEDULES.exclusive = ['macosx'] +with Files("osx/**"): + SCHEDULES.exclusive = ["macosx"] -with Files('linux/**'): - SCHEDULES.exclusive = ['linux'] +with Files("linux/**"): + SCHEDULES.exclusive = ["linux"] -with Files('windows/**'): - SCHEDULES.exclusive = ['windows'] +with Files("windows/**"): + SCHEDULES.exclusive = ["windows"] diff --git a/browser/themes/osx/moz.build b/browser/themes/osx/moz.build index 7daa419f1faa08..d988c0ff9b162c 100644 --- a/browser/themes/osx/moz.build +++ b/browser/themes/osx/moz.build @@ -4,5 +4,4 @@ # License, v. 2.0. If a copy of the MPL was not distributed with this # file, You can obtain one at http://mozilla.org/MPL/2.0/. -JAR_MANIFESTS += ['jar.mn'] - +JAR_MANIFESTS += ["jar.mn"] diff --git a/browser/themes/windows/moz.build b/browser/themes/windows/moz.build index 5f26f477c3daf3..20e4e3af75f523 100644 --- a/browser/themes/windows/moz.build +++ b/browser/themes/windows/moz.build @@ -4,7 +4,6 @@ # License, v. 2.0. If a copy of the MPL was not distributed with this # file, You can obtain one at http://mozilla.org/MPL/2.0/. 
-JAR_MANIFESTS += ['jar.mn'] - -DEFINES['MENUBAR_CAN_AUTOHIDE'] = 1 +JAR_MANIFESTS += ["jar.mn"] +DEFINES["MENUBAR_CAN_AUTOHIDE"] = 1 diff --git a/browser/tools/mozscreenshots/moz.build b/browser/tools/mozscreenshots/moz.build index a0405fc562ce2d..a95940a208ecc2 100644 --- a/browser/tools/mozscreenshots/moz.build +++ b/browser/tools/mozscreenshots/moz.build @@ -22,16 +22,16 @@ with Files("preferences/**"): BROWSER_CHROME_MANIFESTS += [ # Each test is in it's own directory so it gets run in a clean profile with # run-by-dir. - 'controlCenter/browser.ini', - 'devtools/browser.ini', - 'permissionPrompts/browser.ini', - 'preferences/browser.ini', - 'primaryUI/browser.ini', - 'tests/browser/browser.ini', + "controlCenter/browser.ini", + "devtools/browser.ini", + "permissionPrompts/browser.ini", + "preferences/browser.ini", + "primaryUI/browser.ini", + "tests/browser/browser.ini", ] TEST_DIRS += [ - 'mozscreenshots/extension', + "mozscreenshots/extension", ] -XPCSHELL_TESTS_MANIFESTS += ['tests/xpcshell/xpcshell.ini'] +XPCSHELL_TESTS_MANIFESTS += ["tests/xpcshell/xpcshell.ini"] diff --git a/browser/tools/mozscreenshots/mozscreenshots/extension/moz.build b/browser/tools/mozscreenshots/mozscreenshots/extension/moz.build index dff4fc998a4cac..3b96b08ee1ab83 100644 --- a/browser/tools/mozscreenshots/mozscreenshots/extension/moz.build +++ b/browser/tools/mozscreenshots/mozscreenshots/extension/moz.build @@ -4,55 +4,55 @@ # License, v. 2.0. If a copy of the MPL was not distributed with this # file, You can obtain one at http://mozilla.org/MPL/2.0/. -XPI_NAME = 'mozscreenshots' +XPI_NAME = "mozscreenshots" USE_EXTENSION_MANIFEST = True FINAL_TARGET_FILES += [ - 'api.js', - 'manifest.json', - 'schema.json', + "api.js", + "manifest.json", + "schema.json", ] FINAL_TARGET_FILES.resources += [ - 'Screenshot.jsm', - 'TestRunner.jsm', + "Screenshot.jsm", + "TestRunner.jsm", ] FINAL_TARGET_FILES.resources.configurations += [ - 'configurations/AppMenu.jsm', - 'configurations/Buttons.jsm', - 'configurations/ControlCenter.jsm', - 'configurations/CustomizeMode.jsm', - 'configurations/DevTools.jsm', - 'configurations/LightweightThemes.jsm', - 'configurations/PermissionPrompts.jsm', - 'configurations/Preferences.jsm', - 'configurations/Tabs.jsm', - 'configurations/TabsInTitlebar.jsm', - 'configurations/Toolbars.jsm', - 'configurations/UIDensities.jsm', - 'configurations/WindowSize.jsm', + "configurations/AppMenu.jsm", + "configurations/Buttons.jsm", + "configurations/ControlCenter.jsm", + "configurations/CustomizeMode.jsm", + "configurations/DevTools.jsm", + "configurations/LightweightThemes.jsm", + "configurations/PermissionPrompts.jsm", + "configurations/Preferences.jsm", + "configurations/Tabs.jsm", + "configurations/TabsInTitlebar.jsm", + "configurations/Toolbars.jsm", + "configurations/UIDensities.jsm", + "configurations/WindowSize.jsm", ] FINAL_TARGET_FILES.resources.lib += [ - 'lib/borderify.xpi', - 'lib/mozscreenshots-script.js', - 'lib/mozscreenshots-style.css', - 'lib/mozscreenshots.html', - 'lib/permissionPrompts.html', - 'lib/robot.png', - 'lib/robot_center.png', - 'lib/robot_cropped_diagonal.png', - 'lib/robot_diagonal.png', - 'lib/robot_uncropped.png', - 'lib/robot_upperleft.png', + "lib/borderify.xpi", + "lib/mozscreenshots-script.js", + "lib/mozscreenshots-style.css", + "lib/mozscreenshots.html", + "lib/permissionPrompts.html", + "lib/robot.png", + "lib/robot_center.png", + "lib/robot_cropped_diagonal.png", + "lib/robot_diagonal.png", + "lib/robot_uncropped.png", + "lib/robot_upperleft.png", ] 
FINAL_TARGET_FILES.resources.lib.controlCenter += [ - 'lib/controlCenter/mixed.html', - 'lib/controlCenter/mixed_active.html', - 'lib/controlCenter/mixed_passive.html', - 'lib/controlCenter/password.html', - 'lib/controlCenter/tracking.html', + "lib/controlCenter/mixed.html", + "lib/controlCenter/mixed_active.html", + "lib/controlCenter/mixed_passive.html", + "lib/controlCenter/password.html", + "lib/controlCenter/tracking.html", ] diff --git a/build/RunCbindgen.py b/build/RunCbindgen.py index d40683ad8589bd..0c941eaaca2138 100644 --- a/build/RunCbindgen.py +++ b/build/RunCbindgen.py @@ -26,8 +26,8 @@ def _get_crate_name(crate_path): def _run_process(args): env = os.environ.copy() - env['CARGO'] = str(buildconfig.substs['CARGO']) - env['RUSTC'] = str(buildconfig.substs['RUSTC']) + env["CARGO"] = str(buildconfig.substs["CARGO"]) + env["RUSTC"] = str(buildconfig.substs["RUSTC"]) p = subprocess.Popen(args, env=env, stdout=subprocess.PIPE, stderr=subprocess.PIPE) @@ -41,15 +41,17 @@ def _run_process(args): def generate_metadata(output, cargo_config): - stdout, returncode = _run_process([ - buildconfig.substs['CARGO'], - "metadata", - "--all-features", - "--format-version", - "1", - "--manifest-path", - CARGO_TOML - ]) + stdout, returncode = _run_process( + [ + buildconfig.substs["CARGO"], + "metadata", + "--all-features", + "--format-version", + "1", + "--manifest-path", + CARGO_TOML, + ] + ) if returncode != 0: return returncode @@ -62,17 +64,19 @@ def generate_metadata(output, cargo_config): def generate(output, metadata_path, cbindgen_crate_path, *in_tree_dependencies): - stdout, returncode = _run_process([ - buildconfig.substs['CBINDGEN'], - buildconfig.topsrcdir, - "--lockfile", - CARGO_LOCK, - "--crate", - _get_crate_name(cbindgen_crate_path), - "--metadata", - metadata_path, - "--cpp-compat" - ]) + stdout, returncode = _run_process( + [ + buildconfig.substs["CBINDGEN"], + buildconfig.topsrcdir, + "--lockfile", + CARGO_LOCK, + "--crate", + _get_crate_name(cbindgen_crate_path), + "--metadata", + metadata_path, + "--cpp-compat", + ] + ) if returncode != 0: return returncode diff --git a/build/appini_header.py b/build/appini_header.py index 1cbfe90e202875..08bd22721df92e 100644 --- a/build/appini_header.py +++ b/build/appini_header.py @@ -2,8 +2,8 @@ # License, v. 2.0. If a copy of the MPL was not distributed with this # file, You can obtain one at http://mozilla.org/MPL/2.0/. 
-'''Parses a given application.ini file and outputs the corresponding - StaticXREAppData structure as a C++ header file''' +"""Parses a given application.ini file and outputs the corresponding + StaticXREAppData structure as a C++ header file""" import configparser import sys @@ -14,40 +14,54 @@ def main(output, file): config.read(file) flags = set() try: - if config.getint('XRE', 'EnableProfileMigrator') == 1: - flags.add('NS_XRE_ENABLE_PROFILE_MIGRATOR') + if config.getint("XRE", "EnableProfileMigrator") == 1: + flags.add("NS_XRE_ENABLE_PROFILE_MIGRATOR") except Exception: pass try: - if config.getint('Crash Reporter', 'Enabled') == 1: - flags.add('NS_XRE_ENABLE_CRASH_REPORTER') + if config.getint("Crash Reporter", "Enabled") == 1: + flags.add("NS_XRE_ENABLE_CRASH_REPORTER") except Exception: pass - appdata = dict(("%s:%s" % (s, o), config.get(s, o)) - for s in config.sections() for o in config.options(s)) - appdata['flags'] = ' | '.join(sorted(flags)) if flags else '0' - appdata['App:profile'] = ('"%s"' % appdata['App:profile'] - if 'App:profile' in appdata else 'NULL') - expected = ('App:vendor', 'App:name', 'App:remotingname', 'App:version', 'App:buildid', - 'App:id', 'Gecko:minversion', 'Gecko:maxversion') + appdata = dict( + ("%s:%s" % (s, o), config.get(s, o)) + for s in config.sections() + for o in config.options(s) + ) + appdata["flags"] = " | ".join(sorted(flags)) if flags else "0" + appdata["App:profile"] = ( + '"%s"' % appdata["App:profile"] if "App:profile" in appdata else "NULL" + ) + expected = ( + "App:vendor", + "App:name", + "App:remotingname", + "App:version", + "App:buildid", + "App:id", + "Gecko:minversion", + "Gecko:maxversion", + ) missing = [var for var in expected if var not in appdata] if missing: - print("Missing values in %s: %s" % (file, ', '.join(missing)), - file=sys.stderr) + print("Missing values in %s: %s" % (file, ", ".join(missing)), file=sys.stderr) sys.exit(1) - if 'Crash Reporter:serverurl' not in appdata: - appdata['Crash Reporter:serverurl'] = '' + if "Crash Reporter:serverurl" not in appdata: + appdata["Crash Reporter:serverurl"] = "" - if 'App:sourcerepository' in appdata and 'App:sourcestamp' in appdata: - appdata['App:sourceurl'] = '"%(App:sourcerepository)s/rev/%(App:sourcestamp)s"' % appdata + if "App:sourcerepository" in appdata and "App:sourcestamp" in appdata: + appdata["App:sourceurl"] = ( + '"%(App:sourcerepository)s/rev/%(App:sourcestamp)s"' % appdata + ) else: - appdata['App:sourceurl'] = 'NULL' + appdata["App:sourceurl"] = "NULL" - if 'AppUpdate:url' not in appdata: - appdata['AppUpdate:url'] = '' + if "AppUpdate:url" not in appdata: + appdata["AppUpdate:url"] = "" - output.write('''#include "mozilla/XREAppData.h" + output.write( + """#include "mozilla/XREAppData.h" static const mozilla::StaticXREAppData sAppData = { "%(App:vendor)s", "%(App:name)s", @@ -64,12 +78,13 @@ def main(output, file): NULL, // UAName %(App:sourceurl)s, "%(AppUpdate:url)s" - };''' % appdata) + };""" + % appdata + ) -if __name__ == '__main__': +if __name__ == "__main__": if len(sys.argv) != 1: main(sys.stdout, sys.argv[1]) else: - print("Usage: %s /path/to/application.ini" % sys.argv[0], - file=sys.stderr) + print("Usage: %s /path/to/application.ini" % sys.argv[0], file=sys.stderr) diff --git a/build/build-clang/build-clang.py b/build/build-clang/build-clang.py index 86af69dba6174d..a4d6d93f3142fd 100755 --- a/build/build-clang/build-clang.py +++ b/build/build-clang/build-clang.py @@ -38,8 +38,8 @@ def symlink(source, link_name): def check_run(args): - print(' 
'.join(args), file=sys.stderr, flush=True) - if args[0] == 'cmake': + print(" ".join(args), file=sys.stderr, flush=True) + if args[0] == "cmake": # CMake `message(STATUS)` messages, as appearing in failed source code # compiles, appear on stdout, so we only capture that. p = subprocess.Popen(args, stdout=subprocess.PIPE) @@ -50,8 +50,8 @@ def check_run(args): sys.stdout.flush() r = p.wait() if r != 0: - cmake_output_re = re.compile(b"See also \"(.*/CMakeOutput.log)\"") - cmake_error_re = re.compile(b"See also \"(.*/CMakeError.log)\"") + cmake_output_re = re.compile(b'See also "(.*/CMakeOutput.log)"') + cmake_error_re = re.compile(b'See also "(.*/CMakeError.log)"') def find_first_match(re): for l in lines: @@ -63,9 +63,10 @@ def find_first_match(re): error_match = find_first_match(cmake_error_re) def dump_file(log): - with open(log, 'rb') as f: + with open(log, "rb") as f: print("\nContents of", log, "follow\n", file=sys.stderr) print(f.read(), file=sys.stderr) + if output_match: dump_file(output_match.group(1)) if error_match: @@ -94,20 +95,18 @@ def chdir(path): def patch(patch, srcdir): patch = os.path.realpath(patch) - check_run(['patch', '-d', srcdir, '-p1', '-i', patch, '--fuzz=0', - '-s']) + check_run(["patch", "-d", srcdir, "-p1", "-i", patch, "--fuzz=0", "-s"]) def import_clang_tidy(source_dir, build_clang_tidy_alpha, build_clang_tidy_external): - clang_plugin_path = os.path.join(os.path.dirname(sys.argv[0]), - '..', 'clang-plugin') - clang_tidy_path = os.path.join(source_dir, - 'clang-tools-extra/clang-tidy') + clang_plugin_path = os.path.join(os.path.dirname(sys.argv[0]), "..", "clang-plugin") + clang_tidy_path = os.path.join(source_dir, "clang-tools-extra/clang-tidy") sys.path.append(clang_plugin_path) from import_mozilla_checks import do_import + import_options = { - "alpha": build_clang_tidy_alpha, - "external": build_clang_tidy_external + "alpha": build_clang_tidy_alpha, + "external": build_clang_tidy_external, } do_import(clang_plugin_path, clang_tidy_path, import_options) @@ -138,7 +137,7 @@ def updated_env(env): def build_tar_package(name, base, directory): name = os.path.realpath(name) - print('tarring {} from {}/{}'.format(name, base, directory), file=sys.stderr) + print("tarring {} from {}/{}".format(name, base, directory), file=sys.stderr) assert name.endswith(".tar.zst") cctx = zstandard.ZstdCompressor() @@ -167,7 +166,7 @@ def delete(path): def install_libgcc(gcc_dir, clang_dir, is_final_stage): - gcc_bin_dir = os.path.join(gcc_dir, 'bin') + gcc_bin_dir = os.path.join(gcc_dir, "bin") # Copy over gcc toolchain bits that clang looks for, to ensure that # clang is using a consistent version of ld, since the system ld may @@ -178,17 +177,22 @@ def install_libgcc(gcc_dir, clang_dir, is_final_stage): # Only install this for the bootstrap process; we expect any consumers of # the newly-built toolchain to provide an appropriate ld themselves. 
if not is_final_stage: - x64_bin_dir = os.path.join(clang_dir, 'x86_64-unknown-linux-gnu', 'bin') + x64_bin_dir = os.path.join(clang_dir, "x86_64-unknown-linux-gnu", "bin") mkdir_p(x64_bin_dir) - shutil.copy2(os.path.join(gcc_bin_dir, 'ld'), x64_bin_dir) + shutil.copy2(os.path.join(gcc_bin_dir, "ld"), x64_bin_dir) - out = subprocess.check_output([os.path.join(gcc_bin_dir, "gcc"), - '-print-libgcc-file-name']) + out = subprocess.check_output( + [os.path.join(gcc_bin_dir, "gcc"), "-print-libgcc-file-name"] + ) libgcc_dir = os.path.dirname(out.decode().rstrip()) - clang_lib_dir = os.path.join(clang_dir, "lib", "gcc", - "x86_64-unknown-linux-gnu", - os.path.basename(libgcc_dir)) + clang_lib_dir = os.path.join( + clang_dir, + "lib", + "gcc", + "x86_64-unknown-linux-gnu", + os.path.basename(libgcc_dir), + ) mkdir_p(clang_lib_dir) copy_tree(libgcc_dir, clang_lib_dir, preserve_symlinks=True) libgcc_dir = os.path.join(gcc_dir, "lib64") @@ -203,14 +207,16 @@ def install_libgcc(gcc_dir, clang_dir, is_final_stage): def install_import_library(build_dir, clang_dir): - shutil.copy2(os.path.join(build_dir, "lib", "clang.lib"), - os.path.join(clang_dir, "lib")) + shutil.copy2( + os.path.join(build_dir, "lib", "clang.lib"), os.path.join(clang_dir, "lib") + ) def install_asan_symbols(build_dir, clang_dir): lib_path_pattern = os.path.join("lib", "clang", "*.*.*", "lib", "windows") - src_path = glob.glob(os.path.join(build_dir, lib_path_pattern, - "clang_rt.asan_dynamic-*.pdb")) + src_path = glob.glob( + os.path.join(build_dir, lib_path_pattern, "clang_rt.asan_dynamic-*.pdb") + ) dst_path = glob.glob(os.path.join(clang_dir, lib_path_pattern)) if len(src_path) != 1: @@ -234,14 +240,33 @@ def is_windows(): return platform.system() == "Windows" -def build_one_stage(cc, cxx, asm, ld, ar, ranlib, libtool, - src_dir, stage_dir, package_name, build_libcxx, - osx_cross_compile, build_type, assertions, - python_path, gcc_dir, libcxx_include_dir, build_wasm, - compiler_rt_source_dir=None, runtimes_source_link=None, - compiler_rt_source_link=None, - is_final_stage=False, android_targets=None, - extra_targets=None, pgo_phase=None): +def build_one_stage( + cc, + cxx, + asm, + ld, + ar, + ranlib, + libtool, + src_dir, + stage_dir, + package_name, + build_libcxx, + osx_cross_compile, + build_type, + assertions, + python_path, + gcc_dir, + libcxx_include_dir, + build_wasm, + compiler_rt_source_dir=None, + runtimes_source_link=None, + compiler_rt_source_link=None, + is_final_stage=False, + android_targets=None, + extra_targets=None, + pgo_phase=None, +): if is_final_stage and (android_targets or extra_targets): # Linking compiler-rt under "runtimes" activates LLVM_RUNTIME_TARGETS # and related arguments. @@ -259,7 +284,7 @@ def build_one_stage(cc, cxx, asm, ld, ar, ranlib, libtool, # cmake doesn't deal well with backslashes in paths. 
def slashify_path(path): - return path.replace('\\', '/') + return path.replace("\\", "/") def cmake_base_args(cc, cxx, asm, ld, ar, ranlib, libtool, inst_dir): machine_targets = "X86;ARM;AArch64" if is_final_stage else "X86" @@ -270,11 +295,11 @@ def cmake_base_args(cc, cxx, asm, ld, ar, ranlib, libtool, inst_dir): "-DCMAKE_ASM_COMPILER=%s" % slashify_path(asm[0]), "-DCMAKE_LINKER=%s" % slashify_path(ld[0]), "-DCMAKE_AR=%s" % slashify_path(ar), - "-DCMAKE_C_FLAGS=%s" % ' '.join(cc[1:]), - "-DCMAKE_CXX_FLAGS=%s" % ' '.join(cxx[1:]), - "-DCMAKE_ASM_FLAGS=%s" % ' '.join(asm[1:]), - "-DCMAKE_EXE_LINKER_FLAGS=%s" % ' '.join(ld[1:]), - "-DCMAKE_SHARED_LINKER_FLAGS=%s" % ' '.join(ld[1:]), + "-DCMAKE_C_FLAGS=%s" % " ".join(cc[1:]), + "-DCMAKE_CXX_FLAGS=%s" % " ".join(cxx[1:]), + "-DCMAKE_ASM_FLAGS=%s" % " ".join(asm[1:]), + "-DCMAKE_EXE_LINKER_FLAGS=%s" % " ".join(ld[1:]), + "-DCMAKE_SHARED_LINKER_FLAGS=%s" % " ".join(ld[1:]), "-DCMAKE_BUILD_TYPE=%s" % build_type, "-DCMAKE_INSTALL_PREFIX=%s" % inst_dir, "-DLLVM_TARGETS_TO_BUILD=%s" % machine_targets, @@ -316,7 +341,7 @@ def cmake_base_args(cc, cxx, asm, ld, ar, ranlib, libtool, inst_dir): "-DCMAKE_OSX_ARCHITECTURES=x86_64", "-DDARWIN_osx_ARCHS=x86_64", "-DDARWIN_osx_SYSROOT=%s" % slashify_path(os.getenv("CROSS_SYSROOT")), - "-DLLVM_DEFAULT_TARGET_TRIPLE=x86_64-apple-darwin" + "-DLLVM_DEFAULT_TARGET_TRIPLE=x86_64-apple-darwin", ] # Starting in LLVM 11 (which requires SDK 10.12) the build tries to # detect the SDK version by calling xcrun. Cross-compiles don't have @@ -380,8 +405,9 @@ def cmake_base_args(cc, cxx, asm, ld, ar, ranlib, libtool, inst_dir): android_include_dirs = cfg["ndk_includes"] api_level = cfg["api_level"] - android_flags = ["-isystem %s" % d.format(**os.environ) - for d in android_include_dirs] + android_flags = [ + "-isystem %s" % d.format(**os.environ) for d in android_include_dirs + ] android_flags += ["--gcc-toolchain=%s" % android_gcc_dir] android_flags += ["-D__ANDROID_API__=%s" % api_level] @@ -391,24 +417,21 @@ def cmake_base_args(cc, cxx, asm, ld, ar, ranlib, libtool, inst_dir): rt_cxx_flags = " ".join(cxx[1:] + android_flags) rt_asm_flags = " ".join(asm[1:] + android_flags) - for kind in ('BUILTINS', 'RUNTIMES'): + for kind in ("BUILTINS", "RUNTIMES"): for var, arg in ( - ('ANDROID', '1'), - ('CMAKE_ASM_FLAGS', rt_asm_flags), - ('CMAKE_CXX_FLAGS', rt_cxx_flags), - ('CMAKE_C_FLAGS', rt_c_flags), - ('CMAKE_EXE_LINKER_FLAGS', android_link_flags), - ('CMAKE_SHARED_LINKER_FLAGS', android_link_flags), - ('CMAKE_SYSROOT', sysroot_dir), - ('ANDROID_NATIVE_API_LEVEL', api_level), + ("ANDROID", "1"), + ("CMAKE_ASM_FLAGS", rt_asm_flags), + ("CMAKE_CXX_FLAGS", rt_cxx_flags), + ("CMAKE_C_FLAGS", rt_c_flags), + ("CMAKE_EXE_LINKER_FLAGS", android_link_flags), + ("CMAKE_SHARED_LINKER_FLAGS", android_link_flags), + ("CMAKE_SYSROOT", sysroot_dir), + ("ANDROID_NATIVE_API_LEVEL", api_level), ): - cmake_args += ['-D%s_%s_%s=%s' % (kind, target, var, arg)] + cmake_args += ["-D%s_%s_%s=%s" % (kind, target, var, arg)] - cmake_args += cmake_base_args( - cc, cxx, asm, ld, ar, ranlib, libtool, inst_dir) - cmake_args += [ - src_dir - ] + cmake_args += cmake_base_args(cc, cxx, asm, ld, ar, ranlib, libtool, inst_dir) + cmake_args += [src_dir] build_package(build_dir, cmake_args) if is_linux(): @@ -425,35 +448,40 @@ def cmake_base_args(cc, cxx, asm, ld, ar, ranlib, libtool, inst_dir): # 64-bits, which we detect through the contents of the LIB # environment variable, which we also adjust for a 32-bits build # at the same time. 
- old_lib = os.environ['LIB'] + old_lib = os.environ["LIB"] new_lib = [] for l in old_lib.split(os.pathsep): - if l.endswith('x64'): - l = l[:-3] + 'x86' + if l.endswith("x64"): + l = l[:-3] + "x86" build_32_bit = True - elif l.endswith('amd64'): + elif l.endswith("amd64"): l = l[:-5] build_32_bit = True new_lib.append(l) if build_32_bit: - os.environ['LIB'] = os.pathsep.join(new_lib) - compiler_rt_build_dir = stage_dir + '/compiler-rt' - compiler_rt_inst_dir = inst_dir + '/lib/clang/' + os.environ["LIB"] = os.pathsep.join(new_lib) + compiler_rt_build_dir = stage_dir + "/compiler-rt" + compiler_rt_inst_dir = inst_dir + "/lib/clang/" subdirs = os.listdir(compiler_rt_inst_dir) assert len(subdirs) == 1 compiler_rt_inst_dir += subdirs[0] cmake_args = cmake_base_args( - [os.path.join(inst_dir, 'bin', 'clang-cl.exe'), '-m32'] + cc[1:], - [os.path.join(inst_dir, 'bin', 'clang-cl.exe'), '-m32'] + cxx[1:], - [os.path.join(inst_dir, 'bin', 'clang-cl.exe'), '-m32'] + asm[1:], - ld, ar, ranlib, libtool, compiler_rt_inst_dir) + [os.path.join(inst_dir, "bin", "clang-cl.exe"), "-m32"] + cc[1:], + [os.path.join(inst_dir, "bin", "clang-cl.exe"), "-m32"] + cxx[1:], + [os.path.join(inst_dir, "bin", "clang-cl.exe"), "-m32"] + asm[1:], + ld, + ar, + ranlib, + libtool, + compiler_rt_inst_dir, + ) cmake_args += [ - '-DLLVM_CONFIG_PATH=%s' % slashify_path( - os.path.join(inst_dir, 'bin', 'llvm-config')), - os.path.join(src_dir, 'projects', 'compiler-rt'), + "-DLLVM_CONFIG_PATH=%s" + % slashify_path(os.path.join(inst_dir, "bin", "llvm-config")), + os.path.join(src_dir, "projects", "compiler-rt"), ] build_package(compiler_rt_build_dir, cmake_args) - os.environ['LIB'] = old_lib + os.environ["LIB"] = old_lib if is_final_stage: install_import_library(build_dir, inst_dir) install_asan_symbols(build_dir, inst_dir) @@ -512,7 +540,16 @@ def get_tool(config, key): # run-clang-tidy.py def prune_final_dir_for_clang_tidy(final_dir, osx_cross_compile): # Make sure we only have what we expect. 
- dirs = ["bin", "include", "lib", "lib32", "libexec", "msbuild-bin", "share", "tools"] + dirs = [ + "bin", + "include", + "lib", + "lib32", + "libexec", + "msbuild-bin", + "share", + "tools", + ] if is_linux(): dirs.append("x86_64-unknown-linux-gnu") for f in glob.glob("%s/*" % final_dir): @@ -521,7 +558,7 @@ def prune_final_dir_for_clang_tidy(final_dir, osx_cross_compile): if not os.path.isdir(f): raise Exception("Expected %s to be a directory" % f) - kept_binaries = ['clang-apply-replacements', 'clang-format', 'clang-tidy', 'clangd'] + kept_binaries = ["clang-apply-replacements", "clang-format", "clang-tidy", "clangd"] re_clang_tidy = re.compile(r"^(" + "|".join(kept_binaries) + r")(\.exe)?$", re.I) for f in glob.glob("%s/bin/*" % final_dir): if re_clang_tidy.search(os.path.basename(f)) is None: @@ -540,10 +577,12 @@ def prune_final_dir_for_clang_tidy(final_dir, osx_cross_compile): name = os.path.basename(f) if name == "clang": continue - if osx_cross_compile and name in ['libLLVM.dylib', 'libclang-cpp.dylib']: + if osx_cross_compile and name in ["libLLVM.dylib", "libclang-cpp.dylib"]: continue - if is_linux() and (fnmatch.fnmatch(name, 'libLLVM*.so') or - fnmatch.fnmatch(name, 'libclang-cpp.so*')): + if is_linux() and ( + fnmatch.fnmatch(name, "libLLVM*.so") + or fnmatch.fnmatch(name, "libclang-cpp.so*") + ): continue delete(f) for f in glob.glob("%s/lib/clang/*" % final_dir): @@ -572,24 +611,35 @@ def prune_final_dir_for_clang_tidy(final_dir, osx_cross_compile): if __name__ == "__main__": parser = argparse.ArgumentParser() - parser.add_argument('-c', '--config', required=True, - type=argparse.FileType('r'), - help="Clang configuration file") - parser.add_argument('--clean', required=False, - action='store_true', - help="Clean the build directory") - parser.add_argument('--skip-tar', required=False, - action='store_true', - help="Skip tar packaging stage") - parser.add_argument('--skip-checkout', required=False, - action='store_true', - help="Do not checkout/revert source") + parser.add_argument( + "-c", + "--config", + required=True, + type=argparse.FileType("r"), + help="Clang configuration file", + ) + parser.add_argument( + "--clean", required=False, action="store_true", help="Clean the build directory" + ) + parser.add_argument( + "--skip-tar", + required=False, + action="store_true", + help="Skip tar packaging stage", + ) + parser.add_argument( + "--skip-checkout", + required=False, + action="store_true", + help="Do not checkout/revert source", + ) args = parser.parse_args() - if not os.path.exists('llvm/LLVMBuild.txt'): - raise Exception('The script must be run from the root directory of the ' - 'llvm-project tree') + if not os.path.exists("llvm/LLVMBuild.txt"): + raise Exception( + "The script must be run from the root directory of the " "llvm-project tree" + ) source_dir = os.getcwd() build_dir = source_dir + "/build" @@ -634,8 +684,10 @@ def prune_final_dir_for_clang_tidy(final_dir, osx_cross_compile): if "build_type" in config: build_type = config["build_type"] if build_type not in ("Release", "Debug", "RelWithDebInfo", "MinSizeRel"): - raise ValueError("We only know how to do Release, Debug, RelWithDebInfo or " - "MinSizeRel builds") + raise ValueError( + "We only know how to do Release, Debug, RelWithDebInfo or " + "MinSizeRel builds" + ) build_libcxx = False if "build_libcxx" in config: build_libcxx = config["build_libcxx"] @@ -656,13 +708,17 @@ def prune_final_dir_for_clang_tidy(final_dir, osx_cross_compile): if build_clang_tidy and "build_clang_tidy_alpha" in config: 
build_clang_tidy_alpha = config["build_clang_tidy_alpha"] if build_clang_tidy_alpha not in (True, False): - raise ValueError("Only boolean values are accepted for build_clang_tidy_alpha.") + raise ValueError( + "Only boolean values are accepted for build_clang_tidy_alpha." + ) build_clang_tidy_external = False # check for build_clang_tidy_external only if build_clang_tidy is true if build_clang_tidy and "build_clang_tidy_external" in config: build_clang_tidy_external = config["build_clang_tidy_external"] if build_clang_tidy_external not in (True, False): - raise ValueError("Only boolean values are accepted for build_clang_tidy_external.") + raise ValueError( + "Only boolean values are accepted for build_clang_tidy_external." + ) osx_cross_compile = False if "osx_cross_compile" in config: osx_cross_compile = config["osx_cross_compile"] @@ -691,8 +747,10 @@ def prune_final_dir_for_clang_tidy(final_dir, osx_cross_compile): for attr in ("ndk_toolchain", "ndk_sysroot", "ndk_includes", "api_level"): for target, cfg in android_targets.items(): if attr not in cfg: - raise ValueError("must specify '%s' as a key for android target: %s" % - (attr, target)) + raise ValueError( + "must specify '%s' as a key for android target: %s" + % (attr, target) + ) extra_targets = None if "extra_targets" in config: extra_targets = config["extra_targets"] @@ -705,7 +763,7 @@ def prune_final_dir_for_clang_tidy(final_dir, osx_cross_compile): raise ValueError("Config file needs to set gcc_dir") if is_darwin() or osx_cross_compile: - os.environ['MACOSX_DEPLOYMENT_TARGET'] = '10.11' + os.environ["MACOSX_DEPLOYMENT_TARGET"] = "10.11" cc = get_tool(config, "cc") cxx = get_tool(config, "cxx") @@ -725,17 +783,14 @@ def prune_final_dir_for_clang_tidy(final_dir, osx_cross_compile): compiler_rt_source_link = llvm_source_dir + "/projects/compiler-rt" - symlinks = [(clang_source_dir, - llvm_source_dir + "/tools/clang"), - (extra_source_dir, - llvm_source_dir + "/tools/clang/tools/extra"), - (lld_source_dir, - llvm_source_dir + "/tools/lld"), - (compiler_rt_source_dir, compiler_rt_source_link), - (libcxx_source_dir, - llvm_source_dir + "/projects/libcxx"), - (libcxxabi_source_dir, - llvm_source_dir + "/projects/libcxxabi")] + symlinks = [ + (clang_source_dir, llvm_source_dir + "/tools/clang"), + (extra_source_dir, llvm_source_dir + "/tools/clang/tools/extra"), + (lld_source_dir, llvm_source_dir + "/tools/lld"), + (compiler_rt_source_dir, compiler_rt_source_link), + (libcxx_source_dir, llvm_source_dir + "/projects/libcxx"), + (libcxxabi_source_dir, llvm_source_dir + "/projects/libcxxabi"), + ] for l in symlinks: # On Windows, we have to re-copy the whole directory every time. if not is_windows() and os.path.islink(l[1]): @@ -752,11 +807,10 @@ def prune_final_dir_for_clang_tidy(final_dir, osx_cross_compile): if not os.path.exists(build_dir): os.makedirs(build_dir) - libcxx_include_dir = os.path.join(llvm_source_dir, "projects", - "libcxx", "include") + libcxx_include_dir = os.path.join(llvm_source_dir, "projects", "libcxx", "include") - stage1_dir = build_dir + '/stage1' - stage1_inst_dir = stage1_dir + '/' + package_name + stage1_dir = build_dir + "/stage1" + stage1_inst_dir = stage1_dir + "/" + package_name final_stage_dir = stage1_dir final_inst_dir = stage1_inst_dir @@ -775,24 +829,31 @@ def prune_final_dir_for_clang_tidy(final_dir, osx_cross_compile): # up whatever headers were installed from the gcc we used to build stage1, # always, rather than the system headers. Providing -gcc-toolchain # encourages clang to do that. 
- extra_cflags2 = ["-fPIC", '-gcc-toolchain', stage1_inst_dir] + extra_cflags2 = ["-fPIC", "-gcc-toolchain", stage1_inst_dir] # Silence clang's warnings about arguments not being used in compilation. - extra_cxxflags2 = ["-fPIC", '-Qunused-arguments', '-gcc-toolchain', stage1_inst_dir] + extra_cxxflags2 = [ + "-fPIC", + "-Qunused-arguments", + "-gcc-toolchain", + stage1_inst_dir, + ] extra_asmflags = [] # Avoid libLLVM internal function calls going through the PLT. - extra_ldflags = ['-Wl,-Bsymbolic-functions'] + extra_ldflags = ["-Wl,-Bsymbolic-functions"] # For whatever reason, LLVM's build system will set things up to turn # on -ffunction-sections and -fdata-sections, but won't turn on the # corresponding option to strip unused sections. We do it explicitly # here. LLVM's build system is also picky about turning on ICF, so # we do that explicitly here, too. - extra_ldflags += ['-fuse-ld=gold', '-Wl,--gc-sections', '-Wl,--icf=safe'] + extra_ldflags += ["-fuse-ld=gold", "-Wl,--gc-sections", "-Wl,--icf=safe"] - if 'LD_LIBRARY_PATH' in os.environ: - os.environ['LD_LIBRARY_PATH'] = ('%s/lib64/:%s' % - (gcc_dir, os.environ['LD_LIBRARY_PATH'])) + if "LD_LIBRARY_PATH" in os.environ: + os.environ["LD_LIBRARY_PATH"] = "%s/lib64/:%s" % ( + gcc_dir, + os.environ["LD_LIBRARY_PATH"], + ) else: - os.environ['LD_LIBRARY_PATH'] = '%s/lib64/' % gcc_dir + os.environ["LD_LIBRARY_PATH"] = "%s/lib64/" % gcc_dir elif is_windows(): extra_cflags = [] extra_cxxflags = [] @@ -800,7 +861,11 @@ def prune_final_dir_for_clang_tidy(final_dir, osx_cross_compile): # by looking at an MSVC install, but we don't really have that here. # Force things on. extra_cflags2 = [] - extra_cxxflags2 = ['-fms-compatibility-version=19.13.26128', '-Xclang', '-std=c++14'] + extra_cxxflags2 = [ + "-fms-compatibility-version=19.13.26128", + "-Xclang", + "-std=c++14", + ] extra_asmflags = [] extra_ldflags = [] @@ -811,124 +876,186 @@ def prune_final_dir_for_clang_tidy(final_dir, osx_cross_compile): extra_cxxflags = ["-stdlib=libc++"] extra_cxxflags2 = ["-stdlib=libc++"] - extra_flags = ["-target", "x86_64-apple-darwin", "-mlinker-version=137", - "-B", "%s/bin" % os.getenv("CROSS_CCTOOLS_PATH"), - "-isysroot", os.getenv("CROSS_SYSROOT"), - # technically the sysroot flag there should be enough to deduce this, - # but clang needs some help to figure this out. - "-I%s/usr/include" % os.getenv("CROSS_SYSROOT"), - "-iframework", "%s/System/Library/Frameworks" % os.getenv("CROSS_SYSROOT")] + extra_flags = [ + "-target", + "x86_64-apple-darwin", + "-mlinker-version=137", + "-B", + "%s/bin" % os.getenv("CROSS_CCTOOLS_PATH"), + "-isysroot", + os.getenv("CROSS_SYSROOT"), + # technically the sysroot flag there should be enough to deduce this, + # but clang needs some help to figure this out. 
+ "-I%s/usr/include" % os.getenv("CROSS_SYSROOT"), + "-iframework", + "%s/System/Library/Frameworks" % os.getenv("CROSS_SYSROOT"), + ] extra_cflags += extra_flags extra_cxxflags += extra_flags extra_cflags2 += extra_flags extra_cxxflags2 += extra_flags extra_asmflags += extra_flags - extra_ldflags = ["-Wl,-syslibroot,%s" % os.getenv("CROSS_SYSROOT"), - "-Wl,-dead_strip"] + extra_ldflags = [ + "-Wl,-syslibroot,%s" % os.getenv("CROSS_SYSROOT"), + "-Wl,-dead_strip", + ] - upload_dir = os.getenv('UPLOAD_DIR') + upload_dir = os.getenv("UPLOAD_DIR") if assertions and upload_dir: - extra_cflags2 += ['-fcrash-diagnostics-dir=%s' % upload_dir] - extra_cxxflags2 += ['-fcrash-diagnostics-dir=%s' % upload_dir] + extra_cflags2 += ["-fcrash-diagnostics-dir=%s" % upload_dir] + extra_cxxflags2 += ["-fcrash-diagnostics-dir=%s" % upload_dir] build_one_stage( [cc] + extra_cflags, [cxx] + extra_cxxflags, [asm] + extra_asmflags, [ld] + extra_ldflags, - ar, ranlib, libtool, - llvm_source_dir, stage1_dir, package_name, build_libcxx, osx_cross_compile, - build_type, assertions, python_path, gcc_dir, libcxx_include_dir, build_wasm, - is_final_stage=(stages == 1)) + ar, + ranlib, + libtool, + llvm_source_dir, + stage1_dir, + package_name, + build_libcxx, + osx_cross_compile, + build_type, + assertions, + python_path, + gcc_dir, + libcxx_include_dir, + build_wasm, + is_final_stage=(stages == 1), + ) runtimes_source_link = llvm_source_dir + "/runtimes/compiler-rt" if stages >= 2: - stage2_dir = build_dir + '/stage2' - stage2_inst_dir = stage2_dir + '/' + package_name + stage2_dir = build_dir + "/stage2" + stage2_inst_dir = stage2_dir + "/" + package_name final_stage_dir = stage2_dir final_inst_dir = stage2_inst_dir pgo_phase = "gen" if pgo else None build_one_stage( - [stage1_inst_dir + "/bin/%s%s" % - (cc_name, exe_ext)] + extra_cflags2, - [stage1_inst_dir + "/bin/%s%s" % - (cxx_name, exe_ext)] + extra_cxxflags2, - [stage1_inst_dir + "/bin/%s%s" % - (cc_name, exe_ext)] + extra_asmflags, + [stage1_inst_dir + "/bin/%s%s" % (cc_name, exe_ext)] + extra_cflags2, + [stage1_inst_dir + "/bin/%s%s" % (cxx_name, exe_ext)] + extra_cxxflags2, + [stage1_inst_dir + "/bin/%s%s" % (cc_name, exe_ext)] + extra_asmflags, [ld] + extra_ldflags, - ar, ranlib, libtool, - llvm_source_dir, stage2_dir, package_name, build_libcxx, osx_cross_compile, - build_type, assertions, python_path, gcc_dir, libcxx_include_dir, build_wasm, - compiler_rt_source_dir, runtimes_source_link, compiler_rt_source_link, - is_final_stage=(stages == 2), android_targets=android_targets, - extra_targets=extra_targets, pgo_phase=pgo_phase) + ar, + ranlib, + libtool, + llvm_source_dir, + stage2_dir, + package_name, + build_libcxx, + osx_cross_compile, + build_type, + assertions, + python_path, + gcc_dir, + libcxx_include_dir, + build_wasm, + compiler_rt_source_dir, + runtimes_source_link, + compiler_rt_source_link, + is_final_stage=(stages == 2), + android_targets=android_targets, + extra_targets=extra_targets, + pgo_phase=pgo_phase, + ) if stages >= 3: - stage3_dir = build_dir + '/stage3' - stage3_inst_dir = stage3_dir + '/' + package_name + stage3_dir = build_dir + "/stage3" + stage3_inst_dir = stage3_dir + "/" + package_name final_stage_dir = stage3_dir final_inst_dir = stage3_inst_dir build_one_stage( - [stage2_inst_dir + "/bin/%s%s" % - (cc_name, exe_ext)] + extra_cflags2, - [stage2_inst_dir + "/bin/%s%s" % - (cxx_name, exe_ext)] + extra_cxxflags2, - [stage2_inst_dir + "/bin/%s%s" % - (cc_name, exe_ext)] + extra_asmflags, + [stage2_inst_dir + "/bin/%s%s" % 
(cc_name, exe_ext)] + extra_cflags2, + [stage2_inst_dir + "/bin/%s%s" % (cxx_name, exe_ext)] + extra_cxxflags2, + [stage2_inst_dir + "/bin/%s%s" % (cc_name, exe_ext)] + extra_asmflags, [ld] + extra_ldflags, - ar, ranlib, libtool, - llvm_source_dir, stage3_dir, package_name, build_libcxx, osx_cross_compile, - build_type, assertions, python_path, gcc_dir, libcxx_include_dir, build_wasm, - compiler_rt_source_dir, runtimes_source_link, compiler_rt_source_link, - (stages == 3), extra_targets=extra_targets) + ar, + ranlib, + libtool, + llvm_source_dir, + stage3_dir, + package_name, + build_libcxx, + osx_cross_compile, + build_type, + assertions, + python_path, + gcc_dir, + libcxx_include_dir, + build_wasm, + compiler_rt_source_dir, + runtimes_source_link, + compiler_rt_source_link, + (stages == 3), + extra_targets=extra_targets, + ) if stages >= 4: - stage4_dir = build_dir + '/stage4' - stage4_inst_dir = stage4_dir + '/' + package_name + stage4_dir = build_dir + "/stage4" + stage4_inst_dir = stage4_dir + "/" + package_name final_stage_dir = stage4_dir final_inst_dir = stage4_inst_dir pgo_phase = None if pgo: pgo_phase = "use" llvm_profdata = stage3_inst_dir + "/bin/llvm-profdata%s" % exe_ext - merge_cmd = [llvm_profdata, 'merge', '-o', 'merged.profdata'] - profraw_files = glob.glob(os.path.join(stage2_dir, 'build', - 'profiles', '*.profraw')) + merge_cmd = [llvm_profdata, "merge", "-o", "merged.profdata"] + profraw_files = glob.glob( + os.path.join(stage2_dir, "build", "profiles", "*.profraw") + ) if not os.path.exists(stage4_dir): os.mkdir(stage4_dir) run_in(stage4_dir, merge_cmd + profraw_files) build_one_stage( - [stage3_inst_dir + "/bin/%s%s" % - (cc_name, exe_ext)] + extra_cflags2, - [stage3_inst_dir + "/bin/%s%s" % - (cxx_name, exe_ext)] + extra_cxxflags2, - [stage3_inst_dir + "/bin/%s%s" % - (cc_name, exe_ext)] + extra_asmflags, + [stage3_inst_dir + "/bin/%s%s" % (cc_name, exe_ext)] + extra_cflags2, + [stage3_inst_dir + "/bin/%s%s" % (cxx_name, exe_ext)] + extra_cxxflags2, + [stage3_inst_dir + "/bin/%s%s" % (cc_name, exe_ext)] + extra_asmflags, [ld] + extra_ldflags, - ar, ranlib, libtool, - llvm_source_dir, stage4_dir, package_name, build_libcxx, osx_cross_compile, - build_type, assertions, python_path, gcc_dir, libcxx_include_dir, build_wasm, - compiler_rt_source_dir, runtimes_source_link, compiler_rt_source_link, - (stages == 4), extra_targets=extra_targets, pgo_phase=pgo_phase) + ar, + ranlib, + libtool, + llvm_source_dir, + stage4_dir, + package_name, + build_libcxx, + osx_cross_compile, + build_type, + assertions, + python_path, + gcc_dir, + libcxx_include_dir, + build_wasm, + compiler_rt_source_dir, + runtimes_source_link, + compiler_rt_source_link, + (stages == 4), + extra_targets=extra_targets, + pgo_phase=pgo_phase, + ) if build_clang_tidy: - prune_final_dir_for_clang_tidy(os.path.join(final_stage_dir, package_name), - osx_cross_compile) + prune_final_dir_for_clang_tidy( + os.path.join(final_stage_dir, package_name), osx_cross_compile + ) # Copy the wasm32 builtins to the final_inst_dir if the archive is present. if "wasi-sysroot" in config: sysroot = config["wasi-sysroot"].format(**os.environ) if os.path.isdir(sysroot): for srcdir in glob.glob( - os.path.join(sysroot, "lib", "clang", "*", "lib", "wasi")): + os.path.join(sysroot, "lib", "clang", "*", "lib", "wasi") + ): print("Copying from wasi-sysroot srcdir %s" % srcdir) # Copy the contents of the "lib/wasi" subdirectory to the # appropriate location in final_inst_dir. 
- version = os.path.basename(os.path.dirname(os.path.dirname( - srcdir))) - destdir = os.path.join(final_inst_dir, "lib", "clang", version, - "lib", "wasi") + version = os.path.basename(os.path.dirname(os.path.dirname(srcdir))) + destdir = os.path.join( + final_inst_dir, "lib", "clang", version, "lib", "wasi" + ) mkdir_p(destdir) copy_tree(srcdir, destdir) diff --git a/build/build-infer/build-infer.py b/build/build-infer/build-infer.py index 9226ed86038f9b..e9a4804ce08f66 100755 --- a/build/build-infer/build-infer.py +++ b/build/build-infer/build-infer.py @@ -13,14 +13,14 @@ def check_run(args, path): - print(' '.join(args) + ' in ' + path, file=sys.stderr) + print(" ".join(args) + " in " + path, file=sys.stderr) subprocess.run(args, cwd=path, check=True) def run_in(path, args, extra_env=None): - ''' + """ Runs the given commands in the directory specified by . - ''' + """ env = dict(os.environ) env.update(extra_env or {}) check_run(args, path) @@ -29,43 +29,50 @@ def run_in(path, args, extra_env=None): def build_tar_package(tar, name, base, directories): name = os.path.realpath(name) - run_in(base, [tar, - '-c', - '-%s' % ('J' if '.xz' in name else 'j'), - '-f', - name] + directories) + run_in( + base, + [tar, "-c", "-%s" % ("J" if ".xz" in name else "j"), "-f", name] + directories, + ) def is_git_repo(dir): - '''Check whether the given directory is a git repository.''' + """Check whether the given directory is a git repository.""" from subprocess import CalledProcessError + try: - check_run(['git', 'rev-parse'], dir) + check_run(["git", "rev-parse"], dir) return True except CalledProcessError: return False def git_clone(main_dir, url, clone_dir, commit): - ''' + """ Clones the repository from into , and brings the repository to the state of . - ''' - run_in(main_dir, ['git', 'clone', url, clone_dir]) - run_in(clone_dir, ['git', 'checkout', commit]) + """ + run_in(main_dir, ["git", "clone", url, clone_dir]) + run_in(clone_dir, ["git", "checkout", commit]) -if __name__ == '__main__': +if __name__ == "__main__": parser = argparse.ArgumentParser() - parser.add_argument('-c', '--config', required=True, - type=argparse.FileType('r'), - help='Infer configuration file') - parser.add_argument('-b', '--base-dir', - help="Base directory for code and build artifacts") - parser.add_argument('--clean', action='store_true', - help='Clean the build directory') - parser.add_argument('--skip-tar', action='store_true', - help='Skip tar packaging stage') + parser.add_argument( + "-c", + "--config", + required=True, + type=argparse.FileType("r"), + help="Infer configuration file", + ) + parser.add_argument( + "-b", "--base-dir", help="Base directory for code and build artifacts" + ) + parser.add_argument( + "--clean", action="store_true", help="Clean the build directory" + ) + parser.add_argument( + "--skip-tar", action="store_true", help="Skip tar packaging stage" + ) args = parser.parse_args() @@ -75,20 +82,20 @@ def git_clone(main_dir, url, clone_dir, commit): if args.base_dir: base_dir = args.base_dir else: - base_dir = reduce(os.path.join, - [os.sep + 'builds', 'worker', - 'workspace', 'moz-toolchain']) - infer_dir = os.path.join(base_dir, 'infer') - source_dir = os.path.join(infer_dir, 'src') - build_dir = os.path.join(infer_dir, 'build') + base_dir = reduce( + os.path.join, [os.sep + "builds", "worker", "workspace", "moz-toolchain"] + ) + infer_dir = os.path.join(base_dir, "infer") + source_dir = os.path.join(infer_dir, "src") + build_dir = os.path.join(infer_dir, "build") if args.clean: 
shutil.rmtree(build_dir) os.sys.exit(0) config = json.load(args.config) - infer_revision = config['infer_revision'] - infer_repo = config['infer_repo'] + infer_revision = config["infer_revision"] + infer_repo = config["infer_repo"] for folder in [infer_dir, source_dir, build_dir]: os.makedirs(folder, exist_ok=True) @@ -102,31 +109,44 @@ def git_clone(main_dir, url, clone_dir, commit): # apply a few patches dir_path = os.path.dirname(os.path.realpath(__file__)) # clean the git directory by reseting all changes - git_commands = [['clean', '-f'], ['reset', '--hard']] + git_commands = [["clean", "-f"], ["reset", "--hard"]] for command in git_commands: - run_in(source_dir, ['git']+command) - for p in config.get('patches', []): - run_in(source_dir, ['git', 'apply', os.path.join(dir_path, p)]) + run_in(source_dir, ["git"] + command) + for p in config.get("patches", []): + run_in(source_dir, ["git", "apply", os.path.join(dir_path, p)]) # configure opam - run_in(source_dir, ['opam', 'init', '--no-setup', '--disable-sandboxing']) + run_in(source_dir, ["opam", "init", "--no-setup", "--disable-sandboxing"]) # build infer - run_in(source_dir, ['./build-infer.sh', 'java'], - extra_env={'NO_CMAKE_STRIP': '1'}) + run_in(source_dir, ["./build-infer.sh", "java"], extra_env={"NO_CMAKE_STRIP": "1"}) - package_name = 'infer' + package_name = "infer" infer_package = os.path.join(os.getcwd(), package_name) # We need to create a package with all of the depended libraries injected in it - run_in(source_dir, ['make', 'install-with-libs', 'BUILD_MODE=opt', - 'PATCHELF=patchelf', 'DESTDIR={}'.format(infer_package), - 'libdir_relative_to_bindir=../lib']) - - - infer_package_with_pref = os.path.join(infer_package, 'usr') + run_in( + source_dir, + [ + "make", + "install-with-libs", + "BUILD_MODE=opt", + "PATCHELF=patchelf", + "DESTDIR={}".format(infer_package), + "libdir_relative_to_bindir=../lib", + ], + ) + + infer_package_with_pref = os.path.join(infer_package, "usr") if not args.skip_tar: - os.rename(os.path.join(infer_package_with_pref, 'local'), - os.path.join(infer_package_with_pref, 'infer')) - build_tar_package('tar', '%s.tar.xz' % (package_name), - infer_package_with_pref, - [os.path.join('infer', 'bin'), - os.path.join('infer', 'lib'), - os.path.join('infer', 'share')]) + os.rename( + os.path.join(infer_package_with_pref, "local"), + os.path.join(infer_package_with_pref, "infer"), + ) + build_tar_package( + "tar", + "%s.tar.xz" % (package_name), + infer_package_with_pref, + [ + os.path.join("infer", "bin"), + os.path.join("infer", "lib"), + os.path.join("infer", "share"), + ], + ) diff --git a/build/buildconfig.py b/build/buildconfig.py index 63f5d590663ed9..3fcc82ffdb5d0e 100644 --- a/build/buildconfig.py +++ b/build/buildconfig.py @@ -9,10 +9,10 @@ config = MozbuildObject.from_environment() partial_config = PartialConfigEnvironment(config.topobjdir) -for var in ('topsrcdir', 'topobjdir'): +for var in ("topsrcdir", "topobjdir"): value = getattr(config, var) setattr(sys.modules[__name__], var, value) -for var in ('defines', 'substs', 'get_dependencies'): +for var in ("defines", "substs", "get_dependencies"): value = getattr(partial_config, var) setattr(sys.modules[__name__], var, value) diff --git a/build/checksums.py b/build/checksums.py index 08524deebd5312..d82c7211175ff4 100755 --- a/build/checksums.py +++ b/build/checksums.py @@ -10,35 +10,35 @@ import logging import os -logger = logging.getLogger('checksums.py') +logger = logging.getLogger("checksums.py") def digest_file(filename, digest, 
chunk_size=131072): - '''Produce a checksum for the file specified by 'filename'. 'filename' + """Produce a checksum for the file specified by 'filename'. 'filename' is a string path to a file that is opened and read in this function. The checksum algorithm is specified by 'digest' and is a valid OpenSSL algorithm. If the digest used is not valid or Python's hashlib doesn't work, the None object will be returned instead. The size of blocks that this function will read from the file object it opens based on - 'filename' can be specified by 'chunk_size', which defaults to 1K''' - assert not os.path.isdir(filename), 'this function only works with files' + 'filename' can be specified by 'chunk_size', which defaults to 1K""" + assert not os.path.isdir(filename), "this function only works with files" - logger.debug('Creating new %s object' % digest) + logger.debug("Creating new %s object" % digest) h = hashlib.new(digest) - with open(filename, 'rb') as f: + with open(filename, "rb") as f: while True: data = f.read(chunk_size) if not data: - logger.debug('Finished reading in file') + logger.debug("Finished reading in file") break h.update(data) hash = h.hexdigest() - logger.debug('Hash for %s is %s' % (filename, hash)) + logger.debug("Hash for %s is %s" % (filename, hash)) return hash def process_files(dirs, output_filename, digests): - '''This function takes a list of directory names, 'drs'. It will then + """This function takes a list of directory names, 'drs'. It will then compute the checksum for each of the files in these by by opening the files. Once each file is read and its checksum is computed, this function will write the information to the file specified by 'output_filename'. @@ -51,14 +51,13 @@ def process_files(dirs, output_filename, digests): Example: d1fa09ae4220 sha1 14250744 firefox-4.0b6pre.en-US.mac64.dmg - ''' + """ if os.path.exists(output_filename): - logger.debug('Overwriting existing checksums file "%s"' % - output_filename) + logger.debug('Overwriting existing checksums file "%s"' % output_filename) else: logger.debug('Creating a new checksums file "%s"' % output_filename) - with open(output_filename, 'w+') as output: + with open(output_filename, "w+") as output: for d in dirs: for root, dirs, files in os.walk(d): for f in files: @@ -68,21 +67,21 @@ def process_files(dirs, output_filename, digests): for digest in digests: hash = digest_file(full, digest) - output.write('%s %s %s %s\n' % ( - hash, digest, os.path.getsize(full), rel)) + output.write( + "%s %s %s %s\n" % (hash, digest, os.path.getsize(full), rel) + ) def setup_logging(level=logging.DEBUG): - '''This function sets up the logging module using a speficiable logging + """This function sets up the logging module using a speficiable logging module logging level. The default log level is DEBUG. 
The output is in the format: - Example: - DEBUG - Finished reading in file -''' + DEBUG - Finished reading in file""" - logger = logging.getLogger('checksums.py') + logger = logging.getLogger("checksums.py") logger.setLevel(logging.DEBUG) handler = logging.StreamHandler() handler.setLevel(level) @@ -92,19 +91,41 @@ def setup_logging(level=logging.DEBUG): def main(): - '''This is a main function that parses arguments, sets up logging - and generates a checksum file''' + """This is a main function that parses arguments, sets up logging + and generates a checksum file""" # Parse command line arguments parser = OptionParser() - parser.add_option('-d', '--digest', help='checksum algorithm to use', - action='append', dest='digests') - parser.add_option('-o', '--output', help='output file to use', - action='store', dest='outfile', default='checksums') - parser.add_option('-v', '--verbose', - help='Be noisy (takes precedence over quiet)', - action='store_true', dest='verbose', default=False) - parser.add_option('-q', '--quiet', help='Be quiet', action='store_true', - dest='quiet', default=False) + parser.add_option( + "-d", + "--digest", + help="checksum algorithm to use", + action="append", + dest="digests", + ) + parser.add_option( + "-o", + "--output", + help="output file to use", + action="store", + dest="outfile", + default="checksums", + ) + parser.add_option( + "-v", + "--verbose", + help="Be noisy (takes precedence over quiet)", + action="store_true", + dest="verbose", + default=False, + ) + parser.add_option( + "-q", + "--quiet", + help="Be quiet", + action="store_true", + dest="quiet", + default=False, + ) options, args = parser.parse_args() @@ -121,15 +142,15 @@ def main(): # Validate the digest type to use if not options.digests: - options.digests = ['sha1'] + options.digests = ["sha1"] for i in args: if not os.path.isdir(i): - logger.error('%s is not a directory' % i) + logger.error("%s is not a directory" % i) exit(1) process_files(args, options.outfile, options.digests) -if __name__ == '__main__': +if __name__ == "__main__": main() diff --git a/build/clang-plugin/ThirdPartyPaths.py b/build/clang-plugin/ThirdPartyPaths.py index d004577c10718c..caaa919d43f280 100644 --- a/build/clang-plugin/ThirdPartyPaths.py +++ b/build/clang-plugin/ThirdPartyPaths.py @@ -18,12 +18,13 @@ def generate(output, *input_paths): for line in lines: line = line.strip() - if line.endswith('/'): + if line.endswith("/"): line = line[:-1] tpp_list.append(line) - tpp_strings = ',\n '.join([json.dumps(tpp) for tpp in sorted(tpp_list)]) + tpp_strings = ",\n ".join([json.dumps(tpp) for tpp in sorted(tpp_list)]) - output.write("""\ + output.write( + """\ /* THIS FILE IS GENERATED BY ThirdPartyPaths.py - DO NOT EDIT */ #include @@ -34,4 +35,6 @@ def generate(output, *input_paths): extern const uint32_t MOZ_THIRD_PARTY_PATHS_COUNT = %d; -""" % (tpp_strings, len(tpp_list))) +""" + % (tpp_strings, len(tpp_list)) + ) diff --git a/build/clang-plugin/ThreadAllows.py b/build/clang-plugin/ThreadAllows.py index 2cf6502ab0ea43..e45f6292546640 100644 --- a/build/clang-plugin/ThreadAllows.py +++ b/build/clang-plugin/ThreadAllows.py @@ -3,19 +3,19 @@ # You can obtain one at http://mozilla.org/MPL/2.0/. import json -FIRST_LINE = '// This file was generated by generate_thread_allows.py. DO NOT EDIT.' +FIRST_LINE = "// This file was generated by generate_thread_allows.py. DO NOT EDIT." 
def generate_allows(input_paths): """ - This script reads in the ThreadAllows.txt and ThreadFileAllows.txt lists - and generates a header file containing a two arrays of allowed threads. - These can be the following formats: - -Files which the checker should ignore - These files either contain definitions of NS_NewNamedThread or - use args which the plugin can't cast (such as func args). - -Thread names which the checker should ignore - Specifies which individual thread names to ignore. + This script reads in the ThreadAllows.txt and ThreadFileAllows.txt lists + and generates a header file containing a two arrays of allowed threads. + These can be the following formats: + -Files which the checker should ignore + These files either contain definitions of NS_NewNamedThread or + use args which the plugin can't cast (such as func args). + -Thread names which the checker should ignore + Specifies which individual thread names to ignore. """ file_list = [] name_list = [] @@ -27,17 +27,19 @@ def generate_allows(input_paths): for line in sorted(lines): """ - We are assuming lines ending in .cpp, .h are files. Threads should - NOT have names containing filenames. Please don't do that. + We are assuming lines ending in .cpp, .h are files. Threads should + NOT have names containing filenames. Please don't do that. """ line = line.strip() - if line.endswith('.cpp') or line.endswith('.h'): + if line.endswith(".cpp") or line.endswith(".h"): file_list.append(line) else: name_list.append(line) - file_list_s = ',\n '.join(json.dumps(elem) for elem in file_list) - name_list_s = ',\n '.join(json.dumps(elem) for elem in name_list) - output_string = FIRST_LINE + """ + file_list_s = ",\n ".join(json.dumps(elem) for elem in file_list) + name_list_s = ",\n ".join(json.dumps(elem) for elem in name_list) + output_string = ( + FIRST_LINE + + """ static const char *allow_thread_files[] = { %s @@ -47,7 +49,9 @@ def generate_allows(input_paths): %s }; - """ % (file_list_s, name_list_s) + """ + % (file_list_s, name_list_s) + ) return output_string diff --git a/build/clang-plugin/import_mozilla_checks.py b/build/clang-plugin/import_mozilla_checks.py index b4a4e002c0a996..2c2c5a42b9630a 100755 --- a/build/clang-plugin/import_mozilla_checks.py +++ b/build/clang-plugin/import_mozilla_checks.py @@ -30,22 +30,27 @@ def copy_dir_contents(src, dest): os.remove(destname) shutil.copy2(f, destname) else: - raise Exception('Directory not copied. Error: %s' % e) + raise Exception("Directory not copied. 
Error: %s" % e) def write_cmake(module_path, import_options): - names = [' ' + os.path.basename(f) for f in glob.glob("%s/*.cpp" % module_path)] + names = [" " + os.path.basename(f) for f in glob.glob("%s/*.cpp" % module_path)] if import_options["external"]: - names += [' ' + os.path.join("external", os.path.basename(f)) - for f in glob.glob("%s/external/*.cpp" % (module_path))] + names += [ + " " + os.path.join("external", os.path.basename(f)) + for f in glob.glob("%s/external/*.cpp" % (module_path)) + ] if import_options["alpha"]: - names += [' ' + os.path.join("alpha", os.path.basename(f)) - for f in glob.glob("%s/alpha/*.cpp" % (module_path))] + names += [ + " " + os.path.join("alpha", os.path.basename(f)) + for f in glob.glob("%s/alpha/*.cpp" % (module_path)) + ] - with open(os.path.join(module_path, 'CMakeLists.txt'), 'w') as f: - f.write("""set(LLVM_LINK_COMPONENTS support) + with open(os.path.join(module_path, "CMakeLists.txt"), "w") as f: + f.write( + """set(LLVM_LINK_COMPONENTS support) add_definitions( -DCLANG_TIDY ) @@ -62,47 +67,43 @@ def write_cmake(module_path, import_options): clangTidyReadabilityModule clangTidyUtils clangTidyMPIModule - )""" % {'names': "\n".join(names)}) + )""" + % {"names": "\n".join(names)} + ) def add_moz_module(cmake_path): - with open(cmake_path, 'r') as f: + with open(cmake_path, "r") as f: lines = f.readlines() f.close() try: - idx = lines.index('set(ALL_CLANG_TIDY_CHECKS\n') - lines.insert(idx + 1, ' clangTidyMozillaModule\n') + idx = lines.index("set(ALL_CLANG_TIDY_CHECKS\n") + lines.insert(idx + 1, " clangTidyMozillaModule\n") - with open(cmake_path, 'w') as f: + with open(cmake_path, "w") as f: for line in lines: f.write(line) except ValueError: - raise Exception('Unable to find ALL_CLANG_TIDY_CHECKS in {}'.format(cmake_path)) + raise Exception("Unable to find ALL_CLANG_TIDY_CHECKS in {}".format(cmake_path)) def write_third_party_paths(mozilla_path, module_path): - tpp_txt = os.path.join( - mozilla_path, '../../tools/rewriting/ThirdPartyPaths.txt') - generated_txt = os.path.join( - mozilla_path, '../../tools/rewriting/Generated.txt') - with open(os.path.join(module_path, 'ThirdPartyPaths.cpp'), 'w') as f: + tpp_txt = os.path.join(mozilla_path, "../../tools/rewriting/ThirdPartyPaths.txt") + generated_txt = os.path.join(mozilla_path, "../../tools/rewriting/Generated.txt") + with open(os.path.join(module_path, "ThirdPartyPaths.cpp"), "w") as f: ThirdPartyPaths.generate(f, tpp_txt, generated_txt) def generate_thread_allows(mozilla_path, module_path): - names = os.path.join( - mozilla_path, '../../build/clang-plugin/ThreadAllows.txt' - ) - files = os.path.join( - mozilla_path, '../../build/clang-plugin/ThreadFileAllows.txt' - ) - with open(os.path.join(module_path, 'ThreadAllows.h'), 'w') as f: + names = os.path.join(mozilla_path, "../../build/clang-plugin/ThreadAllows.txt") + files = os.path.join(mozilla_path, "../../build/clang-plugin/ThreadFileAllows.txt") + with open(os.path.join(module_path, "ThreadAllows.h"), "w") as f: f.write(ThreadAllows.generate_allows({files, names})) def do_import(mozilla_path, clang_tidy_path, import_options): - module = 'mozilla' + module = "mozilla" module_path = os.path.join(clang_tidy_path, module) try: os.makedirs(module_path) @@ -114,18 +115,20 @@ def do_import(mozilla_path, clang_tidy_path, import_options): write_third_party_paths(mozilla_path, module_path) generate_thread_allows(mozilla_path, module_path) write_cmake(module_path, import_options) - add_moz_module(os.path.join(module_path, '..', 
'CMakeLists.txt')) - with open(os.path.join(module_path, '..', 'CMakeLists.txt'), 'a') as f: - f.write('add_subdirectory(%s)\n' % module) + add_moz_module(os.path.join(module_path, "..", "CMakeLists.txt")) + with open(os.path.join(module_path, "..", "CMakeLists.txt"), "a") as f: + f.write("add_subdirectory(%s)\n" % module) # A better place for this would be in `ClangTidyForceLinker.h` but `ClangTidyMain.cpp` # is also OK. - with open(os.path.join(module_path, '..', 'tool', 'ClangTidyMain.cpp'), 'a') as f: - f.write(''' + with open(os.path.join(module_path, "..", "tool", "ClangTidyMain.cpp"), "a") as f: + f.write( + """ // This anchor is used to force the linker to link the MozillaModule. extern volatile int MozillaModuleAnchorSource; static int LLVM_ATTRIBUTE_UNUSED MozillaModuleAnchorDestination = MozillaModuleAnchorSource; -''') +""" + ) def main(): @@ -133,18 +136,24 @@ def main(): parser = argparse.ArgumentParser( usage="import_mozilla_checks.py [option]", - description="Imports the Mozilla static analysis checks into a clang-tidy source tree." + description="Imports the Mozilla static analysis checks into a clang-tidy source tree.", + ) + parser.add_argument( + "mozilla_path", help="Full path to mozilla-central/build/clang-plugin" + ) + parser.add_argument( + "clang_tidy_path", help="Full path to llvm-project/clang-tools-extra/clang-tidy" + ) + parser.add_argument( + "--import-alpha", + help="Enable import of in-tree alpha checks", + action="store_true", + ) + parser.add_argument( + "--import-external", + help="Enable import of in-tree external checks", + action="store_true", ) - parser.add_argument('mozilla_path', - help="Full path to mozilla-central/build/clang-plugin") - parser.add_argument('clang_tidy_path', - help="Full path to llvm-project/clang-tools-extra/clang-tidy") - parser.add_argument('--import-alpha', - help="Enable import of in-tree alpha checks", - action="store_true") - parser.add_argument('--import-external', - help="Enable import of in-tree external checks", - action="store_true") args = parser.parse_args() if not os.path.isdir(args.mozilla_path): @@ -153,13 +162,10 @@ def main(): if not os.path.isdir(args.clang_tidy_path): print("Invalid path to clang-tidy source directory") - import_options = { - "alpha": args.import_alpha, - "external": args.import_external - } + import_options = {"alpha": args.import_alpha, "external": args.import_external} do_import(args.mozilla_path, args.clang_tidy_path, import_options) -if __name__ == '__main__': +if __name__ == "__main__": main() diff --git a/build/clang-plugin/moz.build b/build/clang-plugin/moz.build index 00e20cc162bceb..47d780054c996d 100644 --- a/build/clang-plugin/moz.build +++ b/build/clang-plugin/moz.build @@ -4,111 +4,119 @@ # License, v. 2.0. If a copy of the MPL was not distributed with this # file, You can obtain one at http://mozilla.org/MPL/2.0/. 
-HostSharedLibrary('clang-plugin') +HostSharedLibrary("clang-plugin") -HOST_SOURCES += ['!ThirdPartyPaths.cpp'] +HOST_SOURCES += ["!ThirdPartyPaths.cpp"] HOST_SOURCES += [ - 'ArithmeticArgChecker.cpp', - 'AssertAssignmentChecker.cpp', - 'CanRunScriptChecker.cpp', - 'CustomAttributes.cpp', - 'CustomTypeAnnotation.cpp', - 'DanglingOnTemporaryChecker.cpp', - 'DiagnosticsMatcher.cpp', - 'ExplicitImplicitChecker.cpp', - 'ExplicitOperatorBoolChecker.cpp', - 'KungFuDeathGripChecker.cpp', - 'MozCheckAction.cpp', - 'MustOverrideChecker.cpp', - 'MustReturnFromCallerChecker.cpp', - 'MustUseChecker.cpp', - 'NaNExprChecker.cpp', - 'NeedsNoVTableTypeChecker.cpp', - 'NoAddRefReleaseOnReturnChecker.cpp', - 'NoAutoTypeChecker.cpp', - 'NoDuplicateRefCntMemberChecker.cpp', - 'NoExplicitMoveConstructorChecker.cpp', - 'NoNewThreadsChecker.cpp', - 'NonMemMovableMemberChecker.cpp', - 'NonMemMovableTemplateArgChecker.cpp', - 'NonParamInsideFunctionDeclChecker.cpp', - 'NonTrivialTypeInFfiChecker.cpp', - 'NoPrincipalGetURI.cpp', - 'NoUsingNamespaceMozillaJavaChecker.cpp', - 'OverrideBaseCallChecker.cpp', - 'OverrideBaseCallUsageChecker.cpp', - 'ParamTraitsEnumChecker.cpp', - 'RefCountedCopyConstructorChecker.cpp', - 'RefCountedInsideLambdaChecker.cpp', - 'ScopeChecker.cpp', - 'SprintfLiteralChecker.cpp', - 'TemporaryLifetimeBoundChecker.cpp', - 'TrivialCtorDtorChecker.cpp', - 'TrivialDtorChecker.cpp', - 'VariableUsageHelpers.cpp', + "ArithmeticArgChecker.cpp", + "AssertAssignmentChecker.cpp", + "CanRunScriptChecker.cpp", + "CustomAttributes.cpp", + "CustomTypeAnnotation.cpp", + "DanglingOnTemporaryChecker.cpp", + "DiagnosticsMatcher.cpp", + "ExplicitImplicitChecker.cpp", + "ExplicitOperatorBoolChecker.cpp", + "KungFuDeathGripChecker.cpp", + "MozCheckAction.cpp", + "MustOverrideChecker.cpp", + "MustReturnFromCallerChecker.cpp", + "MustUseChecker.cpp", + "NaNExprChecker.cpp", + "NeedsNoVTableTypeChecker.cpp", + "NoAddRefReleaseOnReturnChecker.cpp", + "NoAutoTypeChecker.cpp", + "NoDuplicateRefCntMemberChecker.cpp", + "NoExplicitMoveConstructorChecker.cpp", + "NoNewThreadsChecker.cpp", + "NonMemMovableMemberChecker.cpp", + "NonMemMovableTemplateArgChecker.cpp", + "NonParamInsideFunctionDeclChecker.cpp", + "NonTrivialTypeInFfiChecker.cpp", + "NoPrincipalGetURI.cpp", + "NoUsingNamespaceMozillaJavaChecker.cpp", + "OverrideBaseCallChecker.cpp", + "OverrideBaseCallUsageChecker.cpp", + "ParamTraitsEnumChecker.cpp", + "RefCountedCopyConstructorChecker.cpp", + "RefCountedInsideLambdaChecker.cpp", + "ScopeChecker.cpp", + "SprintfLiteralChecker.cpp", + "TemporaryLifetimeBoundChecker.cpp", + "TrivialCtorDtorChecker.cpp", + "TrivialDtorChecker.cpp", + "VariableUsageHelpers.cpp", ] # Ideally, we wouldn't have compile-time choices wrt checkes. bug 1617153. 
-if CONFIG['OS_ARCH'] == 'WINNT': - HOST_DEFINES['TARGET_IS_WINDOWS'] = True +if CONFIG["OS_ARCH"] == "WINNT": + HOST_DEFINES["TARGET_IS_WINDOWS"] = True HOST_SOURCES += [ - 'FopenUsageChecker.cpp', - 'LoadLibraryUsageChecker.cpp', + "FopenUsageChecker.cpp", + "LoadLibraryUsageChecker.cpp", ] -if CONFIG['ENABLE_MOZSEARCH_PLUGIN']: +if CONFIG["ENABLE_MOZSEARCH_PLUGIN"]: HOST_SOURCES += [ - 'mozsearch-plugin/FileOperations.cpp', - 'mozsearch-plugin/JSONFormatter.cpp', - 'mozsearch-plugin/MozsearchIndexer.cpp', - 'mozsearch-plugin/StringOperations.cpp', + "mozsearch-plugin/FileOperations.cpp", + "mozsearch-plugin/JSONFormatter.cpp", + "mozsearch-plugin/MozsearchIndexer.cpp", + "mozsearch-plugin/StringOperations.cpp", ] -GeneratedFile('ThirdPartyPaths.cpp', script="ThirdPartyPaths.py", - entry_point="generate", inputs=[ - '/tools/rewriting/ThirdPartyPaths.txt', - '/tools/rewriting/Generated.txt', - ]) - -GeneratedFile('ThreadAllows.h', script="ThreadAllows.py", - entry_point="generate_file", inputs=[ - '/build/clang-plugin/ThreadAllows.txt', - '/build/clang-plugin/ThreadFileAllows.txt' - ]) - -HOST_COMPILE_FLAGS['STL'] = [] -HOST_COMPILE_FLAGS['VISIBILITY'] = [] +GeneratedFile( + "ThirdPartyPaths.cpp", + script="ThirdPartyPaths.py", + entry_point="generate", + inputs=[ + "/tools/rewriting/ThirdPartyPaths.txt", + "/tools/rewriting/Generated.txt", + ], +) + +GeneratedFile( + "ThreadAllows.h", + script="ThreadAllows.py", + entry_point="generate_file", + inputs=[ + "/build/clang-plugin/ThreadAllows.txt", + "/build/clang-plugin/ThreadFileAllows.txt", + ], +) + +HOST_COMPILE_FLAGS["STL"] = [] +HOST_COMPILE_FLAGS["VISIBILITY"] = [] # libc++ is required to build plugins against clang on OS X. -if CONFIG['HOST_OS_ARCH'] == 'Darwin': - HOST_CXXFLAGS += ['-stdlib=libc++'] +if CONFIG["HOST_OS_ARCH"] == "Darwin": + HOST_CXXFLAGS += ["-stdlib=libc++"] # As of clang 8, llvm-config doesn't output the flags used to build clang # itself, so we don't end up with -fPIC as a side effect. llvm.org/PR8220 -if CONFIG['HOST_OS_ARCH'] != 'WINNT': - HOST_CXXFLAGS += ['-fPIC'] +if CONFIG["HOST_OS_ARCH"] != "WINNT": + HOST_CXXFLAGS += ["-fPIC"] DIRS += [ - 'tests', + "tests", ] -include('external/sources.mozbuild') +include("external/sources.mozbuild") -if CONFIG['ENABLE_CLANG_PLUGIN_ALPHA']: +if CONFIG["ENABLE_CLANG_PLUGIN_ALPHA"]: HOST_DEFINES["MOZ_CLANG_PLUGIN_ALPHA"] = "1" - include('alpha/sources.mozbuild') + include("alpha/sources.mozbuild") # In the current moz.build world, we need to override essentially every # variable to limit ourselves to what we need to build the clang plugin. -if CONFIG['HOST_OS_ARCH'] == 'WINNT': - extra_cxxflags = ['-GR-', '-EHsc'] +if CONFIG["HOST_OS_ARCH"] == "WINNT": + extra_cxxflags = ["-GR-", "-EHsc"] else: - extra_cxxflags = ['-fno-rtti', '-fno-exceptions'] + extra_cxxflags = ["-fno-rtti", "-fno-exceptions"] -if CONFIG['LLVM_CXXFLAGS']: - HOST_COMPILE_FLAGS['HOST_CXXFLAGS'] = CONFIG['LLVM_CXXFLAGS'] + extra_cxxflags +if CONFIG["LLVM_CXXFLAGS"]: + HOST_COMPILE_FLAGS["HOST_CXXFLAGS"] = CONFIG["LLVM_CXXFLAGS"] + extra_cxxflags # Avoid -DDEBUG=1 on the command line, which conflicts with a #define # DEBUG(...) in llvm headers. -DEFINES['DEBUG'] = False +DEFINES["DEBUG"] = False diff --git a/build/clang-plugin/tests/moz.build b/build/clang-plugin/tests/moz.build index bb0d533adfd28e..f826d74d45699c 100644 --- a/build/clang-plugin/tests/moz.build +++ b/build/clang-plugin/tests/moz.build @@ -5,81 +5,86 @@ # file, You can obtain one at http://mozilla.org/MPL/2.0/. 
# dummy library name to avoid skipping building the sources here. -Library('clang-plugin-tests') +Library("clang-plugin-tests") SOURCES += [ - 'TestAssertWithAssignment.cpp', - 'TestBadImplicitConversionCtor.cpp', - 'TestCanRunScript.cpp', - 'TestCustomHeap.cpp', - 'TestDanglingOnTemporary.cpp', - 'TestExplicitOperatorBool.cpp', - 'TestGlobalClass.cpp', - 'TestHeapClass.cpp', - 'TestInheritTypeAnnotationsFromTemplateArgs.cpp', - 'TestKungFuDeathGrip.cpp', - 'TestMultipleAnnotations.cpp', - 'TestMustOverride.cpp', - 'TestMustReturnFromCaller.cpp', - 'TestMustUse.cpp', - 'TestNANTestingExpr.cpp', - 'TestNANTestingExprC.c', - 'TestNeedsNoVTableType.cpp', - 'TestNoAddRefReleaseOnReturn.cpp', - 'TestNoArithmeticExprInArgument.cpp', - 'TestNoAutoType.cpp', - 'TestNoDuplicateRefCntMember.cpp', - 'TestNoExplicitMoveConstructor.cpp', - 'TestNoNewThreadsChecker.cpp', - 'TestNonHeapClass.cpp', - 'TestNonMemMovable.cpp', - 'TestNonMemMovableStd.cpp', - 'TestNonMemMovableStdAtomic.cpp', - 'TestNonParameterChecker.cpp', - 'TestNonTemporaryClass.cpp', - 'TestNonTrivialTypeInFfi.cpp', - 'TestNoPrincipalGetUri.cpp', - 'TestNoRefcountedInsideLambdas.cpp', - 'TestNoUsingNamespaceMozillaJava.cpp', - 'TestOverrideBaseCall.cpp', - 'TestOverrideBaseCallAnnotation.cpp', - 'TestParamTraitsEnum.cpp', - 'TestRefCountedCopyConstructor.cpp', - 'TestSprintfLiteral.cpp', - 'TestStackClass.cpp', - 'TestStaticLocalClass.cpp', - 'TestTemporaryClass.cpp', - 'TestTemporaryLifetimeBound.cpp', - 'TestTrivialCtorDtor.cpp', - 'TestTrivialDtor.cpp', + "TestAssertWithAssignment.cpp", + "TestBadImplicitConversionCtor.cpp", + "TestCanRunScript.cpp", + "TestCustomHeap.cpp", + "TestDanglingOnTemporary.cpp", + "TestExplicitOperatorBool.cpp", + "TestGlobalClass.cpp", + "TestHeapClass.cpp", + "TestInheritTypeAnnotationsFromTemplateArgs.cpp", + "TestKungFuDeathGrip.cpp", + "TestMultipleAnnotations.cpp", + "TestMustOverride.cpp", + "TestMustReturnFromCaller.cpp", + "TestMustUse.cpp", + "TestNANTestingExpr.cpp", + "TestNANTestingExprC.c", + "TestNeedsNoVTableType.cpp", + "TestNoAddRefReleaseOnReturn.cpp", + "TestNoArithmeticExprInArgument.cpp", + "TestNoAutoType.cpp", + "TestNoDuplicateRefCntMember.cpp", + "TestNoExplicitMoveConstructor.cpp", + "TestNoNewThreadsChecker.cpp", + "TestNonHeapClass.cpp", + "TestNonMemMovable.cpp", + "TestNonMemMovableStd.cpp", + "TestNonMemMovableStdAtomic.cpp", + "TestNonParameterChecker.cpp", + "TestNonTemporaryClass.cpp", + "TestNonTrivialTypeInFfi.cpp", + "TestNoPrincipalGetUri.cpp", + "TestNoRefcountedInsideLambdas.cpp", + "TestNoUsingNamespaceMozillaJava.cpp", + "TestOverrideBaseCall.cpp", + "TestOverrideBaseCallAnnotation.cpp", + "TestParamTraitsEnum.cpp", + "TestRefCountedCopyConstructor.cpp", + "TestSprintfLiteral.cpp", + "TestStackClass.cpp", + "TestStaticLocalClass.cpp", + "TestTemporaryClass.cpp", + "TestTemporaryLifetimeBound.cpp", + "TestTrivialCtorDtor.cpp", + "TestTrivialDtor.cpp", ] -if CONFIG['OS_ARCH'] == 'WINNT': +if CONFIG["OS_ARCH"] == "WINNT": SOURCES += [ - 'TestFopenUsage.cpp', - 'TestLoadLibraryUsage.cpp', + "TestFopenUsage.cpp", + "TestLoadLibraryUsage.cpp", ] -include('../external/tests/sources.mozbuild') +include("../external/tests/sources.mozbuild") -if CONFIG['ENABLE_CLANG_PLUGIN_ALPHA']: +if CONFIG["ENABLE_CLANG_PLUGIN_ALPHA"]: DEFINES["MOZ_CLANG_PLUGIN_ALPHA"] = "1" - include('../alpha/tests/sources.mozbuild') + include("../alpha/tests/sources.mozbuild") DisableStlWrapping() NoVisibilityFlags() # Build without any warning flags, and with clang verify flag for a # syntax-only 
build (no codegen), without a limit on the number of errors. -COMPILE_FLAGS['OS_CXXFLAGS'] = ( - [f for f in COMPILE_FLAGS.get('OS_CXXFLAGS', []) if not f.startswith('-W')] + - ['-fsyntax-only', '-Xclang', '-verify', '-ferror-limit=0', '-Wno-invalid-noreturn'] -) -COMPILE_FLAGS['OS_CFLAGS'] = ( - [f for f in COMPILE_FLAGS.get('OS_CFLAGS', []) if not f.startswith('-W')] + - ['-fsyntax-only', '-Xclang', '-verify', '-ferror-limit=0', '-Xclang', '-std=c11', - '-Wno-invalid-noreturn'] -) +COMPILE_FLAGS["OS_CXXFLAGS"] = [ + f for f in COMPILE_FLAGS.get("OS_CXXFLAGS", []) if not f.startswith("-W") +] + ["-fsyntax-only", "-Xclang", "-verify", "-ferror-limit=0", "-Wno-invalid-noreturn"] +COMPILE_FLAGS["OS_CFLAGS"] = [ + f for f in COMPILE_FLAGS.get("OS_CFLAGS", []) if not f.startswith("-W") +] + [ + "-fsyntax-only", + "-Xclang", + "-verify", + "-ferror-limit=0", + "-Xclang", + "-std=c11", + "-Wno-invalid-noreturn", +] # Don't reflect WARNINGS_CFLAGS into CFLAGS, as the warnings flags should be # as specified in OS_CFLAGS above. diff --git a/build/compare-mozconfig/compare-mozconfigs.py b/build/compare-mozconfig/compare-mozconfigs.py index 396d196d57093e..7e39d9c071c522 100644 --- a/build/compare-mozconfig/compare-mozconfigs.py +++ b/build/compare-mozconfig/compare-mozconfigs.py @@ -19,12 +19,12 @@ SUCCESS_CODE = 0 PLATFORMS = ( - 'linux32', - 'linux64', - 'macosx64', - 'win32', - 'win64', - 'win64-aarch64', + "linux32", + "linux64", + "macosx64", + "win32", + "win64", + "win64-aarch64", ) log = logging.getLogger(__name__) @@ -37,11 +37,12 @@ class ConfigError(Exception): def readConfig(configfile): c = {} execfile(configfile, c) - return c['whitelist'] + return c["whitelist"] -def verify_mozconfigs(mozconfig_pair, nightly_mozconfig_pair, platform, - mozconfigWhitelist): +def verify_mozconfigs( + mozconfig_pair, nightly_mozconfig_pair, platform, mozconfigWhitelist +): """Compares mozconfig to nightly_mozconfig and compare to an optional whitelist of known differences. 
mozconfig_pair and nightly_mozconfig_pair are pairs containing the mozconfig's identifier and the list of lines in @@ -59,61 +60,64 @@ def verify_mozconfigs(mozconfig_pair, nightly_mozconfig_pair, platform, success = True diff_instance = difflib.Differ() - diff_result = diff_instance.compare( - mozconfig_lines, nightly_mozconfig_lines) + diff_result = diff_instance.compare(mozconfig_lines, nightly_mozconfig_lines) diff_list = list(diff_result) for line in diff_list: clean_line = line[1:].strip() - if (line[0] == '-' or line[0] == '+') and len(clean_line) > 1: + if (line[0] == "-" or line[0] == "+") and len(clean_line) > 1: # skip comment lines - if clean_line.startswith('#'): + if clean_line.startswith("#"): continue # compare to whitelist message = "" - if line[0] == '-': + if line[0] == "-": # handle lines that move around in diff - if '+' + line[1:] in diff_list: + if "+" + line[1:] in diff_list: continue - if platform in mozconfigWhitelist.get('release', {}): - if clean_line in \ - mozconfigWhitelist['release'][platform]: + if platform in mozconfigWhitelist.get("release", {}): + if clean_line in mozconfigWhitelist["release"][platform]: continue - elif line[0] == '+': - if '-' + line[1:] in diff_list: + elif line[0] == "+": + if "-" + line[1:] in diff_list: continue - if platform in mozconfigWhitelist.get('nightly', {}): - if clean_line in \ - mozconfigWhitelist['nightly'][platform]: + if platform in mozconfigWhitelist.get("nightly", {}): + if clean_line in mozconfigWhitelist["nightly"][platform]: continue else: - log.warning("%s not in %s %s!" % ( - clean_line, platform, - mozconfigWhitelist['nightly'][platform])) + log.warning( + "%s not in %s %s!" + % ( + clean_line, + platform, + mozconfigWhitelist["nightly"][platform], + ) + ) else: log.error("Skipping line %s!" 
% line) continue message = "found in %s but not in %s: %s" - if line[0] == '-': - log.error(message % (mozconfig_name, - nightly_mozconfig_name, clean_line)) + if line[0] == "-": + log.error( + message % (mozconfig_name, nightly_mozconfig_name, clean_line) + ) else: - log.error(message % (nightly_mozconfig_name, - mozconfig_name, clean_line)) + log.error( + message % (nightly_mozconfig_name, mozconfig_name, clean_line) + ) success = False return success def get_mozconfig(path): """Consumes a path and returns a list of lines from the mozconfig file.""" - with open(path, 'rb') as fh: + with open(path, "rb") as fh: return fh.readlines() def compare(topsrcdir): - app = os.path.join(topsrcdir, 'browser') - whitelist = readConfig(os.path.join(app, 'config', 'mozconfigs', - 'whitelist')) + app = os.path.join(topsrcdir, "browser") + whitelist = readConfig(os.path.join(app, "config", "mozconfigs", "whitelist")) success = True @@ -121,13 +125,13 @@ def normalize_lines(lines): return {l.strip() for l in lines} for platform in PLATFORMS: - log.info('Comparing platform %s' % platform) + log.info("Comparing platform %s" % platform) - mozconfigs_path = os.path.join(app, 'config', 'mozconfigs', platform) + mozconfigs_path = os.path.join(app, "config", "mozconfigs", platform) - nightly_path = os.path.join(mozconfigs_path, 'nightly') - beta_path = os.path.join(mozconfigs_path, 'beta') - release_path = os.path.join(mozconfigs_path, 'release') + nightly_path = os.path.join(mozconfigs_path, "nightly") + beta_path = os.path.join(mozconfigs_path, "beta") + release_path = os.path.join(mozconfigs_path, "release") nightly_lines = get_mozconfig(nightly_path) beta_lines = get_mozconfig(beta_path) @@ -135,28 +139,28 @@ def normalize_lines(lines): # Validate that entries in whitelist['nightly'][platform] are actually # present. 
- whitelist_normalized = normalize_lines( - whitelist['nightly'].get(platform, [])) + whitelist_normalized = normalize_lines(whitelist["nightly"].get(platform, [])) nightly_normalized = normalize_lines(nightly_lines) for line in sorted(whitelist_normalized - nightly_normalized): - log.error('extra line in nightly whitelist: %s' % line) + log.error("extra line in nightly whitelist: %s" % line) success = False - log.info('Comparing beta and nightly mozconfigs') - passed = verify_mozconfigs((beta_path, beta_lines), - (nightly_path, nightly_lines), - platform, - whitelist) + log.info("Comparing beta and nightly mozconfigs") + passed = verify_mozconfigs( + (beta_path, beta_lines), (nightly_path, nightly_lines), platform, whitelist + ) if not passed: success = False - log.info('Comparing release and nightly mozconfigs') - passed = verify_mozconfigs((release_path, release_lines), - (nightly_path, nightly_lines), - platform, - whitelist) + log.info("Comparing release and nightly mozconfigs") + passed = verify_mozconfigs( + (release_path, release_lines), + (nightly_path, nightly_lines), + platform, + whitelist, + ) if not passed: success = False @@ -165,10 +169,10 @@ def normalize_lines(lines): class TestCompareMozconfigs(unittest.TestCase): def test_compare_mozconfigs(self): - topsrcdir = buildconfig.substs['top_srcdir'] + topsrcdir = buildconfig.substs["top_srcdir"] self.assertTrue(compare(topsrcdir)) -if __name__ == '__main__': +if __name__ == "__main__": logging.basicConfig(level=logging.INFO) mozunit.main() diff --git a/build/gen_symverscript.py b/build/gen_symverscript.py index 677eea754c0cd0..f32554abc800d6 100644 --- a/build/gen_symverscript.py +++ b/build/gen_symverscript.py @@ -10,12 +10,14 @@ def main(output, input_file, version): pp = Preprocessor() - pp.context.update({ - 'VERSION': version, - }) + pp.context.update( + { + "VERSION": version, + } + ) pp.out = output pp.do_include(input_file) -if __name__ == '__main__': +if __name__ == "__main__": main(*sys.agv[1:]) diff --git a/build/gen_test_packages_manifest.py b/build/gen_test_packages_manifest.py index 75f8ee9982229f..b6f2353684b97e 100644 --- a/build/gen_test_packages_manifest.py +++ b/build/gen_test_packages_manifest.py @@ -9,68 +9,88 @@ from argparse import ArgumentParser ALL_HARNESSES = [ - 'common', # Harnesses without a specific package will look here. - 'condprof', - 'mochitest', - 'reftest', - 'xpcshell', - 'cppunittest', - 'jittest', - 'mozbase', - 'web-platform', - 'talos', - 'raptor', - 'awsy', - 'gtest', - 'updater-dep', - 'jsreftest', - 'perftests', + "common", # Harnesses without a specific package will look here. + "condprof", + "mochitest", + "reftest", + "xpcshell", + "cppunittest", + "jittest", + "mozbase", + "web-platform", + "talos", + "raptor", + "awsy", + "gtest", + "updater-dep", + "jsreftest", + "perftests", ] PACKAGE_SPECIFIED_HARNESSES = [ - 'condprof', - 'cppunittest', - 'mochitest', - 'reftest', - 'xpcshell', - 'web-platform', - 'talos', - 'raptor', - 'awsy', - 'updater-dep', - 'jittest', - 'jsreftest', - 'perftests', + "condprof", + "cppunittest", + "mochitest", + "reftest", + "xpcshell", + "web-platform", + "talos", + "raptor", + "awsy", + "updater-dep", + "jittest", + "jsreftest", + "perftests", ] # These packages are not present for every build configuration. 
OPTIONAL_PACKAGES = [ - 'gtest', + "gtest", ] def parse_args(): parser = ArgumentParser( description="Generate a test_packages.json file to tell automation which harnesses " - "require which test packages.") - parser.add_argument("--common", required=True, - action="store", dest="tests_common", - help="Name of the \"common\" archive, a package to be used by all " - "harnesses.") - parser.add_argument("--jsshell", required=True, - action="store", dest="jsshell", - help="Name of the jsshell zip.") + "require which test packages." + ) + parser.add_argument( + "--common", + required=True, + action="store", + dest="tests_common", + help='Name of the "common" archive, a package to be used by all ' "harnesses.", + ) + parser.add_argument( + "--jsshell", + required=True, + action="store", + dest="jsshell", + help="Name of the jsshell zip.", + ) for harness in PACKAGE_SPECIFIED_HARNESSES: - parser.add_argument("--%s" % harness, required=True, - action="store", dest=harness, - help="Name of the %s zip." % harness) + parser.add_argument( + "--%s" % harness, + required=True, + action="store", + dest=harness, + help="Name of the %s zip." % harness, + ) for harness in OPTIONAL_PACKAGES: - parser.add_argument("--%s" % harness, required=False, - action="store", dest=harness, - help="Name of the %s zip." % harness) - parser.add_argument("--dest-file", required=True, - action="store", dest="destfile", - help="Path to the output file to be written.") + parser.add_argument( + "--%s" % harness, + required=False, + action="store", + dest=harness, + help="Name of the %s zip." % harness, + ) + parser.add_argument( + "--dest-file", + required=True, + action="store", + dest="destfile", + help="Path to the output file to be written.", + ) return parser.parse_args() @@ -85,8 +105,8 @@ def generate_package_data(args): jsshell = args.jsshell harness_requirements = dict([(k, [tests_common]) for k in ALL_HARNESSES]) - harness_requirements['jittest'].append(jsshell) - harness_requirements['jsreftest'].append(args.reftest) + harness_requirements["jittest"].append(jsshell) + harness_requirements["jsreftest"].append(args.reftest) for harness in PACKAGE_SPECIFIED_HARNESSES + OPTIONAL_PACKAGES: pkg_name = getattr(args, harness, None) if pkg_name is None: @@ -95,8 +115,8 @@ def generate_package_data(args): return harness_requirements -if __name__ == '__main__': +if __name__ == "__main__": args = parse_args() packages_data = generate_package_data(args) - with open(args.destfile, 'w') as of: + with open(args.destfile, "w") as of: json.dump(packages_data, of, indent=4) diff --git a/build/mach_bootstrap.py b/build/mach_bootstrap.py index d92958a5fdcebc..ce193f6c553093 100644 --- a/build/mach_bootstrap.py +++ b/build/mach_bootstrap.py @@ -21,7 +21,7 @@ from types import ModuleType -STATE_DIR_FIRST_RUN = ''' +STATE_DIR_FIRST_RUN = """ mach and the build system store shared state in a common directory on the filesystem. The following directory will be created: @@ -33,142 +33,142 @@ want to export this environment variable from your shell's init scripts. Press ENTER/RETURN to continue or CTRL+c to abort. -'''.lstrip() +""".lstrip() # Individual files providing mach commands. 
MACH_MODULES = [ - 'build/valgrind/mach_commands.py', - 'devtools/shared/css/generated/mach_commands.py', - 'dom/bindings/mach_commands.py', - 'js/src/devtools/rootAnalysis/mach_commands.py', - 'layout/tools/reftest/mach_commands.py', - 'mobile/android/mach_commands.py', - 'python/mach/mach/commands/commandinfo.py', - 'python/mach/mach/commands/settings.py', - 'python/mach_commands.py', - 'python/mozboot/mozboot/mach_commands.py', - 'python/mozbuild/mozbuild/artifact_commands.py', - 'python/mozbuild/mozbuild/backend/mach_commands.py', - 'python/mozbuild/mozbuild/build_commands.py', - 'python/mozbuild/mozbuild/code_analysis/mach_commands.py', - 'python/mozbuild/mozbuild/compilation/codecomplete.py', - 'python/mozbuild/mozbuild/frontend/mach_commands.py', - 'python/mozbuild/mozbuild/vendor/mach_commands.py', - 'python/mozbuild/mozbuild/mach_commands.py', - 'python/mozperftest/mozperftest/mach_commands.py', - 'python/mozrelease/mozrelease/mach_commands.py', - 'remote/mach_commands.py', - 'taskcluster/mach_commands.py', - 'testing/awsy/mach_commands.py', - 'testing/condprofile/mach_commands.py', - 'testing/firefox-ui/mach_commands.py', - 'testing/geckodriver/mach_commands.py', - 'testing/mach_commands.py', - 'testing/marionette/mach_commands.py', - 'testing/mochitest/mach_commands.py', - 'testing/mozharness/mach_commands.py', - 'testing/raptor/mach_commands.py', - 'testing/talos/mach_commands.py', - 'testing/tps/mach_commands.py', - 'testing/web-platform/mach_commands.py', - 'testing/xpcshell/mach_commands.py', - 'toolkit/components/telemetry/tests/marionette/mach_commands.py', - 'tools/browsertime/mach_commands.py', - 'tools/compare-locales/mach_commands.py', - 'tools/lint/mach_commands.py', - 'tools/mach_commands.py', - 'tools/moztreedocs/mach_commands.py', - 'tools/phabricator/mach_commands.py', - 'tools/power/mach_commands.py', - 'tools/tryselect/mach_commands.py', - 'tools/vcs/mach_commands.py', + "build/valgrind/mach_commands.py", + "devtools/shared/css/generated/mach_commands.py", + "dom/bindings/mach_commands.py", + "js/src/devtools/rootAnalysis/mach_commands.py", + "layout/tools/reftest/mach_commands.py", + "mobile/android/mach_commands.py", + "python/mach/mach/commands/commandinfo.py", + "python/mach/mach/commands/settings.py", + "python/mach_commands.py", + "python/mozboot/mozboot/mach_commands.py", + "python/mozbuild/mozbuild/artifact_commands.py", + "python/mozbuild/mozbuild/backend/mach_commands.py", + "python/mozbuild/mozbuild/build_commands.py", + "python/mozbuild/mozbuild/code_analysis/mach_commands.py", + "python/mozbuild/mozbuild/compilation/codecomplete.py", + "python/mozbuild/mozbuild/frontend/mach_commands.py", + "python/mozbuild/mozbuild/vendor/mach_commands.py", + "python/mozbuild/mozbuild/mach_commands.py", + "python/mozperftest/mozperftest/mach_commands.py", + "python/mozrelease/mozrelease/mach_commands.py", + "remote/mach_commands.py", + "taskcluster/mach_commands.py", + "testing/awsy/mach_commands.py", + "testing/condprofile/mach_commands.py", + "testing/firefox-ui/mach_commands.py", + "testing/geckodriver/mach_commands.py", + "testing/mach_commands.py", + "testing/marionette/mach_commands.py", + "testing/mochitest/mach_commands.py", + "testing/mozharness/mach_commands.py", + "testing/raptor/mach_commands.py", + "testing/talos/mach_commands.py", + "testing/tps/mach_commands.py", + "testing/web-platform/mach_commands.py", + "testing/xpcshell/mach_commands.py", + "toolkit/components/telemetry/tests/marionette/mach_commands.py", + "tools/browsertime/mach_commands.py", 
+ "tools/compare-locales/mach_commands.py", + "tools/lint/mach_commands.py", + "tools/mach_commands.py", + "tools/moztreedocs/mach_commands.py", + "tools/phabricator/mach_commands.py", + "tools/power/mach_commands.py", + "tools/tryselect/mach_commands.py", + "tools/vcs/mach_commands.py", ] CATEGORIES = { - 'build': { - 'short': 'Build Commands', - 'long': 'Interact with the build system', - 'priority': 80, + "build": { + "short": "Build Commands", + "long": "Interact with the build system", + "priority": 80, }, - 'post-build': { - 'short': 'Post-build Commands', - 'long': 'Common actions performed after completing a build.', - 'priority': 70, + "post-build": { + "short": "Post-build Commands", + "long": "Common actions performed after completing a build.", + "priority": 70, }, - 'testing': { - 'short': 'Testing', - 'long': 'Run tests.', - 'priority': 60, + "testing": { + "short": "Testing", + "long": "Run tests.", + "priority": 60, }, - 'ci': { - 'short': 'CI', - 'long': 'Taskcluster commands', - 'priority': 59, + "ci": { + "short": "CI", + "long": "Taskcluster commands", + "priority": 59, }, - 'devenv': { - 'short': 'Development Environment', - 'long': 'Set up and configure your development environment.', - 'priority': 50, + "devenv": { + "short": "Development Environment", + "long": "Set up and configure your development environment.", + "priority": 50, }, - 'build-dev': { - 'short': 'Low-level Build System Interaction', - 'long': 'Interact with specific parts of the build system.', - 'priority': 20, + "build-dev": { + "short": "Low-level Build System Interaction", + "long": "Interact with specific parts of the build system.", + "priority": 20, }, - 'misc': { - 'short': 'Potpourri', - 'long': 'Potent potables and assorted snacks.', - 'priority': 10, + "misc": { + "short": "Potpourri", + "long": "Potent potables and assorted snacks.", + "priority": 10, }, - 'release': { - 'short': 'Release automation', - 'long': 'Commands for used in release automation.', - 'priority': 5, + "release": { + "short": "Release automation", + "long": "Commands for used in release automation.", + "priority": 5, }, - 'disabled': { - 'short': 'Disabled', - 'long': 'The disabled commands are hidden by default. Use -v to display them. ' - 'These commands are unavailable for your current context, ' + "disabled": { + "short": "Disabled", + "long": "The disabled commands are hidden by default. Use -v to display them. 
" + "These commands are unavailable for your current context, " 'run "mach " to see why.', - 'priority': 0, + "priority": 0, }, } def search_path(mozilla_dir, packages_txt): with open(os.path.join(mozilla_dir, packages_txt)) as f: - packages = [line.rstrip().split(':') for line in f] + packages = [line.rstrip().split(":") for line in f] def handle_package(package): - if package[0] == 'optional': + if package[0] == "optional": try: for path in handle_package(package[1:]): yield path except Exception: pass - if package[0] in ('windows', '!windows'): - for_win = not package[0].startswith('!') - is_win = sys.platform == 'win32' + if package[0] in ("windows", "!windows"): + for_win = not package[0].startswith("!") + is_win = sys.platform == "win32" if is_win == for_win: for path in handle_package(package[1:]): yield path - if package[0] in ('python2', 'python3'): - for_python3 = package[0].endswith('3') + if package[0] in ("python2", "python3"): + for_python3 = package[0].endswith("3") is_python3 = sys.version_info[0] > 2 if is_python3 == for_python3: for path in handle_package(package[1:]): yield path - if package[0] == 'packages.txt': + if package[0] == "packages.txt": assert len(package) == 2 for p in search_path(mozilla_dir, package[1]): yield os.path.join(mozilla_dir, p) - if package[0].endswith('.pth'): + if package[0].endswith(".pth"): assert len(package) == 2 yield os.path.join(mozilla_dir, package[1]) @@ -186,8 +186,8 @@ def bootstrap(topsrcdir, mozilla_dir=None): # on module import. major, minor = sys.version_info[:2] if (major == 2 and minor < 7) or (major == 3 and minor < 5): - print('Python 2.7 or Python 3.5+ is required to run mach.') - print('You are running Python', platform.python_version()) + print("Python 2.7 or Python 3.5+ is required to run mach.") + print("You are running Python", platform.python_version()) sys.exit(1) # Global build system and mach state is stored in a central directory. By @@ -200,8 +200,8 @@ def bootstrap(topsrcdir, mozilla_dir=None): # like surprises. sys.path[0:0] = [ os.path.join(mozilla_dir, path) - for path in search_path(mozilla_dir, - 'build/mach_virtualenv_packages.txt')] + for path in search_path(mozilla_dir, "build/mach_virtualenv_packages.txt") + ] import mach.base import mach.main from mach.util import setenv @@ -227,12 +227,13 @@ def bootstrap(topsrcdir, mozilla_dir=None): # This code mimics the code in taskcluster/scripts/run-task. try: import resource + # Keep the hard limit the same, though, allowing processes to change # their soft limit if they need to (Firefox does, for instance). (soft, hard) = resource.getrlimit(resource.RLIMIT_NOFILE) # Permit people to override our default limit if necessary via # MOZ_LIMIT_NOFILE, which is the same variable `run-task` uses. - limit = os.environ.get('MOZ_LIMIT_NOFILE') + limit = os.environ.get("MOZ_LIMIT_NOFILE") if limit: limit = int(limit) else: @@ -248,6 +249,7 @@ def bootstrap(topsrcdir, mozilla_dir=None): pass from mozbuild.util import patch_main + patch_main() def resolve_repository(): @@ -258,8 +260,7 @@ def resolve_repository(): # If we ever need to use the VCS binary here, consider something # more robust. 
return mozversioncontrol.get_repository_object(path=mozilla_dir) - except (mozversioncontrol.InvalidRepoPath, - mozversioncontrol.MissingVCSTool): + except (mozversioncontrol.InvalidRepoPath, mozversioncontrol.MissingVCSTool): return None def pre_dispatch_handler(context, handler, args): @@ -267,25 +268,30 @@ def pre_dispatch_handler(context, handler, args): # the build, tests will be disabled. Instead of trying to run # nonexistent tests then reporting a failure, this will prevent mach # from progressing beyond this point. - if handler.category == 'testing' and not handler.ok_if_tests_disabled: + if handler.category == "testing" and not handler.ok_if_tests_disabled: from mozbuild.base import BuildEnvironmentNotFoundException + try: from mozbuild.base import MozbuildObject + # all environments should have an instance of build object. build = MozbuildObject.from_environment() - if build is not None and hasattr(build, 'mozconfig'): - ac_options = build.mozconfig['configure_args'] - if ac_options and '--disable-tests' in ac_options: - print('Tests have been disabled by mozconfig with the flag ' + - '"ac_add_options --disable-tests".\n' + - 'Remove the flag, and re-compile to enable tests.') + if build is not None and hasattr(build, "mozconfig"): + ac_options = build.mozconfig["configure_args"] + if ac_options and "--disable-tests" in ac_options: + print( + "Tests have been disabled by mozconfig with the flag " + + '"ac_add_options --disable-tests".\n' + + "Remove the flag, and re-compile to enable tests." + ) sys.exit(1) except BuildEnvironmentNotFoundException: # likely automation environment, so do nothing. pass - def post_dispatch_handler(context, handler, instance, success, - start_time, end_time, depth, args): + def post_dispatch_handler( + context, handler, instance, success, start_time, end_time, depth, args + ): """Perform global operations after command dispatch. 
@@ -297,56 +303,60 @@ def post_dispatch_handler(context, handler, instance, success, if depth != 1: return - _finalize_telemetry_glean(context.telemetry, handler.name == 'bootstrap', - success) - _finalize_telemetry_legacy(context, instance, handler, success, start_time, - end_time, topsrcdir) + _finalize_telemetry_glean( + context.telemetry, handler.name == "bootstrap", success + ) + _finalize_telemetry_legacy( + context, instance, handler, success, start_time, end_time, topsrcdir + ) def populate_context(key=None): if key is None: return - if key == 'state_dir': + if key == "state_dir": state_dir = get_state_dir() - if state_dir == os.environ.get('MOZBUILD_STATE_PATH'): + if state_dir == os.environ.get("MOZBUILD_STATE_PATH"): if not os.path.exists(state_dir): - print('Creating global state directory from environment variable: %s' - % state_dir) + print( + "Creating global state directory from environment variable: %s" + % state_dir + ) os.makedirs(state_dir, mode=0o770) else: if not os.path.exists(state_dir): - if not os.environ.get('MOZ_AUTOMATION'): + if not os.environ.get("MOZ_AUTOMATION"): print(STATE_DIR_FIRST_RUN.format(userdir=state_dir)) try: sys.stdin.readline() except KeyboardInterrupt: sys.exit(1) - print('\nCreating default state directory: %s' % state_dir) + print("\nCreating default state directory: %s" % state_dir) os.makedirs(state_dir, mode=0o770) return state_dir - if key == 'local_state_dir': + if key == "local_state_dir": return get_state_dir(srcdir=True) - if key == 'topdir': + if key == "topdir": return topsrcdir - if key == 'pre_dispatch_handler': + if key == "pre_dispatch_handler": return pre_dispatch_handler - if key == 'post_dispatch_handler': + if key == "post_dispatch_handler": return post_dispatch_handler - if key == 'repository': + if key == "repository": return resolve_repository() raise AttributeError(key) # Note which process is top-level so that recursive mach invocations can avoid writing # telemetry data. - if 'MACH_MAIN_PID' not in os.environ: - setenv('MACH_MAIN_PID', str(os.getpid())) + if "MACH_MAIN_PID" not in os.environ: + setenv("MACH_MAIN_PID", str(os.getpid())) driver = mach.main.Mach(os.getcwd()) driver.populate_context_handler = populate_context @@ -358,8 +368,7 @@ def populate_context(key=None): driver.settings_paths.append(mozilla_dir) for category, meta in CATEGORIES.items(): - driver.define_category(category, meta['short'], meta['long'], - meta['priority']) + driver.define_category(category, meta["short"], meta["long"], meta["priority"]) repo = resolve_repository() @@ -375,8 +384,9 @@ def populate_context(key=None): return driver -def _finalize_telemetry_legacy(context, instance, handler, success, start_time, - end_time, topsrcdir): +def _finalize_telemetry_legacy( + context, instance, handler, success, start_time, end_time, topsrcdir +): """Record and submit legacy telemetry. 
Parameterized by the raw gathered telemetry, this function handles persisting and @@ -388,66 +398,80 @@ def _finalize_telemetry_legacy(context, instance, handler, success, start_time, from mozboot.util import get_state_dir from mozbuild.base import MozbuildObject from mozbuild.telemetry import gather_telemetry - from mach.telemetry import ( - is_telemetry_enabled, - is_applicable_telemetry_environment - ) + from mach.telemetry import is_telemetry_enabled, is_applicable_telemetry_environment - if not (is_applicable_telemetry_environment() - and is_telemetry_enabled(context.settings)): + if not ( + is_applicable_telemetry_environment() and is_telemetry_enabled(context.settings) + ): return if not isinstance(instance, MozbuildObject): instance = MozbuildObject.from_environment() - command_attrs = getattr(context, 'command_attrs', {}) + command_attrs = getattr(context, "command_attrs", {}) # We gather telemetry for every operation. - data = gather_telemetry(command=handler.name, success=success, - start_time=start_time, end_time=end_time, - mach_context=context, instance=instance, - command_attrs=command_attrs) + data = gather_telemetry( + command=handler.name, + success=success, + start_time=start_time, + end_time=end_time, + mach_context=context, + instance=instance, + command_attrs=command_attrs, + ) if data: - telemetry_dir = os.path.join(get_state_dir(), 'telemetry') + telemetry_dir = os.path.join(get_state_dir(), "telemetry") try: os.mkdir(telemetry_dir) except OSError as e: if e.errno != errno.EEXIST: raise - outgoing_dir = os.path.join(telemetry_dir, 'outgoing') + outgoing_dir = os.path.join(telemetry_dir, "outgoing") try: os.mkdir(outgoing_dir) except OSError as e: if e.errno != errno.EEXIST: raise - with open(os.path.join(outgoing_dir, str(uuid.uuid4()) + '.json'), - 'w') as f: + with open(os.path.join(outgoing_dir, str(uuid.uuid4()) + ".json"), "w") as f: json.dump(data, f, sort_keys=True) # The user is performing a maintenance command, skip the upload - if handler.name in ('bootstrap', 'doctor', 'mach-commands', 'vcs-setup', - 'create-mach-environment', 'install-moz-phab', - # We call mach environment in client.mk which would cause the - # data submission to block the forward progress of make. - 'environment'): + if handler.name in ( + "bootstrap", + "doctor", + "mach-commands", + "vcs-setup", + "create-mach-environment", + "install-moz-phab", + # We call mach environment in client.mk which would cause the + # data submission to block the forward progress of make. + "environment", + ): return False - if 'TEST_MACH_TELEMETRY_NO_SUBMIT' in os.environ: + if "TEST_MACH_TELEMETRY_NO_SUBMIT" in os.environ: # In our telemetry tests, we want telemetry to be collected for analysis, but # we don't want it submitted. 
return False state_dir = get_state_dir() - machpath = os.path.join(instance.topsrcdir, 'mach') - with open(os.devnull, 'wb') as devnull: - subprocess.Popen([sys.executable, machpath, 'python', - '--no-virtualenv', - os.path.join(topsrcdir, 'build', - 'submit_telemetry_data.py'), - state_dir], - stdout=devnull, stderr=devnull) + machpath = os.path.join(instance.topsrcdir, "mach") + with open(os.devnull, "wb") as devnull: + subprocess.Popen( + [ + sys.executable, + machpath, + "python", + "--no-virtualenv", + os.path.join(topsrcdir, "build", "submit_telemetry_data.py"), + state_dir, + ], + stdout=devnull, + stderr=devnull, + ) def _finalize_telemetry_glean(telemetry, is_bootstrap, success): @@ -472,8 +496,9 @@ def _finalize_telemetry_glean(telemetry, is_bootstrap, success): system_metrics.logical_cores.add(logical_cores) system_metrics.physical_cores.add(physical_cores) if memory_total is not None: - system_metrics.memory.accumulate(int( - math.ceil(float(memory_total) / (1024 * 1024 * 1024)))) + system_metrics.memory.accumulate( + int(math.ceil(float(memory_total) / (1024 * 1024 * 1024))) + ) telemetry.submit(is_bootstrap) @@ -487,12 +512,15 @@ def __init__(self, original_import): self._original_import = original_import # Assume the source directory is the parent directory of the one # containing this file. - self._source_dir = os.path.normcase(os.path.abspath( - os.path.dirname(os.path.dirname(__file__)))) + os.sep + self._source_dir = ( + os.path.normcase( + os.path.abspath(os.path.dirname(os.path.dirname(__file__))) + ) + + os.sep + ) self._modules = set() - def __call__(self, name, globals=None, locals=None, fromlist=None, - level=-1): + def __call__(self, name, globals=None, locals=None, fromlist=None, level=-1): if sys.version_info[0] >= 3 and level < 0: level = 0 @@ -513,14 +541,14 @@ def __call__(self, name, globals=None, locals=None, fromlist=None, self._modules.add(resolved_name) # Builtin modules don't have a __file__ attribute. - if not getattr(module, '__file__', None): + if not getattr(module, "__file__", None): return module # Note: module.__file__ is not always absolute. path = os.path.normcase(os.path.abspath(module.__file__)) # Note: we could avoid normcase and abspath above for non pyc/pyo # files, but those are actually rare, so it doesn't really matter. - if not path.endswith(('.pyc', '.pyo')): + if not path.endswith((".pyc", ".pyo")): return module # Ignore modules outside our source directory diff --git a/build/midl.py b/build/midl.py index 3f2a08c4f9db7e..add17006d61c19 100644 --- a/build/midl.py +++ b/build/midl.py @@ -12,11 +12,11 @@ def relativize(path, base=None): # For absolute path in Unix builds, we need relative paths because # Windows programs run via Wine don't like these Unix absolute paths # (they look like command line arguments). - if path.startswith('/'): + if path.startswith("/"): return os.path.relpath(path, base) # For Windows absolute paths, we can just use the unmodified path. # And if the path starts with '-', it's a command line argument. - if os.path.isabs(path) or path.startswith('-'): + if os.path.isabs(path) or path.startswith("-"): return path # Remaining case is relative paths, which may be relative to a different # directory (os.getcwd()) than the needed `base`, so we "rebase" it. @@ -25,18 +25,18 @@ def relativize(path, base=None): def midl(out, input, *flags): out.avoid_writing_to_file() - midl = buildconfig.substs['MIDL'] - wine = buildconfig.substs.get('WINE') - base = os.path.dirname(out.name) or '.' 
- if midl.lower().endswith('.exe') and wine: + midl = buildconfig.substs["MIDL"] + wine = buildconfig.substs.get("WINE") + base = os.path.dirname(out.name) or "." + if midl.lower().endswith(".exe") and wine: command = [wine, midl] else: command = [midl] - command.extend(buildconfig.substs['MIDL_FLAGS']) + command.extend(buildconfig.substs["MIDL_FLAGS"]) command.extend([relativize(f, base) for f in flags]) - command.append('-Oicf') + command.append("-Oicf") command.append(relativize(input, base)) - print('Executing:', ' '.join(command)) + print("Executing:", " ".join(command)) result = subprocess.run(command, cwd=base) return result.returncode @@ -49,7 +49,9 @@ def merge_dlldata(out, *inputs): inputs = [open(i) for i in inputs] read_a_line = [True] * len(inputs) while True: - lines = [f.readline() if read_a_line[n] else lines[n] for n, f in enumerate(inputs)] + lines = [ + f.readline() if read_a_line[n] else lines[n] for n, f in enumerate(inputs) + ] unique_lines = set(lines) if len(unique_lines) == 1: # All the lines are identical @@ -57,7 +59,10 @@ def merge_dlldata(out, *inputs): break out.write(lines[0]) read_a_line = [True] * len(inputs) - elif len(unique_lines) == 2 and len([l for l in unique_lines if '#define' in l]) == 1: + elif ( + len(unique_lines) == 2 + and len([l for l in unique_lines if "#define" in l]) == 1 + ): # Most lines are identical. When they aren't, it's typically because some # files have an extra #define that others don't. When that happens, we # print out the #define, and get a new input line from the files that had @@ -67,16 +72,18 @@ def merge_dlldata(out, *inputs): # defines across different files, except when there's a different one # for each file, in which case it's handled further below. a = unique_lines.pop() - if '#define' in a: + if "#define" in a: out.write(a) else: out.write(unique_lines.pop()) - read_a_line = ['#define' in l for l in lines] + read_a_line = ["#define" in l for l in lines] elif len(unique_lines) != len(lines): # If for some reason, we don't get lines that are entirely different # from each other, we have some unexpected input. - print('Error while merging dlldata. Last lines read: {}'.format(lines), - file=sys.stderr) + print( + "Error while merging dlldata. Last lines read: {}".format(lines), + file=sys.stderr, + ) return 1 else: for line in lines: diff --git a/build/mobile/remoteautomation.py b/build/mobile/remoteautomation.py index 8d98f9ba2522fc..60d8d952fa1fbb 100644 --- a/build/mobile/remoteautomation.py +++ b/build/mobile/remoteautomation.py @@ -35,15 +35,17 @@ def resetGlobalLog(log): resetGlobalLog(sys.stdout) # signatures for logcat messages that we don't care about much -fennecLogcatFilters = ["The character encoding of the HTML document was not declared", - "Use of Mutation Events is deprecated. Use MutationObserver instead.", - "Unexpected value from nativeGetEnabledTags: 0"] +fennecLogcatFilters = [ + "The character encoding of the HTML document was not declared", + "Use of Mutation Events is deprecated. 
Use MutationObserver instead.", + "Unexpected value from nativeGetEnabledTags: 0", +] class RemoteAutomation(object): - - def __init__(self, device, appName='', remoteProfile=None, remoteLog=None, - processArgs=None): + def __init__( + self, device, appName="", remoteProfile=None, remoteLog=None, processArgs=None + ): super(RemoteAutomation, self).__init__() self.device = device self.appName = appName @@ -53,9 +55,22 @@ def __init__(self, device, appName='', remoteProfile=None, remoteLog=None, self.lastTestSeen = "remoteautomation.py" self.log = _log - def runApp(self, testURL, env, app, profileDir, extraArgs, - utilityPath=None, xrePath=None, debuggerInfo=None, symbolsPath=None, - timeout=-1, maxTime=None, e10s=True, **kwargs): + def runApp( + self, + testURL, + env, + app, + profileDir, + extraArgs, + utilityPath=None, + xrePath=None, + debuggerInfo=None, + symbolsPath=None, + timeout=-1, + maxTime=None, + e10s=True, + **kwargs + ): """ Run the app, log the duration it took to execute, return the status code. Kills the app if it runs for longer than |maxTime| seconds, or outputs nothing @@ -69,19 +84,26 @@ def runApp(self, testURL, env, app, profileDir, extraArgs, timeout = self.DEFAULT_TIMEOUT self.utilityPath = utilityPath - cmd, args = self.buildCommandLine(app, debuggerInfo, profileDir, testURL, extraArgs) + cmd, args = self.buildCommandLine( + app, debuggerInfo, profileDir, testURL, extraArgs + ) startTime = datetime.datetime.now() self.lastTestSeen = "remoteautomation.py" - self.launchApp([cmd] + args, - env=self.environment(env=env, crashreporter=not debuggerInfo), - e10s=e10s, **self.processArgs) + self.launchApp( + [cmd] + args, + env=self.environment(env=env, crashreporter=not debuggerInfo), + e10s=e10s, + **self.processArgs + ) self.log.info("remoteautomation.py | Application pid: %d" % self.pid) status = self.waitForFinish(timeout, maxTime) - self.log.info("remoteautomation.py | Application ran for: %s" % - str(datetime.datetime.now() - startTime)) + self.log.info( + "remoteautomation.py | Application ran for: %s" + % str(datetime.datetime.now() - startTime) + ) crashed = self.checkForCrashes(symbolsPath) if crashed: @@ -97,36 +119,36 @@ def environment(self, env=None, crashreporter=True, **kwargs): env = {} if crashreporter: - env['MOZ_CRASHREPORTER_NO_REPORT'] = '1' - env['MOZ_CRASHREPORTER'] = '1' - env['MOZ_CRASHREPORTER_SHUTDOWN'] = '1' + env["MOZ_CRASHREPORTER_NO_REPORT"] = "1" + env["MOZ_CRASHREPORTER"] = "1" + env["MOZ_CRASHREPORTER_SHUTDOWN"] = "1" else: - env['MOZ_CRASHREPORTER_DISABLE'] = '1' + env["MOZ_CRASHREPORTER_DISABLE"] = "1" # Crash on non-local network connections by default. # MOZ_DISABLE_NONLOCAL_CONNECTIONS can be set to "0" to temporarily # enable non-local connections for the purposes of local testing. # Don't override the user's choice here. See bug 1049688. - env.setdefault('MOZ_DISABLE_NONLOCAL_CONNECTIONS', '1') + env.setdefault("MOZ_DISABLE_NONLOCAL_CONNECTIONS", "1") # Send an env var noting that we are in automation. Passing any # value except the empty string will declare the value to exist. # # This may be used to disabled network connections during testing, e.g. # Switchboard & telemetry uploads. - env.setdefault('MOZ_IN_AUTOMATION', '1') + env.setdefault("MOZ_IN_AUTOMATION", "1") # Set WebRTC logging in case it is not set yet. 
- env.setdefault('R_LOG_LEVEL', '6') - env.setdefault('R_LOG_DESTINATION', 'stderr') - env.setdefault('R_LOG_VERBOSE', '1') + env.setdefault("R_LOG_LEVEL", "6") + env.setdefault("R_LOG_DESTINATION", "stderr") + env.setdefault("R_LOG_VERBOSE", "1") return env def waitForFinish(self, timeout, maxTime): - """ Wait for tests to finish. - If maxTime seconds elapse or no output is detected for timeout - seconds, kill the process and fail the test. + """Wait for tests to finish. + If maxTime seconds elapse or no output is detected for timeout + seconds, kill the process and fail the test. """ # maxTime is used to override the default timeout, we should honor that status = self.wait(timeout=maxTime, noOutputTimeout=timeout) @@ -137,38 +159,44 @@ def waitForFinish(self, timeout, maxTime): self.kill(True) if status == 1: if maxTime: - self.log.error("TEST-UNEXPECTED-FAIL | %s | " - "application ran for longer than allowed maximum time " - "of %s seconds" % (self.lastTestSeen, maxTime)) + self.log.error( + "TEST-UNEXPECTED-FAIL | %s | " + "application ran for longer than allowed maximum time " + "of %s seconds" % (self.lastTestSeen, maxTime) + ) else: - self.log.error("TEST-UNEXPECTED-FAIL | %s | " - "application ran for longer than allowed maximum time" - % self.lastTestSeen) + self.log.error( + "TEST-UNEXPECTED-FAIL | %s | " + "application ran for longer than allowed maximum time" + % self.lastTestSeen + ) if status == 2: - self.log.error("TEST-UNEXPECTED-FAIL | %s | " - "application timed out after %d seconds with no output" - % (self.lastTestSeen, int(timeout))) + self.log.error( + "TEST-UNEXPECTED-FAIL | %s | " + "application timed out after %d seconds with no output" + % (self.lastTestSeen, int(timeout)) + ) return status def checkForCrashes(self, symbolsPath): try: dumpDir = tempfile.mkdtemp() - remoteCrashDir = posixpath.join(self.remoteProfile, 'minidumps') + remoteCrashDir = posixpath.join(self.remoteProfile, "minidumps") if not self.device.is_dir(remoteCrashDir): return False self.device.pull(remoteCrashDir, dumpDir) logger = get_default_logger() crashed = mozcrash.log_crashes( - logger, dumpDir, symbolsPath, test=self.lastTestSeen) + logger, dumpDir, symbolsPath, test=self.lastTestSeen + ) finally: try: shutil.rmtree(dumpDir) except Exception as e: - print("WARNING: unable to remove directory %s: %s" % ( - dumpDir, str(e))) + print("WARNING: unable to remove directory %s: %s" % (dumpDir, str(e))) return crashed def buildCommandLine(self, app, debuggerInfo, profileDir, testURL, extraArgs): @@ -178,7 +206,7 @@ def buildCommandLine(self, app, debuggerInfo, profileDir, testURL, extraArgs): # Hack for robocop, if app is "am" and extraArgs contains the rest of the stuff, lets # assume extraArgs is all we need - if app == "am" and extraArgs[0] in ('instrument', 'start'): + if app == "am" and extraArgs[0] in ("instrument", "start"): return app, extraArgs cmd = os.path.abspath(app) @@ -198,7 +226,7 @@ def buildCommandLine(self, app, debuggerInfo, profileDir, testURL, extraArgs): args.extend(extraArgs) try: - args.remove('-foreground') + args.remove("-foreground") except Exception: pass return app, args @@ -208,17 +236,19 @@ def launchApp(self, cmd, env=None, e10s=True, messageLogger=None, counts=None): self.stdoutlen = 0 if self.appName and self.device.process_exist(self.appName): - print("remoteautomation.py %s is already running. Stopping..." % self.appName) + print( + "remoteautomation.py %s is already running. Stopping..." 
% self.appName + ) self.device.stop_application(self.appName) self.counts = counts if self.counts is not None: - self.counts['pass'] = 0 - self.counts['fail'] = 0 - self.counts['todo'] = 0 + self.counts["pass"] = 0 + self.counts["fail"] = 0 + self.counts["todo"] = 0 - if cmd[0] == 'am': - cmd = ' '.join(cmd) + if cmd[0] == "am": + cmd = " ".join(cmd) self.procName = self.appName if not self.device.shell_bool(cmd): print("remoteautomation.py failed to launch %s" % cmd) @@ -228,17 +258,25 @@ def launchApp(self, cmd, env=None, e10s=True, messageLogger=None, counts=None): if args[0] == self.appName: args = args[1:] url = args[-1:][0] - if url.startswith('/'): + if url.startswith("/"): # this is probably a reftest profile directory, not a url url = None else: args = args[:-1] - if 'geckoview' in self.appName: + if "geckoview" in self.appName: activity = "TestRunnerActivity" - self.device.launch_activity(self.appName, activity_name=activity, e10s=e10s, - moz_env=env, extra_args=args, url=url) + self.device.launch_activity( + self.appName, + activity_name=activity, + e10s=e10s, + moz_env=env, + extra_args=args, + url=url, + ) else: - self.device.launch_fennec(self.appName, moz_env=env, extra_args=args, url=url) + self.device.launch_fennec( + self.appName, moz_env=env, extra_args=args, url=url + ) # Setting timeout at 1 hour since on a remote device this takes much longer. # Temporarily increased to 110 minutes because no more chunks can be created. @@ -268,13 +306,15 @@ def read_stdout(self): except ADBTimeoutError: raise except Exception as e: - self.log.exception("remoteautomation.py | exception reading log: %s" % str(e)) + self.log.exception( + "remoteautomation.py | exception reading log: %s" % str(e) + ) return False if not newLogContent: return False self.stdoutlen += len(newLogContent) - newLogContent = six.ensure_str(newLogContent, errors='replace') + newLogContent = six.ensure_str(newLogContent, errors="replace") if self.messageLogger is None: testStartFilenames = re.findall(r"TEST-START \| ([^\s]*)", newLogContent) @@ -284,11 +324,11 @@ def read_stdout(self): return True self.logBuffer += newLogContent - lines = self.logBuffer.split('\n') + lines = self.logBuffer.split("\n") lines = [l for l in lines if l] if lines: - if self.logBuffer.endswith('\n'): + if self.logBuffer.endswith("\n"): # all lines are complete; no need to buffer self.logBuffer = "" else: @@ -303,33 +343,35 @@ def read_stdout(self): # This passes the line to the logger (to be logged or buffered) if isinstance(line, six.text_type): # if line is unicode - let's encode it to bytes - parsed_messages = self.messageLogger.write(line.encode('UTF-8', 'replace')) + parsed_messages = self.messageLogger.write( + line.encode("UTF-8", "replace") + ) else: # if line is bytes type, write it as it is parsed_messages = self.messageLogger.write(line) for message in parsed_messages: if isinstance(message, dict): - if message.get('action') == 'test_start': - self.lastTestSeen = message['test'] - elif message.get('action') == 'test_end': - self.lastTestSeen = '{} (finished)'.format(message['test']) - elif message.get('action') == 'suite_end': + if message.get("action") == "test_start": + self.lastTestSeen = message["test"] + elif message.get("action") == "test_end": + self.lastTestSeen = "{} (finished)".format(message["test"]) + elif message.get("action") == "suite_end": self.lastTestSeen = "Last test finished" - elif message.get('action') == 'log': - line = message['message'].strip() + elif message.get("action") == "log": + line = 
message["message"].strip() if self.counts: m = re.match(".*:\s*(\d*)", line) if m: try: val = int(m.group(1)) if "Passed:" in line: - self.counts['pass'] += val + self.counts["pass"] += val self.lastTestSeen = "Last test finished" elif "Failed:" in line: - self.counts['fail'] += val + self.counts["fail"] += val elif "Todo:" in line: - self.counts['todo'] += val + self.counts["todo"] += val except ADBTimeoutError: raise except Exception: @@ -370,11 +412,13 @@ def wait(self, timeout=None, noOutputTimeout=None): if (not slowLog) or (timer % 60 == 0): startRead = datetime.datetime.now() hasOutput = self.read_stdout() - if (datetime.datetime.now() - startRead) > datetime.timedelta(seconds=5): + if (datetime.datetime.now() - startRead) > datetime.timedelta( + seconds=5 + ): slowLog = True if hasOutput: noOutputTimer = 0 - if self.counts and 'pass' in self.counts and self.counts['pass'] > 0: + if self.counts and "pass" in self.counts and self.counts["pass"] > 0: interval = 0.5 time.sleep(interval) timer += interval @@ -382,7 +426,7 @@ def wait(self, timeout=None, noOutputTimeout=None): if datetime.datetime.now() > endTime: status = 1 break - if (noOutputTimeout and noOutputTimer > noOutputTimeout): + if noOutputTimeout and noOutputTimer > noOutputTimeout: status = 2 break if not hasOutput: @@ -402,7 +446,7 @@ def kill(self, stagedShutdown=False): # they rarely work well with Firefox on the Android # emulator. dump_screen provides an effective # screenshot of the emulator and its host desktop. - if not self.device._device_serial.startswith('emulator-'): + if not self.device._device_serial.startswith("emulator-"): dump_device_screen(self.device, get_default_logger()) elif self.utilityPath: dump_screen(self.utilityPath, get_default_logger()) @@ -459,5 +503,5 @@ def kill(self, stagedShutdown=False): @staticmethod def elf_arm(filename): - data = open(filename, 'rb').read(20) + data = open(filename, "rb").read(20) return data[:4] == "\x7fELF" and ord(data[18]) == 40 # EM_ARM diff --git a/build/moz.build b/build/moz.build index 43b25372275287..3d7aed0d56517c 100644 --- a/build/moz.build +++ b/build/moz.build @@ -4,114 +4,126 @@ # License, v. 2.0. If a copy of the MPL was not distributed with this # file, You can obtain one at http://mozilla.org/MPL/2.0/. -with Files('**'): - BUG_COMPONENT = ('Firefox Build System', 'General') +with Files("**"): + BUG_COMPONENT = ("Firefox Build System", "General") # This cannot be named "build" because of bug 922191. 
-SPHINX_TREES['buildsystem'] = 'docs' +SPHINX_TREES["buildsystem"] = "docs" -with Files('docs/**'): - SCHEDULES.exclusive = ['docs'] +with Files("docs/**"): + SCHEDULES.exclusive = ["docs"] -if CONFIG['OS_ARCH'] == 'WINNT': - DIRS += ['win32'] +if CONFIG["OS_ARCH"] == "WINNT": + DIRS += ["win32"] else: - DIRS += ['unix'] + DIRS += ["unix"] CRAMTEST_MANIFESTS += [ - 'tests/cram/cram.ini', + "tests/cram/cram.ini", ] -DEFINES['ACCEPTED_MAR_CHANNEL_IDS'] = CONFIG['ACCEPTED_MAR_CHANNEL_IDS'] +DEFINES["ACCEPTED_MAR_CHANNEL_IDS"] = CONFIG["ACCEPTED_MAR_CHANNEL_IDS"] -if CONFIG['MOZ_BUILD_APP'] == 'browser': +if CONFIG["MOZ_BUILD_APP"] == "browser": PYTHON_UNITTEST_MANIFESTS += [ - 'compare-mozconfig/python.ini', + "compare-mozconfig/python.ini", ] -if CONFIG['ENABLE_TESTS'] or CONFIG['MOZ_DMD']: - FINAL_TARGET_FILES += ['/tools/rb/fix_stacks.py'] +if CONFIG["ENABLE_TESTS"] or CONFIG["MOZ_DMD"]: + FINAL_TARGET_FILES += ["/tools/rb/fix_stacks.py"] -if CONFIG['MOZ_DMD']: - FINAL_TARGET_FILES += ['/memory/replace/dmd/dmd.py'] +if CONFIG["MOZ_DMD"]: + FINAL_TARGET_FILES += ["/memory/replace/dmd/dmd.py"] # Put a useful .gdbinit and .gdbinit.py in $objdir/build, to be picked up # automatically by GDB via either libxul.so-gdb.py or js-gdb.py. -OBJDIR_PP_FILES.build += ['.gdbinit.py.in'] -OBJDIR_FILES.build += ['.gdbinit.loader'] -OBJDIR_FILES.build += ['.gdbinit'] +OBJDIR_PP_FILES.build += [".gdbinit.py.in"] +OBJDIR_FILES.build += [".gdbinit.loader"] +OBJDIR_FILES.build += [".gdbinit"] # Install the clang-cl runtime library for ASAN next to the binaries we produce. -if CONFIG['MOZ_ASAN'] and CONFIG['CC_TYPE'] == 'clang-cl': - FINAL_TARGET_FILES += ['%' + CONFIG['MOZ_CLANG_RT_ASAN_LIB_PATH']] - FINAL_TARGET_FILES += ['%' + CONFIG['MOZ_CLANG_RT_ASAN_LIB_PATH'].replace(".dll", ".pdb")] +if CONFIG["MOZ_ASAN"] and CONFIG["CC_TYPE"] == "clang-cl": + FINAL_TARGET_FILES += ["%" + CONFIG["MOZ_CLANG_RT_ASAN_LIB_PATH"]] + FINAL_TARGET_FILES += [ + "%" + CONFIG["MOZ_CLANG_RT_ASAN_LIB_PATH"].replace(".dll", ".pdb") + ] # Install the clang runtime library for ASAN next to the binaries we produce. 
-if CONFIG['MOZ_WIDGET_TOOLKIT'] == 'android' and CONFIG['MOZ_ASAN']: - FINAL_TARGET_FILES += ['%' + CONFIG['MOZ_CLANG_RT_ASAN_LIB_PATH']] +if CONFIG["MOZ_WIDGET_TOOLKIT"] == "android" and CONFIG["MOZ_ASAN"]: + FINAL_TARGET_FILES += ["%" + CONFIG["MOZ_CLANG_RT_ASAN_LIB_PATH"]] -if CONFIG['LLVM_SYMBOLIZER']: - FINAL_TARGET_FILES += ['/' + CONFIG['LLVM_SYMBOLIZER']] +if CONFIG["LLVM_SYMBOLIZER"]: + FINAL_TARGET_FILES += ["/" + CONFIG["LLVM_SYMBOLIZER"]] -if CONFIG['MOZ_APP_BASENAME']: +if CONFIG["MOZ_APP_BASENAME"]: appini_defines = { - 'TOPOBJDIR': TOPOBJDIR, + "TOPOBJDIR": TOPOBJDIR, } - for var in ('GRE_MILESTONE', 'MOZ_APP_VERSION', 'MOZ_APP_BASENAME', - 'MOZ_APP_VENDOR', 'MOZ_APP_ID', 'MAR_CHANNEL_ID', - 'MOZ_APP_REMOTINGNAME'): + for var in ( + "GRE_MILESTONE", + "MOZ_APP_VERSION", + "MOZ_APP_BASENAME", + "MOZ_APP_VENDOR", + "MOZ_APP_ID", + "MAR_CHANNEL_ID", + "MOZ_APP_REMOTINGNAME", + ): appini_defines[var] = CONFIG[var] - if CONFIG['MOZ_APP_DISPLAYNAME'] != CONFIG['MOZ_APP_BASENAME']: - appini_defines['MOZ_APP_DISPLAYNAME'] = CONFIG['MOZ_APP_DISPLAYNAME'] + if CONFIG["MOZ_APP_DISPLAYNAME"] != CONFIG["MOZ_APP_BASENAME"]: + appini_defines["MOZ_APP_DISPLAYNAME"] = CONFIG["MOZ_APP_DISPLAYNAME"] - if CONFIG['MOZ_BUILD_APP'] == 'browser': - appini_defines['MOZ_BUILD_APP_IS_BROWSER'] = True + if CONFIG["MOZ_BUILD_APP"] == "browser": + appini_defines["MOZ_BUILD_APP_IS_BROWSER"] = True - if CONFIG['MOZ_APP_PROFILE']: - appini_defines['MOZ_APP_PROFILE'] = CONFIG['MOZ_APP_PROFILE'] + if CONFIG["MOZ_APP_PROFILE"]: + appini_defines["MOZ_APP_PROFILE"] = CONFIG["MOZ_APP_PROFILE"] - for var in ('MOZ_CRASHREPORTER', 'MOZ_PROFILE_MIGRATOR', 'MOZ_UPDATER'): + for var in ("MOZ_CRASHREPORTER", "MOZ_PROFILE_MIGRATOR", "MOZ_UPDATER"): if CONFIG[var]: appini_defines[var] = True - appini_defines['MOZ_APPUPDATE_HOST'] = 'aus5.mozilla.org' - if CONFIG['MOZ_APPUPDATE_HOST']: - appini_defines['MOZ_APPUPDATE_HOST'] = CONFIG['MOZ_APPUPDATE_HOST'] + appini_defines["MOZ_APPUPDATE_HOST"] = "aus5.mozilla.org" + if CONFIG["MOZ_APPUPDATE_HOST"]: + appini_defines["MOZ_APPUPDATE_HOST"] = CONFIG["MOZ_APPUPDATE_HOST"] GeneratedFile( - 'application.ini', - script='../python/mozbuild/mozbuild/action/preprocessor.py', - entry_point='generate', - inputs=['application.ini.in'], - flags=['-D%s=%s' % (k, '1' if v is True else v) - for k, v in sorted(appini_defines.items(), key=lambda t: t[0])]) - - FINAL_TARGET_FILES += ['!application.ini'] - if CONFIG['MOZ_WIDGET_TOOLKIT'] != 'android' and CONFIG['MOZ_UPDATER']: - FINAL_TARGET_PP_FILES += ['update-settings.ini'] + "application.ini", + script="../python/mozbuild/mozbuild/action/preprocessor.py", + entry_point="generate", + inputs=["application.ini.in"], + flags=[ + "-D%s=%s" % (k, "1" if v is True else v) + for k, v in sorted(appini_defines.items(), key=lambda t: t[0]) + ], + ) + + FINAL_TARGET_FILES += ["!application.ini"] + if CONFIG["MOZ_WIDGET_TOOLKIT"] != "android" and CONFIG["MOZ_UPDATER"]: + FINAL_TARGET_PP_FILES += ["update-settings.ini"] - GeneratedFile('application.ini.h', script='appini_header.py', - inputs=['!application.ini']) + GeneratedFile( + "application.ini.h", script="appini_header.py", inputs=["!application.ini"] + ) # Put a .lldbinit in the bin directory and the objdir, to be picked up # automatically by LLDB when we debug executables using either of those two # directories as the current working directory. The .lldbinit file will # load $(topsrcdir)/.lldbinit, which is where the actual debugging commands are. 
-DEFINES['topsrcdir'] = TOPSRCDIR -DEFINES['topobjdir'] = TOPOBJDIR -FINAL_TARGET_PP_FILES += ['.lldbinit.in'] -OBJDIR_FILES += ['!/dist/bin/.lldbinit'] +DEFINES["topsrcdir"] = TOPSRCDIR +DEFINES["topobjdir"] = TOPOBJDIR +FINAL_TARGET_PP_FILES += [".lldbinit.in"] +OBJDIR_FILES += ["!/dist/bin/.lldbinit"] # Put the .ycm_extra_conf.py file at the root of the objdir. It is used by # the vim plugin YouCompleteMe. -OBJDIR_FILES += ['/.ycm_extra_conf.py'] +OBJDIR_FILES += ["/.ycm_extra_conf.py"] -if CONFIG['MOZ_VALGRIND']: +if CONFIG["MOZ_VALGRIND"]: OBJDIR_FILES._valgrind += [ - 'valgrind/cross-architecture.sup', - 'valgrind/i386-pc-linux-gnu.sup', - 'valgrind/x86_64-pc-linux-gnu.sup', + "valgrind/cross-architecture.sup", + "valgrind/i386-pc-linux-gnu.sup", + "valgrind/x86_64-pc-linux-gnu.sup", ] diff --git a/build/moz.configure/android-ndk.configure b/build/moz.configure/android-ndk.configure index dd12b535733694..b6d198b149e34c 100644 --- a/build/moz.configure/android-ndk.configure +++ b/build/moz.configure/android-ndk.configure @@ -5,85 +5,95 @@ # file, You can obtain one at http://mozilla.org/MPL/2.0/. -@depends(mozbuild_state_path, '--help') -@imports(_from='os.path', _import='isdir') -@imports(_from='mozboot.android', _import='NDK_VERSION') +@depends(mozbuild_state_path, "--help") +@imports(_from="os.path", _import="isdir") +@imports(_from="mozboot.android", _import="NDK_VERSION") def default_android_ndk_root(mozbuild_state_path, _): - path = os.path.join(mozbuild_state_path, 'android-ndk-%s' % NDK_VERSION) + path = os.path.join(mozbuild_state_path, "android-ndk-%s" % NDK_VERSION) if isdir(path): return path -option('--with-android-ndk', nargs=1, - default=default_android_ndk_root, - help='location where the Android NDK can be found{|}') +option( + "--with-android-ndk", + nargs=1, + default=default_android_ndk_root, + help="location where the Android NDK can be found{|}", +) -option('--with-android-toolchain', nargs=1, - help='location of the Android toolchain') +option("--with-android-toolchain", nargs=1, help="location of the Android toolchain") -option('--with-android-googlevr-sdk', nargs=1, - help='location of the Android GoogleVR SDK') +option( + "--with-android-googlevr-sdk", nargs=1, help="location of the Android GoogleVR SDK" +) @depends(target) def min_android_version(target): - if target.cpu in ['aarch64', 'x86_64']: + if target.cpu in ["aarch64", "x86_64"]: # 64-bit support was added in API 21. - return '21' - return '16' + return "21" + return "16" -option('--with-android-version', - nargs=1, - help='android platform version{|}', - default=min_android_version) +option( + "--with-android-version", + nargs=1, + help="android platform version{|}", + default=min_android_version, +) -@depends('--with-android-version', min_android_version) -@imports(_from='__builtin__', _import='ValueError') +@depends("--with-android-version", min_android_version) +@imports(_from="__builtin__", _import="ValueError") def android_version(value, min_version): if not value: # Someone has passed --without-android-version. 
- die('--with-android-version cannot be disabled.') + die("--with-android-version cannot be disabled.") try: version = int(value[0]) except ValueError: - die('--with-android-version expects an integer value') + die("--with-android-version expects an integer value") if version < int(min_version): - die('--with-android-version must be at least %s (got %s)', - min_version, value[0]) + die( + "--with-android-version must be at least %s (got %s)", min_version, value[0] + ) return version -add_old_configure_assignment('android_version', android_version) +add_old_configure_assignment("android_version", android_version) -@depends('--with-android-ndk') -@imports(_from='os.path', _import='isdir') +@depends("--with-android-ndk") +@imports(_from="os.path", _import="isdir") def ndk(value): if value: if not isdir(value[0]): - die("The path you specified with --with-android-ndk (%s) is not " - "a directory" % value[0]) + die( + "The path you specified with --with-android-ndk (%s) is not " + "a directory" % value[0] + ) return value[0] - die('You must specify --with-android-ndk=/path/to/ndk when targeting Android, ' - 'or try |mach bootstrap|.') + die( + "You must specify --with-android-ndk=/path/to/ndk when targeting Android, " + "or try |mach bootstrap|." + ) -set_config('ANDROID_NDK', ndk) -add_old_configure_assignment('android_ndk', ndk) +set_config("ANDROID_NDK", ndk) +add_old_configure_assignment("android_ndk", ndk) @depends(ndk) -@checking('for android ndk version') -@imports(_from='__builtin__', _import='open') -@imports(_from='mozboot.android', _import='NDK_VERSION') -@imports(_from='mozboot.android', _import='get_ndk_version') -@imports(_from='mozboot.android', _import='GetNdkVersionError') +@checking("for android ndk version") +@imports(_from="__builtin__", _import="open") +@imports(_from="mozboot.android", _import="NDK_VERSION") +@imports(_from="mozboot.android", _import="get_ndk_version") +@imports(_from="mozboot.android", _import="GetNdkVersionError") def ndk_version(ndk): if not ndk: # Building 'js/src' for non-Android. @@ -95,28 +105,30 @@ def ndk_version(ndk): die(str(e)) if NDK_VERSION != human: - die('The only supported version of the NDK is %s (have %s)\n' - 'Please run |mach bootstrap| ' - 'to install the correct NDK.' % (NDK_VERSION, human)) + die( + "The only supported version of the NDK is %s (have %s)\n" + "Please run |mach bootstrap| " + "to install the correct NDK." 
% (NDK_VERSION, human) + ) return namespace( major=major, minor=minor, ) -set_config('ANDROID_NDK_MAJOR_VERSION', ndk_version.major) -set_config('ANDROID_NDK_MINOR_VERSION', ndk_version.minor) +set_config("ANDROID_NDK_MAJOR_VERSION", ndk_version.major) +set_config("ANDROID_NDK_MINOR_VERSION", ndk_version.minor) @depends(target, android_version, ndk) -@checking('for android platform directory') -@imports(_from='os.path', _import='isdir') +@checking("for android platform directory") +@imports(_from="os.path", _import="isdir") def android_platform(target, android_version, ndk): - if target.os != 'Android': + if target.os != "Android": return - if 'aarch64' == target.cpu: - target_dir_name = 'arm64' + if "aarch64" == target.cpu: + target_dir_name = "arm64" else: target_dir_name = target.cpu @@ -129,60 +141,63 @@ def android_platform(target, android_version, ndk): else: platform_version = android_version - platform_dir = os.path.join(ndk, - 'platforms', - 'android-%s' % platform_version, - 'arch-%s' % target_dir_name) + platform_dir = os.path.join( + ndk, "platforms", "android-%s" % platform_version, "arch-%s" % target_dir_name + ) if not isdir(platform_dir): - die("Android platform directory not found. With the current " - "configuration, it should be in %s" % platform_dir) + die( + "Android platform directory not found. With the current " + "configuration, it should be in %s" % platform_dir + ) return platform_dir -add_old_configure_assignment('android_platform', android_platform) -set_config('ANDROID_PLATFORM', android_platform) +add_old_configure_assignment("android_platform", android_platform) +set_config("ANDROID_PLATFORM", android_platform) @depends(android_platform, ndk, target) -@checking('for android sysroot directory') -@imports(_from='os.path', _import='isdir') +@checking("for android sysroot directory") +@imports(_from="os.path", _import="isdir") def android_sysroot(android_platform, ndk, target): - if target.os != 'Android': + if target.os != "Android": return # NDK r15 has both unified and non-unified headers, but we only support # non-unified for that NDK, so look for that first. search_dirs = [ # (, ) - (os.path.join(android_platform, 'usr', 'include'), android_platform), - (os.path.join(ndk, 'sysroot'), os.path.join(ndk, 'sysroot')), + (os.path.join(android_platform, "usr", "include"), android_platform), + (os.path.join(ndk, "sysroot"), os.path.join(ndk, "sysroot")), ] for test_dir, sysroot_dir in search_dirs: if isdir(test_dir): return sysroot_dir - die("Android sysroot directory not found in %s." % - str([sysroot_dir for test_dir, sysroot_dir in search_dirs])) + die( + "Android sysroot directory not found in %s." + % str([sysroot_dir for test_dir, sysroot_dir in search_dirs]) + ) -add_old_configure_assignment('android_sysroot', android_sysroot) +add_old_configure_assignment("android_sysroot", android_sysroot) @depends(android_platform, ndk, target) -@checking('for android system directory') -@imports(_from='os.path', _import='isdir') +@checking("for android system directory") +@imports(_from="os.path", _import="isdir") def android_system(android_platform, ndk, target): - if target.os != 'Android': + if target.os != "Android": return # NDK r15 has both unified and non-unified headers, but we only support # non-unified for that NDK, so look for that first. 
search_dirs = [ - os.path.join(android_platform, 'usr', 'include'), - os.path.join(ndk, 'sysroot', 'usr', 'include', target.toolchain), + os.path.join(android_platform, "usr", "include"), + os.path.join(ndk, "sysroot", "usr", "include", target.toolchain), ] for system_dir in search_dirs: @@ -192,86 +207,85 @@ def android_system(android_platform, ndk, target): die("Android system directory not found in %s." % str(search_dirs)) -add_old_configure_assignment('android_system', android_system) +add_old_configure_assignment("android_system", android_system) -@depends(target, host, ndk, '--with-android-toolchain') -@checking('for the Android toolchain directory', lambda x: x or 'not found') -@imports(_from='os.path', _import='isdir') -@imports(_from='mozbuild.shellutil', _import='quote') +@depends(target, host, ndk, "--with-android-toolchain") +@checking("for the Android toolchain directory", lambda x: x or "not found") +@imports(_from="os.path", _import="isdir") +@imports(_from="mozbuild.shellutil", _import="quote") def android_toolchain(target, host, ndk, toolchain): if not ndk: return if toolchain: return toolchain[0] else: - if target.cpu == 'arm' and target.endianness == 'little': - target_base = 'arm-linux-androideabi' - elif target.cpu == 'x86': - target_base = 'x86' - elif target.cpu == 'x86_64': - target_base = 'x86_64' - elif target.cpu == 'aarch64' and target.endianness == 'little': - target_base = 'aarch64-linux-android' + if target.cpu == "arm" and target.endianness == "little": + target_base = "arm-linux-androideabi" + elif target.cpu == "x86": + target_base = "x86" + elif target.cpu == "x86_64": + target_base = "x86_64" + elif target.cpu == "aarch64" and target.endianness == "little": + target_base = "aarch64-linux-android" else: - die('Target cpu is not supported.') - - toolchain_format = '%s/toolchains/%s-4.9/prebuilt/%s-%s' - host_kernel = 'windows' if host.kernel == 'WINNT' else host.kernel.lower() - - toolchain = toolchain_format % (ndk, target_base, - host_kernel, host.cpu) - log.debug('Trying %s' % quote(toolchain)) - if not isdir(toolchain) and host.cpu == 'x86_64': - toolchain = toolchain_format % (ndk, target_base, - host_kernel, 'x86') - log.debug('Trying %s' % quote(toolchain)) + die("Target cpu is not supported.") + + toolchain_format = "%s/toolchains/%s-4.9/prebuilt/%s-%s" + host_kernel = "windows" if host.kernel == "WINNT" else host.kernel.lower() + + toolchain = toolchain_format % (ndk, target_base, host_kernel, host.cpu) + log.debug("Trying %s" % quote(toolchain)) + if not isdir(toolchain) and host.cpu == "x86_64": + toolchain = toolchain_format % (ndk, target_base, host_kernel, "x86") + log.debug("Trying %s" % quote(toolchain)) if isdir(toolchain): return toolchain - die('You have to specify --with-android-toolchain=' - '/path/to/ndk/toolchain.') + die("You have to specify --with-android-toolchain=" "/path/to/ndk/toolchain.") -set_config('ANDROID_TOOLCHAIN', android_toolchain) +set_config("ANDROID_TOOLCHAIN", android_toolchain) @depends(target) def android_toolchain_prefix_base(target): - if target.cpu == 'x86': + if target.cpu == "x86": # Ideally, the --target should just have the right x86 variant # in the first place. 
- return 'i686-linux-android' + return "i686-linux-android" return target.toolchain -option(env='STLPORT_CPPFLAGS', - nargs=1, - help='Options compiler should pass for standard C++ library') +option( + env="STLPORT_CPPFLAGS", + nargs=1, + help="Options compiler should pass for standard C++ library", +) -@depends('STLPORT_CPPFLAGS', ndk) -@imports(_from='os.path', _import='isdir') +@depends("STLPORT_CPPFLAGS", ndk) +@imports(_from="os.path", _import="isdir") def stlport_cppflags(value, ndk): if value and len(value): return value.split() if not ndk: return - ndk_base = os.path.join(ndk, 'sources', 'cxx-stl') - cxx_base = os.path.join(ndk_base, 'llvm-libc++') - cxx_include = os.path.join(cxx_base, 'libcxx', 'include') - cxxabi_base = os.path.join(ndk_base, 'llvm-libc++abi') - cxxabi_include = os.path.join(cxxabi_base, 'libcxxabi', 'include') + ndk_base = os.path.join(ndk, "sources", "cxx-stl") + cxx_base = os.path.join(ndk_base, "llvm-libc++") + cxx_include = os.path.join(cxx_base, "libcxx", "include") + cxxabi_base = os.path.join(ndk_base, "llvm-libc++abi") + cxxabi_include = os.path.join(cxxabi_base, "libcxxabi", "include") if not isdir(cxx_include): # NDK r13 removes the inner "libcxx" directory. - cxx_include = os.path.join(cxx_base, 'include') + cxx_include = os.path.join(cxx_base, "include") if not isdir(cxx_include): die("Couldn't find path to libc++ includes in the android ndk") if not isdir(cxxabi_include): # NDK r13 removes the inner "libcxxabi" directory. - cxxabi_include = os.path.join(cxxabi_base, 'include') + cxxabi_include = os.path.join(cxxabi_base, "include") if not isdir(cxxabi_include): die("Couldn't find path to libc++abi includes in the android ndk") @@ -284,87 +298,99 @@ def stlport_cppflags(value, ndk): # Using -stdlib=libc++ and removing some of the -I below also doesn't # work because not everything that is in cxx_include comes in the C++ # header directory that comes with clang. 
- '-stdlib=libstdc++', - '-I%s' % cxx_include, - '-I%s' % os.path.join(ndk, 'sources', 'android', 'support', 'include'), - '-I%s' % cxxabi_include] + "-stdlib=libstdc++", + "-I%s" % cxx_include, + "-I%s" % os.path.join(ndk, "sources", "android", "support", "include"), + "-I%s" % cxxabi_include, + ] -add_old_configure_assignment('stlport_cppflags', stlport_cppflags) +add_old_configure_assignment("stlport_cppflags", stlport_cppflags) @depends(android_system, android_sysroot, android_toolchain, android_version) -def extra_toolchain_flags(android_system, android_sysroot, toolchain_dir, - android_version): +def extra_toolchain_flags( + android_system, android_sysroot, toolchain_dir, android_version +): if not android_sysroot: return [] - flags = ['-isystem', - android_system, - '-isystem', - os.path.join(android_sysroot, 'usr', 'include'), - '-gcc-toolchain', - toolchain_dir, - '-D__ANDROID_API__=%d' % android_version] + flags = [ + "-isystem", + android_system, + "-isystem", + os.path.join(android_sysroot, "usr", "include"), + "-gcc-toolchain", + toolchain_dir, + "-D__ANDROID_API__=%d" % android_version, + ] return flags @depends(android_toolchain_prefix_base, android_toolchain) def android_toolchain_prefix(prefix_base, toolchain): if toolchain: - return '%s/bin/%s-' % (toolchain, prefix_base) + return "%s/bin/%s-" % (toolchain, prefix_base) -imply_option('--with-toolchain-prefix', android_toolchain_prefix, - reason='--with-android-ndk') +imply_option( + "--with-toolchain-prefix", android_toolchain_prefix, reason="--with-android-ndk" +) -@depends(extra_toolchain_flags, stlport_cppflags, android_toolchain, - android_toolchain_prefix_base) -@imports(_from='os.path', _import='isdir') -def bindgen_cflags_android(toolchain_flags, stlport_flags, toolchain, - toolchain_prefix): +@depends( + extra_toolchain_flags, + stlport_cppflags, + android_toolchain, + android_toolchain_prefix_base, +) +@imports(_from="os.path", _import="isdir") +def bindgen_cflags_android(toolchain_flags, stlport_flags, toolchain, toolchain_prefix): if not toolchain_flags: return - gcc_include = os.path.join( - toolchain, 'lib', 'gcc', toolchain_prefix, '4.9.x') + gcc_include = os.path.join(toolchain, "lib", "gcc", toolchain_prefix, "4.9.x") if not isdir(gcc_include): - gcc_include = os.path.join( - toolchain, 'lib', 'gcc', toolchain_prefix, '4.9') - - return toolchain_flags + stlport_flags + [ - '-I%s' % os.path.join(gcc_include, 'include'), - '-I%s' % os.path.join(gcc_include, 'include-fixed'), - ] + gcc_include = os.path.join(toolchain, "lib", "gcc", toolchain_prefix, "4.9") + + return ( + toolchain_flags + + stlport_flags + + [ + "-I%s" % os.path.join(gcc_include, "include"), + "-I%s" % os.path.join(gcc_include, "include-fixed"), + ] + ) -@depends('--with-android-googlevr-sdk', target) -@checking('for GoogleVR SDK', lambda x: x.result) -@imports(_from='os.path', _import='exists') -@imports(_from='os.path', _import='abspath') +@depends("--with-android-googlevr-sdk", target) +@checking("for GoogleVR SDK", lambda x: x.result) +@imports(_from="os.path", _import="exists") +@imports(_from="os.path", _import="abspath") def googlevr_sdk(value, target): if not value: - return namespace( - result='Not specified' - ) + return namespace(result="Not specified") path = abspath(value[0]) if not exists(path): - die('Could not find GoogleVR SDK %s', path) - include = '%s/libraries/headers/' % path - if 'arm' == target.cpu: - arch = 'armeabi-v7a' - elif 'aarch64' == target.cpu: - arch = 'arm64-v8a' - elif 'x86' == target.cpu: - arch = 'x86' + 
die("Could not find GoogleVR SDK %s", path) + include = "%s/libraries/headers/" % path + if "arm" == target.cpu: + arch = "armeabi-v7a" + elif "aarch64" == target.cpu: + arch = "arm64-v8a" + elif "x86" == target.cpu: + arch = "x86" else: - die('Unsupported GoogleVR cpu architecture %s' % target.cpu) + die("Unsupported GoogleVR cpu architecture %s" % target.cpu) - libs = '{0}/libraries/jni/{1}/'.format(path, arch) + libs = "{0}/libraries/jni/{1}/".format(path, arch) if not exists(libs): - die('Could not find GoogleVR NDK at %s. Did you try running ' - '\'./gradlew :extractNdk\' in %s?', libs, path) + die( + "Could not find GoogleVR NDK at %s. Did you try running " + "'./gradlew :extractNdk' in %s?", + libs, + path, + ) return namespace( result=path, @@ -374,7 +400,7 @@ def googlevr_sdk(value, target): ) -set_define('MOZ_ANDROID_GOOGLE_VR', googlevr_sdk.enabled) -set_config('MOZ_ANDROID_GOOGLE_VR', googlevr_sdk.enabled) -set_config('MOZ_ANDROID_GOOGLE_VR_INCLUDE', googlevr_sdk.include) -set_config('MOZ_ANDROID_GOOGLE_VR_LIBS', googlevr_sdk.libs) +set_define("MOZ_ANDROID_GOOGLE_VR", googlevr_sdk.enabled) +set_config("MOZ_ANDROID_GOOGLE_VR", googlevr_sdk.enabled) +set_config("MOZ_ANDROID_GOOGLE_VR_INCLUDE", googlevr_sdk.include) +set_config("MOZ_ANDROID_GOOGLE_VR_LIBS", googlevr_sdk.libs) diff --git a/build/moz.configure/android-sdk.configure b/build/moz.configure/android-sdk.configure index 2eb4c8ceae09ff..8f12de584973d6 100644 --- a/build/moz.configure/android-sdk.configure +++ b/build/moz.configure/android-sdk.configure @@ -7,13 +7,13 @@ # Ensure Android SDK and build-tools versions depending on mobile target. -@depends(host, mozbuild_state_path, '--help') -@imports(_from='os.path', _import='isdir') +@depends(host, mozbuild_state_path, "--help") +@imports(_from="os.path", _import="isdir") def default_android_sdk_root(host, mozbuild_state_path, _): sdk_basename = { - 'Darwin': 'android-sdk-macosx', - 'Linux': 'android-sdk-linux', - 'WINNT': 'android-sdk-windows', + "Darwin": "android-sdk-macosx", + "Linux": "android-sdk-linux", + "WINNT": "android-sdk-windows", }.get(host.kernel) if sdk_basename: path = os.path.join(mozbuild_state_path, sdk_basename) @@ -21,52 +21,61 @@ def default_android_sdk_root(host, mozbuild_state_path, _): return path -option('--with-android-sdk', nargs=1, - default=default_android_sdk_root, - help='location where the Android SDK can be found (like ~/.mozbuild/android-sdk-linux){|}') +option( + "--with-android-sdk", + nargs=1, + default=default_android_sdk_root, + help="location where the Android SDK can be found (like ~/.mozbuild/android-sdk-linux){|}", +) -@depends('--with-android-sdk') -@imports(_from='os.path', _import='isdir') +@depends("--with-android-sdk") +@imports(_from="os.path", _import="isdir") def android_sdk_root(value): if value: if not isdir(value[0]): - die("The path you specified with --with-android-sdk (%s) is not " - "a directory" % value[0]) + die( + "The path you specified with --with-android-sdk (%s) is not " + "a directory" % value[0] + ) return value[0] - die("You must specify --with-android-sdk=/path/to/sdk when targeting Android, " - "or try |mach bootstrap|.") + die( + "You must specify --with-android-sdk=/path/to/sdk when targeting Android, " + "or try |mach bootstrap|." 
+ ) -@depends('--help') +@depends("--help") def android_sdk_version(_): - return namespace(build_tools_version='29.0.3', target_sdk_version='29') + return namespace(build_tools_version="29.0.3", target_sdk_version="29") @depends(android_sdk_root, android_sdk_version) -@checking('for Android build-tools') -@imports(_from='os.path', _import='exists') -@imports(_from='os.path', _import='isdir') +@checking("for Android build-tools") +@imports(_from="os.path", _import="exists") +@imports(_from="os.path", _import="isdir") def android_build_tools(sdk_root, sdk_version): - android_build_tools_base = os.path.join(sdk_root, 'build-tools') + android_build_tools_base = os.path.join(sdk_root, "build-tools") version = sdk_version.build_tools_version if isdir(os.path.join(android_build_tools_base, version)): tools = os.path.join(android_build_tools_base, version) - for zipalign in ('zipalign', 'zipalign.exe'): + for zipalign in ("zipalign", "zipalign.exe"): if exists(os.path.join(tools, zipalign)): return [tools] - die("You must install the Android build-tools version %s. " - "Try |mach bootstrap|. (Looked for %s/%s)" % - (version, android_build_tools_base, version)) + die( + "You must install the Android build-tools version %s. " + "Try |mach bootstrap|. (Looked for %s/%s)" + % (version, android_build_tools_base, version) + ) @depends(android_sdk_root) -@checking('for Android tools') -@imports(_from='os.path', _import='isdir') +@checking("for Android tools") +@imports(_from="os.path", _import="isdir") def android_tools(sdk_root): - tools = os.path.join(sdk_root, 'tools') + tools = os.path.join(sdk_root, "tools") if isdir(tools): return tools @@ -74,45 +83,50 @@ def android_tools(sdk_root): @depends(android_sdk_root) -@checking('for Android platform-tools') -@imports(_from='os.path', _import='exists') -@imports(_from='os.path', _import='isdir') +@checking("for Android platform-tools") +@imports(_from="os.path", _import="exists") +@imports(_from="os.path", _import="isdir") def android_platform_tools(sdk_root): - tools = os.path.join(sdk_root, 'platform-tools') - for adb in ('adb', 'adb.exe'): + tools = os.path.join(sdk_root, "platform-tools") + for adb in ("adb", "adb.exe"): if exists(os.path.join(tools, adb)): return [tools] - die("You must install the Android platform-tools. Try |mach bootstrap|. (Looked for %s)" % - tools) + die( + "You must install the Android platform-tools. Try |mach bootstrap|. (Looked for %s)" + % tools + ) @depends(android_sdk_root) def android_emulator_path(sdk_root): - return [os.path.join(sdk_root, 'emulator')] + return [os.path.join(sdk_root, "emulator")] @template def check_android_tools(tool, tool_dir): - check = check_prog(tool.upper(), (tool, tool + '.exe'), paths=tool_dir, - allow_missing=True) + check = check_prog( + tool.upper(), (tool, tool + ".exe"), paths=tool_dir, allow_missing=True + ) @depends(check) def require_tool(result): if result is None: - die('The program %s was not found. Try |mach bootstrap|' % tool) + die("The program %s was not found. 
Try |mach bootstrap|" % tool) return result return require_tool -check_android_tools('zipalign', android_build_tools) -check_android_tools('adb', android_platform_tools) -check_android_tools('emulator', android_emulator_path) +check_android_tools("zipalign", android_build_tools) +check_android_tools("adb", android_platform_tools) +check_android_tools("emulator", android_emulator_path) -set_config('ANDROID_SDK_ROOT', android_sdk_root) -set_config('ANDROID_TOOLS', android_tools) +set_config("ANDROID_SDK_ROOT", android_sdk_root) +set_config("ANDROID_TOOLS", android_tools) -set_config('ANDROID_BUILD_TOOLS_VERSION', android_sdk_version.build_tools_version) -set_config('ANDROID_TARGET_SDK', android_sdk_version.target_sdk_version) -add_old_configure_assignment('ANDROID_TARGET_SDK', android_sdk_version.target_sdk_version) +set_config("ANDROID_BUILD_TOOLS_VERSION", android_sdk_version.build_tools_version) +set_config("ANDROID_TARGET_SDK", android_sdk_version.target_sdk_version) +add_old_configure_assignment( + "ANDROID_TARGET_SDK", android_sdk_version.target_sdk_version +) diff --git a/build/moz.configure/arm.configure b/build/moz.configure/arm.configure index d767fb8d99f730..2082fa640f498c 100644 --- a/build/moz.configure/arm.configure +++ b/build/moz.configure/arm.configure @@ -4,15 +4,16 @@ # License, v. 2.0. If a copy of the MPL was not distributed with this # file, You can obtain one at http://mozilla.org/MPL/2.0/. -@depends(target.os, '--help') + +@depends(target.os, "--help") def arm_option_defaults(os, _): - if os == 'Android': - arch = 'armv7-a' - thumb = 'yes' - fpu = 'neon' - float_abi = 'softfp' + if os == "Android": + arch = "armv7-a" + thumb = "yes" + fpu = "neon" + float_abi = "softfp" else: - arch = thumb = fpu = float_abi = 'toolchain-default' + arch = thumb = fpu = float_abi = "toolchain-default" return namespace( arch=arch, thumb=thumb, @@ -26,124 +27,154 @@ def arm_option_defaults(os, _): # default is always returned. The lint is fooled by this file being # conditional. If it weren't conditional, the lint wouldn't ask for '{|}' to # be there. -option('--with-arch', nargs=1, - default=arm_option_defaults.arch, - help='{Use specific CPU features (-march=type). Resets thumb, fpu, ' - 'float-abi, etc. defaults when set|}') +option( + "--with-arch", + nargs=1, + default=arm_option_defaults.arch, + help="{Use specific CPU features (-march=type). Resets thumb, fpu, " + "float-abi, etc. 
defaults when set|}", +) -@depends('--with-arch') +@depends("--with-arch") def arch_option(value): if value: - if value[0] != 'toolchain-default': - return ['-march={}'.format(value[0])] + if value[0] != "toolchain-default": + return ["-march={}".format(value[0])] return [] -option('--with-thumb', choices=('yes', 'no', 'toolchain-default'), - default=arm_option_defaults.thumb, - nargs='?', help='{Use Thumb instruction set (-mthumb)|}') +option( + "--with-thumb", + choices=("yes", "no", "toolchain-default"), + default=arm_option_defaults.thumb, + nargs="?", + help="{Use Thumb instruction set (-mthumb)|}", +) def normalize_arm_option(value): if value: if len(value): - if value[0] == 'yes': + if value[0] == "yes": return True - elif value[0] == 'no': + elif value[0] == "no": return False else: return value[0] return True return False -@depends('--with-thumb') + +@depends("--with-thumb") def thumb_option(value): value = normalize_arm_option(value) if value is True: - return ['-mthumb'] + return ["-mthumb"] if value is False: - return ['-marm'] + return ["-marm"] return [] -option('--with-thumb-interwork', choices=('yes', 'no', 'toolchain-default'), - default='toolchain-default', - nargs='?', help='Use Thumb/ARM instuctions interwork (-mthumb-interwork)') +option( + "--with-thumb-interwork", + choices=("yes", "no", "toolchain-default"), + default="toolchain-default", + nargs="?", + help="Use Thumb/ARM instuctions interwork (-mthumb-interwork)", +) -@depends('--with-thumb-interwork') +@depends("--with-thumb-interwork") def thumb_interwork_option(value): value = normalize_arm_option(value) if value is True: - return ['-mthumb-interwork'] + return ["-mthumb-interwork"] if value is False: - return ['-mno-thumb-interwork'] + return ["-mno-thumb-interwork"] return [] -option('--with-fpu', nargs=1, - default=arm_option_defaults.fpu, - help='{Use specific FPU type (-mfpu=type)|}') +option( + "--with-fpu", + nargs=1, + default=arm_option_defaults.fpu, + help="{Use specific FPU type (-mfpu=type)|}", +) -@depends('--with-fpu') +@depends("--with-fpu") def fpu_option(value): if value: - if value[0] != 'toolchain-default': - return ['-mfpu={}'.format(value[0])] + if value[0] != "toolchain-default": + return ["-mfpu={}".format(value[0])] return [] -option('--with-float-abi', nargs=1, - default=arm_option_defaults.float_abi, - help='{Use specific arm float ABI (-mfloat-abi=type)|}') +option( + "--with-float-abi", + nargs=1, + default=arm_option_defaults.float_abi, + help="{Use specific arm float ABI (-mfloat-abi=type)|}", +) -@depends('--with-float-abi') +@depends("--with-float-abi") def float_abi_option(value): if value: - if value[0] != 'toolchain-default': - return ['-mfloat-abi={}'.format(value[0])] + if value[0] != "toolchain-default": + return ["-mfloat-abi={}".format(value[0])] return [] -option('--with-soft-float', choices=('yes', 'no', 'toolchain-default'), - default='toolchain-default', - nargs='?', help='Use soft float library (-msoft-float)') +option( + "--with-soft-float", + choices=("yes", "no", "toolchain-default"), + default="toolchain-default", + nargs="?", + help="Use soft float library (-msoft-float)", +) -@depends('--with-soft-float') +@depends("--with-soft-float") def soft_float_option(value): value = normalize_arm_option(value) if value is True: - return ['-msoft-float'] + return ["-msoft-float"] if value is False: - return ['-mno-soft-float'] + return ["-mno-soft-float"] return [] -check_and_add_gcc_flag('-mno-unaligned-access', - when=depends(target.os)(lambda os: os == 'Android')) 
+check_and_add_gcc_flag( + "-mno-unaligned-access", when=depends(target.os)(lambda os: os == "Android") +) -@depends(arch_option, thumb_option, thumb_interwork_option, fpu_option, - float_abi_option, soft_float_option) +@depends( + arch_option, + thumb_option, + thumb_interwork_option, + fpu_option, + float_abi_option, + soft_float_option, +) def all_flags(arch, thumb, interwork, fpu, float_abi, soft_float): return arch + thumb + interwork + fpu + float_abi + soft_float -add_old_configure_assignment('_ARM_FLAGS', all_flags) -add_old_configure_assignment('_THUMB_FLAGS', thumb_option) +add_old_configure_assignment("_ARM_FLAGS", all_flags) +add_old_configure_assignment("_THUMB_FLAGS", thumb_option) @depends(c_compiler, all_flags) -@checking('ARM version support in compiler', lambda x: x.arm_arch) -@imports(_from='textwrap', _import='dedent') +@checking("ARM version support in compiler", lambda x: x.arm_arch) +@imports(_from="textwrap", _import="dedent") def arm_target(compiler, all_flags): # We're going to preprocess the following source to figure out some details # about the arm target options we have enabled. - source = dedent('''\ + source = dedent( + """\ %ARM_ARCH __ARM_ARCH #if __thumb2__ %THUMB2 yes @@ -172,54 +203,58 @@ def arm_target(compiler, all_flags): #elif __ARM_FPV5__ %FPU fp-armv8 #endif - ''') + """ + ) result = try_invoke_compiler( compiler.wrapper + [compiler.compiler] + compiler.flags, compiler.language, source, - ['-E'] + all_flags, + ["-E"] + all_flags, ) # Metadata emitted by preprocessors such as GCC with LANG=ja_JP.utf-8 may # have non-ASCII characters. Treat the output as bytearray. - data = {'fpu': None} # fpu may not get a value from the preprocessor. + data = {"fpu": None} # fpu may not get a value from the preprocessor. for line in result.splitlines(): - if line.startswith('%'): - k, _, v = line.partition(' ') - k = k.lstrip('%').lower() - if k == 'arm_arch': + if line.startswith("%"): + k, _, v = line.partition(" ") + k = k.lstrip("%").lower() + if k == "arm_arch": data[k] = int(v) else: data[k] = { - 'yes': True, - 'no': False, + "yes": True, + "no": False, }.get(v, v) - log.debug('%s = %s', k, data[k]) + log.debug("%s = %s", k, data[k]) return namespace(**data) -@depends(arm_target.arm_arch, when=depends(target.os)(lambda os: os == 'Android')) +@depends(arm_target.arm_arch, when=depends(target.os)(lambda os: os == "Android")) def armv7(arch): if arch < 7: - die('Android/armv6 and earlier are not supported') + die("Android/armv6 and earlier are not supported") -set_config('MOZ_THUMB2', True, when=arm_target.thumb2) -set_define('MOZ_THUMB2', True, when=arm_target.thumb2) -add_old_configure_assignment('MOZ_THUMB2', True, when=arm_target.thumb2) +set_config("MOZ_THUMB2", True, when=arm_target.thumb2) +set_define("MOZ_THUMB2", True, when=arm_target.thumb2) +add_old_configure_assignment("MOZ_THUMB2", True, when=arm_target.thumb2) -have_arm_simd = c_compiler.try_compile(body='asm("uqadd8 r1, r1, r2");', - check_msg='for ARM SIMD support in compiler') +have_arm_simd = c_compiler.try_compile( + body='asm("uqadd8 r1, r1, r2");', check_msg="for ARM SIMD support in compiler" +) -set_config('HAVE_ARM_SIMD', have_arm_simd) -set_define('HAVE_ARM_SIMD', have_arm_simd) +set_config("HAVE_ARM_SIMD", have_arm_simd) +set_define("HAVE_ARM_SIMD", have_arm_simd) -have_arm_neon = c_compiler.try_compile(body='asm(".fpu neon\\n vadd.i8 d0, d0, d0");', - check_msg='for ARM NEON support in compiler') +have_arm_neon = c_compiler.try_compile( + body='asm(".fpu neon\\n vadd.i8 d0, d0, 
d0");', + check_msg="for ARM NEON support in compiler", +) -set_config('HAVE_ARM_NEON', have_arm_neon) -set_define('HAVE_ARM_NEON', have_arm_neon) +set_config("HAVE_ARM_NEON", have_arm_neon) +set_define("HAVE_ARM_NEON", have_arm_neon) # We don't need to build NEON support if we're targetting a non-NEON device. @@ -229,13 +264,13 @@ def build_arm_neon(arm_arch): return arm_arch >= 7 -set_config('BUILD_ARM_NEON', build_arm_neon) -set_define('BUILD_ARM_NEON', build_arm_neon) +set_config("BUILD_ARM_NEON", build_arm_neon) +set_define("BUILD_ARM_NEON", build_arm_neon) -set_config('ARM_ARCH', depends(arm_target.arm_arch)(lambda x: str(x))) -add_old_configure_assignment('ARM_ARCH', depends(arm_target.arm_arch)(lambda x: str(x))) -set_config('MOZ_FPU', arm_target.fpu) +set_config("ARM_ARCH", depends(arm_target.arm_arch)(lambda x: str(x))) +add_old_configure_assignment("ARM_ARCH", depends(arm_target.arm_arch)(lambda x: str(x))) +set_config("MOZ_FPU", arm_target.fpu) @depends(arm_target.float_abi) @@ -248,10 +283,10 @@ def neon_flags(float_abi): # we can safely mix code built with both ABIs. So, if we detect # that compiling uses the "softfloat" ABI, force the use of the # "softfp" ABI instead. - flags = ['-mfpu=neon'] - if float_abi == 'soft': - flags.append('-mfloat-abi=softfp') + flags = ["-mfpu=neon"] + if float_abi == "soft": + flags.append("-mfloat-abi=softfp") return tuple(flags) -set_config('NEON_FLAGS', neon_flags) +set_config("NEON_FLAGS", neon_flags) diff --git a/build/moz.configure/bindgen.configure b/build/moz.configure/bindgen.configure index 79e1c3b14dcb59..da8a5d57af1db9 100644 --- a/build/moz.configure/bindgen.configure +++ b/build/moz.configure/bindgen.configure @@ -4,45 +4,52 @@ # License, v. 2.0. If a copy of the MPL was not distributed with this # file, You can obtain one at http://mozilla.org/MPL/2.0/. -@depends(build_project, '--enable-smoosh') + +@depends(build_project, "--enable-smoosh") def cbindgen_is_needed(build_project, js_enable_smoosh): - if build_project != 'js': + if build_project != "js": # cbindgen is needed by the style system build and webrender. return True # cbindgen is needed by SmooshMonkey. return js_enable_smoosh -option(env='CBINDGEN', nargs=1, when=cbindgen_is_needed, - help='Path to cbindgen') + +option(env="CBINDGEN", nargs=1, when=cbindgen_is_needed, help="Path to cbindgen") -@imports(_from='textwrap', _import='dedent') +@imports(_from="textwrap", _import="dedent") def check_cbindgen_version(cbindgen, fatal=False): log.debug("trying cbindgen: %s" % cbindgen) - cbindgen_min_version = Version('0.15.0') + cbindgen_min_version = Version("0.15.0") # cbindgen x.y.z - version = Version(check_cmd_output(cbindgen, '--version').strip().split(" ")[1]) + version = Version(check_cmd_output(cbindgen, "--version").strip().split(" ")[1]) log.debug("%s has version %s" % (cbindgen, version)) if version >= cbindgen_min_version: return True if not fatal: return False - die(dedent('''\ + die( + dedent( + """\ cbindgen version {} is too old. At least version {} is required. Please update using 'cargo install cbindgen --force' or running './mach bootstrap', after removing the existing executable located at {}. 
- '''.format(version, cbindgen_min_version, cbindgen))) + """.format( + version, cbindgen_min_version, cbindgen + ) + ) + ) -@depends_if('CBINDGEN', toolchain_search_path, when=cbindgen_is_needed) -@checking('for cbindgen') -@imports(_from='textwrap', _import='dedent') +@depends_if("CBINDGEN", toolchain_search_path, when=cbindgen_is_needed) +@checking("for cbindgen") +@imports(_from="textwrap", _import="dedent") def cbindgen(cbindgen_override, toolchain_search_path): if cbindgen_override: check_cbindgen_version(cbindgen_override[0], fatal=True) @@ -50,7 +57,7 @@ def cbindgen(cbindgen_override, toolchain_search_path): candidates = [] for path in toolchain_search_path: - candidate = find_program('cbindgen', [path]) + candidate = find_program("cbindgen", [path]) if not candidate: continue if check_cbindgen_version(candidate): @@ -58,104 +65,136 @@ def cbindgen(cbindgen_override, toolchain_search_path): candidates.append(candidate) if not candidates: - raise FatalCheckError(dedent('''\ + raise FatalCheckError( + dedent( + """\ Cannot find cbindgen. Please run `mach bootstrap`, `cargo install cbindgen`, ensure that `cbindgen` is on your PATH, or point at an executable with `CBINDGEN`. - ''')) + """ + ) + ) check_cbindgen_version(candidates[0], fatal=True) -set_config('CBINDGEN', cbindgen) - -# Bindgen can use rustfmt to format Rust file, but it's not required. -option(env='RUSTFMT', nargs=1, help='Path to the rustfmt program') - -rustfmt = check_prog('RUSTFMT', ['rustfmt'], paths=toolchain_search_path, - input='RUSTFMT', allow_missing=True) - -option('--with-libclang-path', nargs=1, - help='Absolute path to a directory containing Clang/LLVM libraries for bindgen (version 3.9.x or above)') -option('--with-clang-path', nargs=1, - help='Absolute path to a Clang binary for bindgen (version 3.9.x or above)') +set_config("CBINDGEN", cbindgen) - -@depends('--with-clang-path', c_compiler, cxx_compiler, toolchain_search_path, - target, macos_sdk) -@checking('for clang for bindgen', lambda x: x.path if x else 'not found') -def bindgen_clang_compiler(clang_path, c_compiler, cxx_compiler, - toolchain_search_path, target, macos_sdk): +# Bindgen can use rustfmt to format Rust file, but it's not required. +option(env="RUSTFMT", nargs=1, help="Path to the rustfmt program") + +rustfmt = check_prog( + "RUSTFMT", + ["rustfmt"], + paths=toolchain_search_path, + input="RUSTFMT", + allow_missing=True, +) + + +option( + "--with-libclang-path", + nargs=1, + help="Absolute path to a directory containing Clang/LLVM libraries for bindgen (version 3.9.x or above)", +) +option( + "--with-clang-path", + nargs=1, + help="Absolute path to a Clang binary for bindgen (version 3.9.x or above)", +) + + +@depends( + "--with-clang-path", + c_compiler, + cxx_compiler, + toolchain_search_path, + target, + macos_sdk, +) +@checking("for clang for bindgen", lambda x: x.path if x else "not found") +def bindgen_clang_compiler( + clang_path, c_compiler, cxx_compiler, toolchain_search_path, target, macos_sdk +): # When the target compiler is clang, use that, including flags. 
- if cxx_compiler.type == 'clang': - if clang_path and clang_path[0] not in (c_compiler.compiler, - cxx_compiler.compiler): - die('--with-clang-path is not valid when the target compiler is %s', - cxx_compiler.type) + if cxx_compiler.type == "clang": + if clang_path and clang_path[0] not in ( + c_compiler.compiler, + cxx_compiler.compiler, + ): + die( + "--with-clang-path is not valid when the target compiler is %s", + cxx_compiler.type, + ) return namespace( path=cxx_compiler.compiler, flags=cxx_compiler.flags, ) # When the target compiler is clang-cl, use clang in the same directory, # and figure the right flags to use. - if cxx_compiler.type == 'clang-cl': - if clang_path and os.path.dirname(clang_path[0]) != \ - os.path.dirname(cxx_compiler.compiler): - die('--with-clang-path must point to clang in the same directory ' - 'as the target compiler') + if cxx_compiler.type == "clang-cl": + if clang_path and os.path.dirname(clang_path[0]) != os.path.dirname( + cxx_compiler.compiler + ): + die( + "--with-clang-path must point to clang in the same directory " + "as the target compiler" + ) if not clang_path: - clang_path = [os.path.join(os.path.dirname(cxx_compiler.compiler), - 'clang')] + clang_path = [os.path.join(os.path.dirname(cxx_compiler.compiler), "clang")] - clang_path = find_program(clang_path[0] if clang_path else 'clang++', - toolchain_search_path) + clang_path = find_program( + clang_path[0] if clang_path else "clang++", toolchain_search_path + ) if not clang_path: return # Hack before bug 1617793: if the compiler is clang-cl, hack the target - if cxx_compiler.type == 'clang-cl': - target = split_triplet('%s-pc-windows-msvc' % target.raw_cpu, allow_msvc=True) + if cxx_compiler.type == "clang-cl": + target = split_triplet("%s-pc-windows-msvc" % target.raw_cpu, allow_msvc=True) flags = prepare_flags(target, macos_sdk) - info = check_compiler([clang_path] + flags, 'C++', target) + info = check_compiler([clang_path] + flags, "C++", target) return namespace( path=clang_path, flags=flags + info.flags, ) -@depends('--with-libclang-path', bindgen_clang_compiler, - host_library_name_info, host) -@checking('for libclang for bindgen', lambda x: x if x else 'not found') -@imports('glob') -@imports(_from='os', _import='pathsep') -@imports(_from='os.path', _import='split', _as='pathsplit') -@imports('re') +@depends("--with-libclang-path", bindgen_clang_compiler, host_library_name_info, host) +@checking("for libclang for bindgen", lambda x: x if x else "not found") +@imports("glob") +@imports(_from="os", _import="pathsep") +@imports(_from="os.path", _import="split", _as="pathsplit") +@imports("re") def bindgen_libclang_path(libclang_path, clang, library_name_info, host): if not clang: if libclang_path: - die('--with-libclang-path is not valid without a clang compiler ' - 'for bindgen') + die( + "--with-libclang-path is not valid without a clang compiler " + "for bindgen" + ) return # Try to ensure that the clang shared library that bindgen is going # to look for is actually present. The files that we search for # mirror the logic in clang-sys/build.rs. 
libclang_choices = [] - if host.os == 'WINNT': - libclang_choices.append('libclang.dll') - libclang_choices.append('%sclang%s' % (library_name_info.dll.prefix, - library_name_info.dll.suffix)) - if host.kernel == 'Linux': - libclang_choices.append('libclang.so.*') + if host.os == "WINNT": + libclang_choices.append("libclang.dll") + libclang_choices.append( + "%sclang%s" % (library_name_info.dll.prefix, library_name_info.dll.suffix) + ) + if host.kernel == "Linux": + libclang_choices.append("libclang.so.*") - if host.os == 'OpenBSD': - libclang_choices.append('libclang.so.*.*') + if host.os == "OpenBSD": + libclang_choices.append("libclang.so.*.*") candidates = [] if not libclang_path: # Try to find libclang_path based on clang search dirs. - clang_search_dirs = check_cmd_output(clang.path, '-print-search-dirs') + clang_search_dirs = check_cmd_output(clang.path, "-print-search-dirs") for line in clang_search_dirs.splitlines(): - name, _, value = line.partition(': =') - if host.os == 'WINNT' and name == 'programs': + name, _, value = line.partition(": =") + if host.os == "WINNT" and name == "programs": # On Windows, libclang.dll is in bin/ rather than lib/, # so scan the programs search dirs. # To make matters complicated, clang before version 9 uses `:` @@ -163,22 +202,22 @@ def bindgen_libclang_path(libclang_path, clang, library_name_info, host): if pathsep in value: candidates.extend(value.split(pathsep)) else: - for part in value.split(':'): + for part in value.split(":"): # Assume that if previous "candidate" was of length 1, # it's a drive letter and the current part is the rest of # the corresponding full path. if candidates and len(candidates[-1]) == 1: - candidates[-1] += ':' + part + candidates[-1] += ":" + part else: candidates.append(part) - elif host.os != 'WINNT' and name == 'libraries': + elif host.os != "WINNT" and name == "libraries": # On other platforms, use the directories from the libraries # search dirs that looks like $something/clang/$version. for dir in value.split(pathsep): dir, version = pathsplit(dir) - if re.match(r'[0-9.]+', version): + if re.match(r"[0-9.]+", version): dir, name = pathsplit(dir) - if name == 'clang': + if name == "clang": candidates.append(dir) else: candidates.append(libclang_path[0]) @@ -198,16 +237,20 @@ def bindgen_config_paths(clang, libclang, build_project): # Actually, we don't want to force an error if we're not building the # browser generally. We therefore whitelist the projects that require # bindgen facilities at this point and leave it at that. - if build_project in ('browser', 'mobile/android'): + if build_project in ("browser", "mobile/android"): if not clang: - die('Could not find clang to generate run bindings for C/C++. ' - 'Please install the necessary packages, run `mach bootstrap`, ' - 'or use --with-clang-path to give the location of clang.') + die( + "Could not find clang to generate run bindings for C/C++. " + "Please install the necessary packages, run `mach bootstrap`, " + "or use --with-clang-path to give the location of clang." + ) if not libclang: - die('Could not find libclang to generate rust bindings for C/C++. ' - 'Please install the necessary packages, run `mach bootstrap`, ' - 'or use --with-libclang-path to give the path containing it.') + die( + "Could not find libclang to generate rust bindings for C/C++. " + "Please install the necessary packages, run `mach bootstrap`, " + "or use --with-libclang-path to give the path containing it." 
+ ) if clang and libclang: return namespace( @@ -219,9 +262,9 @@ def bindgen_config_paths(clang, libclang, build_project): @depends(bindgen_config_paths.libclang, when=bindgen_config_paths) -@checking('that libclang is new enough', lambda s: 'yes' if s else 'no') -@imports(_from='ctypes', _import='CDLL') -@imports(_from='textwrap', _import='dedent') +@checking("that libclang is new enough", lambda s: "yes" if s else "no") +@imports(_from="ctypes", _import="CDLL") +@imports(_from="textwrap", _import="dedent") def min_libclang_version(libclang): try: lib = CDLL(libclang) @@ -230,77 +273,94 @@ def min_libclang_version(libclang): fun = lib.clang_getAddressSpace return True except: - die(dedent('''\ + die( + dedent( + """\ The libclang located at {} is too old (need at least 5.0). Please make sure to update it or point to a newer libclang using --with-libclang-path. - '''.format(libclang))) + """.format( + libclang + ) + ) + ) return False -set_config('MOZ_LIBCLANG_PATH', bindgen_config_paths.libclang_path) -set_config('MOZ_CLANG_PATH', bindgen_config_paths.clang_path) +set_config("MOZ_LIBCLANG_PATH", bindgen_config_paths.libclang_path) +set_config("MOZ_CLANG_PATH", bindgen_config_paths.clang_path) -@depends(target, target_is_unix, cxx_compiler, bindgen_cflags_android, - bindgen_config_paths.clang_flags) -def basic_bindgen_cflags(target, is_unix, compiler_info, android_cflags, - clang_flags): +@depends( + target, + target_is_unix, + cxx_compiler, + bindgen_cflags_android, + bindgen_config_paths.clang_flags, +) +def basic_bindgen_cflags(target, is_unix, compiler_info, android_cflags, clang_flags): args = [ - '-x', 'c++', '-fno-sized-deallocation', '-fno-aligned-new', - '-DTRACING=1', '-DIMPL_LIBXUL', '-DMOZILLA_INTERNAL_API', - '-DRUST_BINDGEN' + "-x", + "c++", + "-fno-sized-deallocation", + "-fno-aligned-new", + "-DTRACING=1", + "-DIMPL_LIBXUL", + "-DMOZILLA_INTERNAL_API", + "-DRUST_BINDGEN", ] if is_unix: - args += ['-DOS_POSIX=1'] + args += ["-DOS_POSIX=1"] - if target.os == 'Android': + if target.os == "Android": args += android_cflags args += { - 'Android': ['-DOS_ANDROID=1'], - 'DragonFly': ['-DOS_BSD=1', '-DOS_DRAGONFLY=1'], - 'FreeBSD': ['-DOS_BSD=1', '-DOS_FREEBSD=1'], - 'GNU': ['-DOS_LINUX=1'], - 'NetBSD': ['-DOS_BSD=1', '-DOS_NETBSD=1'], - 'OpenBSD': ['-DOS_BSD=1', '-DOS_OPENBSD=1'], - 'OSX': ['-DOS_MACOSX=1', '-stdlib=libc++'], - 'SunOS': ['-DOS_SOLARIS=1'], - 'WINNT': [ - '-DOS_WIN=1', - '-DWIN32=1', + "Android": ["-DOS_ANDROID=1"], + "DragonFly": ["-DOS_BSD=1", "-DOS_DRAGONFLY=1"], + "FreeBSD": ["-DOS_BSD=1", "-DOS_FREEBSD=1"], + "GNU": ["-DOS_LINUX=1"], + "NetBSD": ["-DOS_BSD=1", "-DOS_NETBSD=1"], + "OpenBSD": ["-DOS_BSD=1", "-DOS_OPENBSD=1"], + "OSX": ["-DOS_MACOSX=1", "-stdlib=libc++"], + "SunOS": ["-DOS_SOLARIS=1"], + "WINNT": [ + "-DOS_WIN=1", + "-DWIN32=1", ], }.get(target.os, []) - if compiler_info.type == 'clang-cl': + if compiler_info.type == "clang-cl": args += [ # To enable the builtin __builtin_offsetof so that CRT wouldn't # use reinterpret_cast in offsetof() which is not allowed inside # static_assert(). - '-D_CRT_USE_BUILTIN_OFFSETOF', + "-D_CRT_USE_BUILTIN_OFFSETOF", # Enable hidden attribute (which is not supported by MSVC and # thus not enabled by default with a MSVC-compatibile build) # to exclude hidden symbols from the generated file. 
- '-DHAVE_VISIBILITY_HIDDEN_ATTRIBUTE=1', + "-DHAVE_VISIBILITY_HIDDEN_ATTRIBUTE=1", ] return args + (clang_flags or []) -option(env='BINDGEN_CFLAGS', - nargs=1, - help='Options bindgen should pass to the C/C++ parser') +option( + env="BINDGEN_CFLAGS", + nargs=1, + help="Options bindgen should pass to the C/C++ parser", +) -@depends(basic_bindgen_cflags, 'BINDGEN_CFLAGS') -@checking('bindgen cflags', lambda s: s if s else 'no') +@depends(basic_bindgen_cflags, "BINDGEN_CFLAGS") +@checking("bindgen cflags", lambda s: s if s else "no") def bindgen_cflags(base_flags, extra_flags): flags = base_flags if extra_flags and len(extra_flags): flags += extra_flags[0].split() - return ' '.join(flags) + return " ".join(flags) -add_old_configure_assignment('_BINDGEN_CFLAGS', bindgen_cflags) +add_old_configure_assignment("_BINDGEN_CFLAGS", bindgen_cflags) diff --git a/build/moz.configure/checks.configure b/build/moz.configure/checks.configure index 5bb026dfdfa110..cfff05933a5a86 100644 --- a/build/moz.configure/checks.configure +++ b/build/moz.configure/checks.configure @@ -14,13 +14,14 @@ @template -@imports(_from='__builtin__', _import='Exception') +@imports(_from="__builtin__", _import="Exception") def _declare_exceptions(): class FatalCheckError(Exception): - '''An exception to throw from a function decorated with @checking. + """An exception to throw from a function decorated with @checking. It will result in calling die() with the given message. Debugging messages emitted from the decorated function will also be - printed out.''' + printed out.""" + return (FatalCheckError,) @@ -51,7 +52,7 @@ del _declare_exceptions def checking(what, callback=None): def decorator(func): def wrapped(*args, **kwargs): - log.info('checking %s... ', what) + log.info("checking %s... ", what) with log.queue_debug(): error, ret = None, None try: @@ -60,15 +61,17 @@ def checking(what, callback=None): error = str(e) display_ret = callback(ret) if callback else ret if display_ret is True: - log.info('yes') + log.info("yes") elif display_ret is False or display_ret is None: - log.info('no') + log.info("no") else: log.info(display_ret) if error is not None: die(error) return ret + return wrapped + return decorator @@ -101,9 +104,17 @@ def checking(what, callback=None): # it can find. If PROG is already set from the environment or command line, # use that value instead. @template -@imports(_from='mozbuild.shellutil', _import='quote') -def check_prog(var, progs, what=None, input=None, allow_missing=False, - paths=None, paths_have_priority=False, when=None): +@imports(_from="mozbuild.shellutil", _import="quote") +def check_prog( + var, + progs, + what=None, + input=None, + allow_missing=False, + paths=None, + paths_have_priority=False, + when=None, +): if input is not None: # Wrap input with type checking and normalization. 
@depends(input, when=when) @@ -114,11 +125,18 @@ def check_prog(var, progs, what=None, input=None, allow_missing=False, return (value,) if isinstance(value, (tuple, list)) and len(value) == 1: return value - configure_error('input must resolve to a tuple or a list with a ' - 'single element, or a string') + configure_error( + "input must resolve to a tuple or a list with a " + "single element, or a string" + ) + else: - option(env=var, nargs=1, when=when, - help='Path to %s' % (what or 'the %s program' % var.lower())) + option( + env=var, + nargs=1, + when=when, + help="Path to %s" % (what or "the %s program" % var.lower()), + ) input = var what = what or var.lower() @@ -138,12 +156,12 @@ def check_prog(var, progs, what=None, input=None, allow_missing=False, progs = () if not isinstance(progs, (tuple, list)): - configure_error('progs must resolve to a list or tuple!') + configure_error("progs must resolve to a list or tuple!") return namespace(value=input, progs=progs, paths=paths) @depends(inputs, allow_missing, when=inputs) - @checking('for %s' % what, lambda x: quote(x) if x else 'not found') + @checking("for %s" % what, lambda x: quote(x) if x else "not found") def check(inputs, allow_missing): value = inputs.value progs = inputs.progs @@ -152,19 +170,19 @@ def check_prog(var, progs, what=None, input=None, allow_missing=False, if paths_have_priority: for path in paths: for prog in value or progs: - log.debug('%s: Trying %s', var.lower(), quote(prog)) + log.debug("%s: Trying %s", var.lower(), quote(prog)) result = find_program(prog, [path]) if result: return result else: for prog in value or progs: - log.debug('%s: Trying %s', var.lower(), quote(prog)) + log.debug("%s: Trying %s", var.lower(), quote(prog)) result = find_program(prog, paths) if result: return result if not allow_missing or value: - raise FatalCheckError('Cannot find %s' % what) + raise FatalCheckError("Cannot find %s" % what) set_config(var, check) diff --git a/build/moz.configure/compile-checks.configure b/build/moz.configure/compile-checks.configure index b795c23c58d2a6..25e4f80bf5b7fd 100755 --- a/build/moz.configure/compile-checks.configure +++ b/build/moz.configure/compile-checks.configure @@ -18,14 +18,23 @@ # - `check_msg` is the message to be printed to accompany compiling the test # program. @template -def try_compile(includes=None, body='', language='C++', flags=None, check_msg=None, - when=None, onerror=lambda: None): +def try_compile( + includes=None, + body="", + language="C++", + flags=None, + check_msg=None, + when=None, + onerror=lambda: None, +): compiler = { - 'C': c_compiler, - 'C++': cxx_compiler, + "C": c_compiler, + "C++": cxx_compiler, }[language] - return compiler.try_compile(includes, body, flags, check_msg, when=when, onerror=onerror) + return compiler.try_compile( + includes, body, flags, check_msg, when=when, onerror=onerror + ) # Checks for the presence of the given header on the target system by compiling @@ -44,8 +53,9 @@ def try_compile(includes=None, body='', language='C++', flags=None, check_msg=No # - `when` is a depends function that if present will make performing the check # conditional on the value of that function. 
@template -def check_header(header, language='C++', flags=None, includes=None, when=None, - onerror=lambda: None): +def check_header( + header, language="C++", flags=None, includes=None, when=None, onerror=lambda: None +): if when is None: when = always @@ -55,15 +65,21 @@ def check_header(header, language='C++', flags=None, includes=None, when=None, includes = [] includes.append(header) - have_header = try_compile(includes=includes, language=language, flags=flags, - check_msg='for %s' % header, when=when, onerror=onerror) - header_var = 'HAVE_%s' % (header.upper() - .replace('-', '_') - .replace('/', '_') - .replace('.', '_')) + have_header = try_compile( + includes=includes, + language=language, + flags=flags, + check_msg="for %s" % header, + when=when, + onerror=onerror, + ) + header_var = "HAVE_%s" % ( + header.upper().replace("-", "_").replace("/", "_").replace(".", "_") + ) set_define(header_var, have_header) return have_header + # A convenience wrapper for check_header for checking multiple headers. # returns an array of the resulting checks in order corresponding to the # provided headers. @@ -83,28 +99,28 @@ def check_headers(*headers, **kwargs): # a test program. The return value of the template is a check function # returning True if the symbol can be found, and None if it is not. @template -def check_symbol(symbol, language='C', flags=None, when=None, onerror=lambda: None): +def check_symbol(symbol, language="C", flags=None, when=None, onerror=lambda: None): if when is None: when = always compiler = { - 'C': c_compiler, - 'C++': cxx_compiler, + "C": c_compiler, + "C++": cxx_compiler, }[language] # Stolen from autoconf 2.13 ; might be irrelevant now, but it doesn't hurt to # keep using a char return type. comment = [ - '/* Override any gcc2 internal prototype to avoid an error. */', - '/* We use char because int might match the return type of a gcc2', - ' builtin and then its argument prototype would still apply. */', + "/* Override any gcc2 internal prototype to avoid an error. */", + "/* We use char because int might match the return type of a gcc2", + " builtin and then its argument prototype would still apply. */", ] return compiler.try_run( - header=comment + ['char %s();' % symbol], - body='%s();' % symbol, + header=comment + ["char %s();" % symbol], + body="%s();" % symbol, flags=flags, - check_msg='for %s' % symbol, + check_msg="for %s" % symbol, when=when, onerror=onerror, ) @@ -126,8 +142,9 @@ def check_symbol(symbol, language='C', flags=None, when=None, onerror=lambda: No # - `check`, when not set, skips checking whether the flag is supported and # adds it to the list of flags unconditionally. 
@template -def check_and_add_flags(flag, flags_collection, test_flags, - compiler=None, when=None, check=True): +def check_and_add_flags( + flag, flags_collection, test_flags, compiler=None, when=None, check=True +): if compiler is not None: compilers = (compiler,) else: @@ -144,32 +161,34 @@ def check_and_add_flags(flag, flags_collection, test_flags, flags = [flag] for c in compilers: - assert c in {c_compiler, cxx_compiler, - host_c_compiler, host_cxx_compiler} + assert c in {c_compiler, cxx_compiler, host_c_compiler, host_cxx_compiler} lang, list_of_flags = { - c_compiler: ('C', flags_collection.cflags), - cxx_compiler: ('C++', flags_collection.cxxflags), - host_c_compiler: ('host C', flags_collection.host_cflags), - host_cxx_compiler: ('host C++', flags_collection.host_cxxflags), + c_compiler: ("C", flags_collection.cflags), + cxx_compiler: ("C++", flags_collection.cxxflags), + host_c_compiler: ("host C", flags_collection.host_cflags), + host_cxx_compiler: ("host C++", flags_collection.host_cxxflags), }[c] @depends(c, when) def result(c, when): - if when and c.type in ('clang', 'gcc'): + if when and c.type in ("clang", "gcc"): return True if check: + @depends(c, dependable(flags)) def flags(c, flags): # Don't error out just because clang complains about other things. - if c.type == 'clang': - flags += ['-Wno-error=unused-command-line-argument'] + if c.type == "clang": + flags += ["-Wno-error=unused-command-line-argument"] return flags result = c.try_compile( - flags=flags, when=result, - check_msg='whether the %s compiler supports %s' % (lang, flag)) + flags=flags, + when=result, + check_msg="whether the %s compiler supports %s" % (lang, flag), + ) @depends(result, list_of_flags) def maybe_add_flag(result, list_of_flags): @@ -204,15 +223,16 @@ def check_and_add_gcc_warning(warning, compiler=None, when=None, check=True): # warning option like -Wno-foobar. So when we are checking for support # of a negated warning option, we actually test the positive form, but # add the negated form to the flags variable. - if warning.startswith('-Wno-') and not warning.startswith('-Wno-error='): - flags = ['-Werror', '-W' + warning[5:]] - elif warning.startswith('-Werror='): + if warning.startswith("-Wno-") and not warning.startswith("-Wno-error="): + flags = ["-Werror", "-W" + warning[5:]] + elif warning.startswith("-Werror="): flags = [warning] else: - flags = ['-Werror', warning] + flags = ["-Werror", warning] - return check_and_add_flags(warning, warnings_flags, flags, - compiler=compiler, when=when, check=check) + return check_and_add_flags( + warning, warnings_flags, flags, compiler=compiler, when=when, check=check + ) # Add the given warning to the list of warning flags for the build. @@ -248,10 +268,11 @@ def compilation_flags(): # add_gcc_flag(). @template def check_and_add_gcc_flag(flag, compiler=None, when=None, check=True): - flags = ['-Werror', flag] + flags = ["-Werror", flag] - return check_and_add_flags(flag, compilation_flags, flags, - compiler=compiler, when=when, check=check) + return check_and_add_flags( + flag, compilation_flags, flags, compiler=compiler, when=when, check=check + ) # Add the given flag to the list of flags for the build. 
diff --git a/build/moz.configure/compilers-util.configure b/build/moz.configure/compilers-util.configure index 501855c5c84198..1d8930347f58ef 100644 --- a/build/moz.configure/compilers-util.configure +++ b/build/moz.configure/compilers-util.configure @@ -6,8 +6,8 @@ @template -@imports('textwrap') -@imports(_from='mozbuild.configure', _import='SandboxDependsFunction') +@imports("textwrap") +@imports(_from="mozbuild.configure", _import="SandboxDependsFunction") def compiler_class(compiler, host_or_target): is_target = host_or_target is target @@ -24,22 +24,34 @@ def compiler_class(compiler, host_or_target): # `-c`. # - `check_msg` is the message to be printed to accompany compiling the # test program. - def try_compile(self, includes=None, body='', flags=None, - check_msg=None, when=None, onerror=lambda: None): + def try_compile( + self, + includes=None, + body="", + flags=None, + check_msg=None, + when=None, + onerror=lambda: None, + ): @depends(dependable(flags)) def flags(flags): flags = list(flags or []) - flags.append('-c') + flags.append("-c") return flags @depends(dependable(includes)) def header(includes): includes = includes or [] - return ['#include <%s>' % f for f in includes] + return ["#include <%s>" % f for f in includes] return self.try_run( - header=header, body=body, flags=flags, check_msg=check_msg, - when=when, onerror=onerror) + header=header, + body=body, + flags=flags, + check_msg=check_msg, + when=when, + onerror=onerror, + ) # Generates a test program and run the compiler against it. In case of # failure, the resulting check will return None. @@ -52,9 +64,17 @@ def compiler_class(compiler, host_or_target): # - `check_msg` is the message to be printed to accompany compiling the # test program. # - `onerror` is a function called when the check fails. 
- def try_run(self, header=None, body='', flags=None, - check_msg=None, when=None, onerror=lambda: None): - source = textwrap.dedent('''\ + def try_run( + self, + header=None, + body="", + flags=None, + check_msg=None, + when=None, + onerror=lambda: None, + ): + source = textwrap.dedent( + """\ int main(void) { @@ -62,35 +82,51 @@ def compiler_class(compiler, host_or_target): ; return 0; } - ''' % body) + """ + % body + ) if check_msg: + def checking_fn(fn): return checking(check_msg)(fn) + else: + def checking_fn(fn): return fn - @depends(self, dependable(flags), extra_toolchain_flags, - stlport_cppflags, dependable(header), when=when) + @depends( + self, + dependable(flags), + extra_toolchain_flags, + stlport_cppflags, + dependable(header), + when=when, + ) @checking_fn def func(compiler, flags, extra_flags, stlport_flags, header): flags = list(flags or []) if is_target: flags += extra_flags or [] - if compiler.language == 'C++': + if compiler.language == "C++": flags += stlport_flags or [] - header = header or '' + header = header or "" if isinstance(header, (list, tuple)): - header = '\n'.join(header) + header = "\n".join(header) if header: - header += '\n' + header += "\n" - if try_invoke_compiler( - compiler.wrapper + - [compiler.compiler] + compiler.flags, - compiler.language, header + source, flags, - onerror=onerror) is not None: + if ( + try_invoke_compiler( + compiler.wrapper + [compiler.compiler] + compiler.flags, + compiler.language, + header + source, + flags, + onerror=onerror, + ) + is not None + ): return True return func diff --git a/build/moz.configure/flags.configure b/build/moz.configure/flags.configure index a499470e554218..e8bf6157b743f0 100644 --- a/build/moz.configure/flags.configure +++ b/build/moz.configure/flags.configure @@ -6,60 +6,73 @@ # We support C++14, but we don't want to enable the sized deallocation # facilities in C++14 yet. -check_and_add_gcc_flag('-fno-sized-deallocation', compiler=cxx_compiler) +check_and_add_gcc_flag("-fno-sized-deallocation", compiler=cxx_compiler) # Likewise for C++17 and aligned allocation. It's not immediately obvious # from the clang and GCC documentation, but they both support this. -check_and_add_gcc_flag('-fno-aligned-new', compiler=cxx_compiler) +check_and_add_gcc_flag("-fno-aligned-new", compiler=cxx_compiler) # Please keep these last in this file. 
-add_old_configure_assignment('_COMPILATION_CFLAGS', compilation_flags.cflags) +add_old_configure_assignment("_COMPILATION_CFLAGS", compilation_flags.cflags) +add_old_configure_assignment("_COMPILATION_CXXFLAGS", compilation_flags.cxxflags) +add_old_configure_assignment("_COMPILATION_HOST_CFLAGS", compilation_flags.host_cflags) add_old_configure_assignment( - '_COMPILATION_CXXFLAGS', compilation_flags.cxxflags) -add_old_configure_assignment( - '_COMPILATION_HOST_CFLAGS', compilation_flags.host_cflags) -add_old_configure_assignment( - '_COMPILATION_HOST_CXXFLAGS', compilation_flags.host_cxxflags) + "_COMPILATION_HOST_CXXFLAGS", compilation_flags.host_cxxflags +) @depends(rust_compile_flags, rust_warning_flags) def rust_flags(compile_flags, warning_flags): return compile_flags + warning_flags -set_config('MOZ_RUST_DEFAULT_FLAGS', rust_flags) + +set_config("MOZ_RUST_DEFAULT_FLAGS", rust_flags) -option('--disable-new-pass-manager', - help='Use the legacy LLVM pass manager in clang builds') +option( + "--disable-new-pass-manager", + help="Use the legacy LLVM pass manager in clang builds", +) -@depends('--enable-new-pass-manager', c_compiler, host, target, 'MOZ_PGO', - lto.enabled, enable_fuzzing, ubsan) -def new_pass_manager_flags(enabled, compiler, host, target, pgo, lto, - enable_fuzzing, ubsan): - if host.os == 'OSX': + +@depends( + "--enable-new-pass-manager", + c_compiler, + host, + target, + "MOZ_PGO", + lto.enabled, + enable_fuzzing, + ubsan, +) +def new_pass_manager_flags( + enabled, compiler, host, target, pgo, lto, enable_fuzzing, ubsan +): + if host.os == "OSX": # Some native Mac builds hang with the new pass manager. Given the # inability to test in CI, don't take the risk of further breakage. return None - if target.os == 'OSX' and not pgo: + if target.os == "OSX" and not pgo: # Also disable when cross-compiling to Mac, because plain-ish opt # builds hang. Variants like asan and ccov work fine, but it would be # too tedious to test them all here. PGO is the only thing that matters # enough to make an exception for. return None - if target.os == 'WINNT' and target.cpu == 'aarch64' and not lto: + if target.os == "WINNT" and target.cpu == "aarch64" and not lto: # Building without LTO on aarch64-windows fails on some large functions # because clang doesn't know how to split SEH unwind info. return None - if enable_fuzzing and compiler.version < '10.0.0': + if enable_fuzzing and compiler.version < "10.0.0": # Clang 9 does not seem to play well with libFuzzer return None - if ubsan and compiler.version >= '10.0.0': + if ubsan and compiler.version >= "10.0.0": # Temporary until https://bugs.llvm.org/show_bug.cgi?id=45835 gets a # real fix: clang 10 hangs with some ubsan-inserted code constructs. 
return None - if enabled and compiler.version >= '9.0.0': - if compiler.type == 'clang': - return ['-fexperimental-new-pass-manager'] - elif compiler.type == 'clang-cl': - return ['-Xclang', '-fexperimental-new-pass-manager'] + if enabled and compiler.version >= "9.0.0": + if compiler.type == "clang": + return ["-fexperimental-new-pass-manager"] + elif compiler.type == "clang-cl": + return ["-Xclang", "-fexperimental-new-pass-manager"] + -set_config('MOZ_NEW_PASS_MANAGER_FLAGS', new_pass_manager_flags) +set_config("MOZ_NEW_PASS_MANAGER_FLAGS", new_pass_manager_flags) diff --git a/build/moz.configure/headers.configure b/build/moz.configure/headers.configure index 0f4455d8eeeacb..5332c7365fb279 100644 --- a/build/moz.configure/headers.configure +++ b/build/moz.configure/headers.configure @@ -5,97 +5,109 @@ # file, You can obtain one at http://mozilla.org/MPL/2.0/. # Check for headers defining standard int types. -check_header('stdint.h') -have_inttypes = check_header('inttypes.h') +check_header("stdint.h") +have_inttypes = check_header("inttypes.h") # Assume we have ansi C header files available. -set_define('STDC_HEADERS', True) +set_define("STDC_HEADERS", True) -set_config('HAVE_INTTYPES_H', have_inttypes) +set_config("HAVE_INTTYPES_H", have_inttypes) -building_linux = depends(target)(lambda target: target.kernel == 'Linux') +building_linux = depends(target)(lambda target: target.kernel == "Linux") -have_malloc = check_header('malloc.h') +have_malloc = check_header("malloc.h") -check_header('alloca.h') +check_header("alloca.h") -add_old_configure_assignment('HAVE_MALLOC_H', have_malloc) +add_old_configure_assignment("HAVE_MALLOC_H", have_malloc) check_headers( - 'sys/byteorder.h', - 'getopt.h', - 'unistd.h', - 'nl_types.h', - 'cpuid.h', - 'fts.h', + "sys/byteorder.h", + "getopt.h", + "unistd.h", + "nl_types.h", + "cpuid.h", + "fts.h", ) # These are all the places some variant of statfs can be hiding. check_headers( - 'sys/statvfs.h', - 'sys/statfs.h', - 'sys/vfs.h', - 'sys/mount.h', + "sys/statvfs.h", + "sys/statfs.h", + "sys/vfs.h", + "sys/mount.h", ) # Quota support # Check for both the header and quotactl() because Android headers can have the # header but not quotactl(). -set_define('HAVE_SYS_QUOTA_H', - try_compile(includes=['sys/quota.h'], - body='quotactl(0, nullptr, 0, (caddr_t)nullptr);', - check_msg='for sys/quota.h')) -check_header('linux/quota.h', - includes=['sys/socket.h'], - when=building_linux) +set_define( + "HAVE_SYS_QUOTA_H", + try_compile( + includes=["sys/quota.h"], + body="quotactl(0, nullptr, 0, (caddr_t)nullptr);", + check_msg="for sys/quota.h", + ), +) +check_header("linux/quota.h", includes=["sys/socket.h"], when=building_linux) # SCTP support - needs various network include headers check_headers( - 'linux/if_addr.h', - 'linux/rtnetlink.h', - includes=['sys/socket.h'], + "linux/if_addr.h", + "linux/rtnetlink.h", + includes=["sys/socket.h"], when=building_linux, ) -check_header('sys/queue.h') +check_header("sys/queue.h") check_headers( - 'sys/types.h', - 'netinet/in.h', - 'byteswap.h', + "sys/types.h", + "netinet/in.h", + "byteswap.h", ) # memfd_create(2) -- Note that older versions of the Linux man-pages # project incorrectly cite <sys/memfd.h>, which doesn't exist; this # was fixed in the man-pages-5.00 release.
-set_define('HAVE_MEMFD_CREATE', - try_compile(includes=['sys/mman.h'], - body='memfd_create("", 0);', - check_msg='for memfd_create in sys/mman.h')) +set_define( + "HAVE_MEMFD_CREATE", + try_compile( + includes=["sys/mman.h"], + body='memfd_create("", 0);', + check_msg="for memfd_create in sys/mman.h", + ), +) # TODO: Move these checks to file specific to --enable-project=js. -have_perf_event_h = check_header('linux/perf_event.h', - when=building_linux) +have_perf_event_h = check_header("linux/perf_event.h", when=building_linux) -option('--with-linux-headers', - help='location where the Linux kernel headers can be found', - nargs=1) +option( + "--with-linux-headers", + help="location where the Linux kernel headers can be found", + nargs=1, +) -passed_linux_header_flags = depends_if( - '--with-linux-headers')(lambda v: ['-I%s' % v[0]]) +passed_linux_header_flags = depends_if("--with-linux-headers")( + lambda v: ["-I%s" % v[0]] +) -@depends(try_compile(includes=['asm/unistd.h'], - body='return sizeof(__NR_perf_event_open);', - flags=passed_linux_header_flags, - check_msg='for perf_event_open system call'), - when=have_perf_event_h) +@depends( + try_compile( + includes=["asm/unistd.h"], + body="return sizeof(__NR_perf_event_open);", + flags=passed_linux_header_flags, + check_msg="for perf_event_open system call", + ), + when=have_perf_event_h, +) def have_perf_event_open(have_perf_event_open): if have_perf_event_open: return True -set_config('HAVE_LINUX_PERF_EVENT_H', have_perf_event_open) +set_config("HAVE_LINUX_PERF_EVENT_H", have_perf_event_open) @depends(passed_linux_header_flags, have_perf_event_open) @@ -104,4 +116,4 @@ def linux_headers_includes(passed_linux_header_flags, have_perf_event_open): return passed_linux_header_flags[0] -set_config('LINUX_HEADERS_INCLUDES', linux_headers_includes) +set_config("LINUX_HEADERS_INCLUDES", linux_headers_includes) diff --git a/build/moz.configure/init.configure b/build/moz.configure/init.configure index e9c9d2f0d5c304..4c6d73ee92f66d 100644 --- a/build/moz.configure/init.configure +++ b/build/moz.configure/init.configure @@ -4,8 +4,8 @@ # License, v. 2.0. If a copy of the MPL was not distributed with this # file, You can obtain one at http://mozilla.org/MPL/2.0/. -include('util.configure') -include('checks.configure') +include("util.configure") +include("checks.configure") # Make `toolkit` available when toolkit/moz.configure is not included. toolkit = dependable(None) @@ -13,24 +13,23 @@ toolkit = dependable(None) # build/moz.configure/bindgen.configure is not included. bindgen_config_paths = dependable(None) -option(env='DIST', nargs=1, help='DIST directory') +option(env="DIST", nargs=1, help="DIST directory") # Do not allow objdir == srcdir builds. 
# ============================================================== -@depends('--help', 'DIST') -@imports(_from='__builtin__', _import='open') -@imports(_from='os.path', _import='exists') -@imports(_from='six', _import='ensure_text') +@depends("--help", "DIST") +@imports(_from="__builtin__", _import="open") +@imports(_from="os.path", _import="exists") +@imports(_from="six", _import="ensure_text") def check_build_environment(help, dist): - topobjdir = os.path.realpath('.') - topsrcdir = os.path.realpath( - os.path.join(os.path.dirname(__file__), '..', '..')) + topobjdir = os.path.realpath(".") + topsrcdir = os.path.realpath(os.path.join(os.path.dirname(__file__), "..", "..")) if dist: dist = normsep(dist[0]) else: - dist = os.path.join(topobjdir, 'dist') + dist = os.path.join(topobjdir, "dist") result = namespace( topsrcdir=topsrcdir, @@ -50,87 +49,86 @@ def check_build_environment(help, dist): # backend will make this limitation go away. But there is likely a long tail # of things that will need fixing due to e.g. lack of proper path quoting. if len(topsrcdir.split()) > 1: - die('Source directory cannot be located in a path with spaces: %s' % - topsrcdir) + die("Source directory cannot be located in a path with spaces: %s" % topsrcdir) if len(topobjdir.split()) > 1: - die('Object directory cannot be located in a path with spaces: %s' % - topobjdir) + die("Object directory cannot be located in a path with spaces: %s" % topobjdir) if topsrcdir == topobjdir: - die(' ***\n' - ' * Building directly in the main source directory is not allowed.\n' - ' *\n' - ' * To build, you must run configure from a separate directory\n' - ' * (referred to as an object directory).\n' - ' *\n' - ' * If you are building with a mozconfig, you will need to change your\n' - ' * mozconfig to point to a different object directory.\n' - ' ***' - ) + die( + " ***\n" + " * Building directly in the main source directory is not allowed.\n" + " *\n" + " * To build, you must run configure from a separate directory\n" + " * (referred to as an object directory).\n" + " *\n" + " * If you are building with a mozconfig, you will need to change your\n" + " * mozconfig to point to a different object directory.\n" + " ***" + ) # Check for CRLF line endings. - with open(os.path.join(topsrcdir, 'configure.py'), 'r') as fh: + with open(os.path.join(topsrcdir, "configure.py"), "r") as fh: data = ensure_text(fh.read()) - if '\r' in data: - die('\n ***\n' - ' * The source tree appears to have Windows-style line endings.\n' - ' *\n' - ' * If using Git, Git is likely configured to use Windows-style\n' - ' * line endings.\n' - ' *\n' - ' * To convert the working copy to UNIX-style line endings, run\n' - ' * the following:\n' - ' *\n' - ' * $ git config core.autocrlf false\n' - ' * $ git config core.eof lf\n' - ' * $ git rm --cached -r .\n' - ' * $ git reset --hard\n' - ' *\n' - ' * If not using Git, the tool you used to obtain the source\n' - ' * code likely converted files to Windows line endings. 
See\n' - ' * usage information for that tool for more.\n' - ' ***') + if "\r" in data: + die( + "\n ***\n" + " * The source tree appears to have Windows-style line endings.\n" + " *\n" + " * If using Git, Git is likely configured to use Windows-style\n" + " * line endings.\n" + " *\n" + " * To convert the working copy to UNIX-style line endings, run\n" + " * the following:\n" + " *\n" + " * $ git config core.autocrlf false\n" + " * $ git config core.eof lf\n" + " * $ git rm --cached -r .\n" + " * $ git reset --hard\n" + " *\n" + " * If not using Git, the tool you used to obtain the source\n" + " * code likely converted files to Windows line endings. See\n" + " * usage information for that tool for more.\n" + " ***" + ) # Check for a couple representative files in the source tree conflict_files = [ - '* %s' % f for f in ('Makefile', 'config/autoconf.mk') + "* %s" % f + for f in ("Makefile", "config/autoconf.mk") if exists(os.path.join(topsrcdir, f)) ] if conflict_files: - die(' ***\n' - ' * Your source tree contains these files:\n' - ' %s\n' - ' * This indicates that you previously built in the source tree.\n' - ' * A source tree build can confuse the separate objdir build.\n' - ' *\n' - ' * To clean up the source tree:\n' - ' * 1. cd %s\n' - ' * 2. gmake distclean\n' - ' ***' - % ('\n '.join(conflict_files), topsrcdir) - ) + die( + " ***\n" + " * Your source tree contains these files:\n" + " %s\n" + " * This indicates that you previously built in the source tree.\n" + " * A source tree build can confuse the separate objdir build.\n" + " *\n" + " * To clean up the source tree:\n" + " * 1. cd %s\n" + " * 2. gmake distclean\n" + " ***" % ("\n ".join(conflict_files), topsrcdir) + ) return result -set_config('TOPSRCDIR', check_build_environment.topsrcdir) -set_config('TOPOBJDIR', check_build_environment.topobjdir) -set_config('DIST', check_build_environment.dist) +set_config("TOPSRCDIR", check_build_environment.topsrcdir) +set_config("TOPOBJDIR", check_build_environment.topobjdir) +set_config("DIST", check_build_environment.dist) -add_old_configure_assignment( - '_topsrcdir', check_build_environment.topsrcdir) -add_old_configure_assignment( - '_objdir', check_build_environment.topobjdir) -add_old_configure_assignment( - 'DIST', check_build_environment.dist) +add_old_configure_assignment("_topsrcdir", check_build_environment.topsrcdir) +add_old_configure_assignment("_objdir", check_build_environment.topobjdir) +add_old_configure_assignment("DIST", check_build_environment.dist) -option(env='MOZ_AUTOMATION', help='Enable options for automated builds') -set_config('MOZ_AUTOMATION', depends_if('MOZ_AUTOMATION')(lambda x: True)) +option(env="MOZ_AUTOMATION", help="Enable options for automated builds") +set_config("MOZ_AUTOMATION", depends_if("MOZ_AUTOMATION")(lambda x: True)) -option(env='OLD_CONFIGURE', nargs=1, help='Path to the old configure script') +option(env="OLD_CONFIGURE", nargs=1, help="Path to the old configure script") -option(env='MOZCONFIG', nargs=1, help='Mozconfig location') +option(env="MOZCONFIG", nargs=1, help="Mozconfig location") # Read user mozconfig @@ -140,13 +138,12 @@ option(env='MOZCONFIG', nargs=1, help='Mozconfig location') # be called when --help is passed, and the mozconfig wouldn't be read. 
-@depends('MOZCONFIG', 'OLD_CONFIGURE', check_build_environment, - '--help') -@imports(_from='mozbuild.mozconfig', _import='MozconfigLoader') -@imports(_from='mozboot.mozconfig', _import='find_mozconfig') +@depends("MOZCONFIG", "OLD_CONFIGURE", check_build_environment, "--help") +@imports(_from="mozbuild.mozconfig", _import="MozconfigLoader") +@imports(_from="mozboot.mozconfig", _import="find_mozconfig") def mozconfig(mozconfig, old_configure, build_env, help): if not old_configure and not help: - die('The OLD_CONFIGURE environment variable must be set') + die("The OLD_CONFIGURE environment variable must be set") # Don't read the mozconfig for the js configure (yay backwards # compatibility) @@ -164,52 +161,53 @@ def mozconfig(mozconfig, old_configure, build_env, help): # OLD_CONFIGURE path, which points to js/src/old-configure. # I expect we'll have figured things out for mozconfigs well before # old-configure dies. - if old_configure and os.path.dirname(os.path.abspath(old_configure[0])).endswith('/js/src'): - return {'path': None} + if old_configure and os.path.dirname(os.path.abspath(old_configure[0])).endswith( + "/js/src" + ): + return {"path": None} topsrcdir = build_env.topsrcdir loader = MozconfigLoader(topsrcdir) mozconfig = mozconfig[0] if mozconfig else None - mozconfig = find_mozconfig(topsrcdir, env={'MOZCONFIG': mozconfig}) + mozconfig = find_mozconfig(topsrcdir, env={"MOZCONFIG": mozconfig}) mozconfig = loader.read_mozconfig(mozconfig) return mozconfig -set_config('MOZCONFIG', depends(mozconfig)(lambda m: m['path'])) +set_config("MOZCONFIG", depends(mozconfig)(lambda m: m["path"])) # Mozilla-Build # ============================================================== -option(env='MOZILLABUILD', nargs=1, - help='Path to Mozilla Build (Windows-only)') +option(env="MOZILLABUILD", nargs=1, help="Path to Mozilla Build (Windows-only)") -option(env='CONFIG_SHELL', nargs=1, help='Path to a POSIX shell') +option(env="CONFIG_SHELL", nargs=1, help="Path to a POSIX shell") # It feels dirty replicating this from python/mozbuild/mozbuild/mozconfig.py, # but the end goal being that the configure script would go away... -@depends('CONFIG_SHELL', 'MOZILLABUILD') -@checking('for a shell') -@imports('sys') +@depends("CONFIG_SHELL", "MOZILLABUILD") +@checking("for a shell") +@imports("sys") def shell(value, mozillabuild): if value: return find_program(value[0]) - shell = 'sh' + shell = "sh" if mozillabuild: - shell = mozillabuild[0] + '/msys/bin/sh' - if sys.platform == 'win32': - shell = shell + '.exe' + shell = mozillabuild[0] + "/msys/bin/sh" + if sys.platform == "win32": + shell = shell + ".exe" return find_program(shell) # This defines a reasonable shell for when running with --help. # If one was passed in the environment, though, fall back to that. 
-@depends('--help', 'CONFIG_SHELL') +@depends("--help", "CONFIG_SHELL") def help_shell(help, shell): if help and not shell: - return 'sh' + return "sh" shell = help_shell | shell @@ -218,25 +216,28 @@ shell = help_shell | shell # Python 3 # ======== -option(env='PYTHON3', nargs=1, help='Python 3 interpreter (3.6 or later)') - -option(env='VIRTUALENV_NAME', nargs=1, default='init_py3', - help='Name of the in-objdir virtualenv') - - -@depends('PYTHON3', 'VIRTUALENV_NAME', check_build_environment, mozconfig, - '--help') -@imports(_from='__builtin__', _import='Exception') -@imports('os') -@imports('sys') -@imports('subprocess') -@imports('distutils.sysconfig') -@imports(_from='mozbuild.configure.util', _import='LineIO') -@imports(_from='mozbuild.virtualenv', _import='VirtualenvManager') -@imports(_from='mozbuild.virtualenv', _import='verify_python_version') -@imports(_from='mozbuild.pythonutil', _import='find_python3_executable') -@imports(_from='mozbuild.pythonutil', _import='python_executable_version') -@imports(_from='six', _import='ensure_text') +option(env="PYTHON3", nargs=1, help="Python 3 interpreter (3.6 or later)") + +option( + env="VIRTUALENV_NAME", + nargs=1, + default="init_py3", + help="Name of the in-objdir virtualenv", +) + + +@depends("PYTHON3", "VIRTUALENV_NAME", check_build_environment, mozconfig, "--help") +@imports(_from="__builtin__", _import="Exception") +@imports("os") +@imports("sys") +@imports("subprocess") +@imports("distutils.sysconfig") +@imports(_from="mozbuild.configure.util", _import="LineIO") +@imports(_from="mozbuild.virtualenv", _import="VirtualenvManager") +@imports(_from="mozbuild.virtualenv", _import="verify_python_version") +@imports(_from="mozbuild.pythonutil", _import="find_python3_executable") +@imports(_from="mozbuild.pythonutil", _import="python_executable_version") +@imports(_from="six", _import="ensure_text") def virtualenv_python3(env_python, virtualenv_name, build_env, mozconfig, help): # Avoid re-executing python when running configure --help. if help: @@ -261,15 +262,15 @@ def virtualenv_python3(env_python, virtualenv_name, build_env, mozconfig, help): # Ideally we'd rely on the mozconfig injection from mozconfig_options, # but we'd rather avoid the verbosity when we need to reexecute with # a different python. 
- if mozconfig['path']: - if 'PYTHON3' in mozconfig['env']['added']: - python = mozconfig['env']['added']['PYTHON3'] - elif 'PYTHON3' in mozconfig['env']['modified']: - python = mozconfig['env']['modified']['PYTHON3'][1] - elif 'PYTHON3' in mozconfig['vars']['added']: - python = mozconfig['vars']['added']['PYTHON3'] - elif 'PYTHON3' in mozconfig['vars']['modified']: - python = mozconfig['vars']['modified']['PYTHON3'][1] + if mozconfig["path"]: + if "PYTHON3" in mozconfig["env"]["added"]: + python = mozconfig["env"]["added"]["PYTHON3"] + elif "PYTHON3" in mozconfig["env"]["modified"]: + python = mozconfig["env"]["modified"]["PYTHON3"][1] + elif "PYTHON3" in mozconfig["vars"]["added"]: + python = mozconfig["vars"]["added"]["PYTHON3"] + elif "PYTHON3" in mozconfig["vars"]["modified"]: + python = mozconfig["vars"]["modified"]["PYTHON3"][1] log.debug("python3: executable from configuration: %r" % python) @@ -284,25 +285,27 @@ def virtualenv_python3(env_python, virtualenv_name, build_env, mozconfig, help): # See mozilla-central changeset d2cce982a7c809815d86d5daecefe2e7a563ecca # Bug 784841 topsrcdir, topobjdir = build_env.topsrcdir, build_env.topobjdir - if topobjdir.endswith('/js/src'): + if topobjdir.endswith("/js/src"): topobjdir = topobjdir[:-7] - virtualenvs_root = os.path.join(topobjdir, '_virtualenvs') - with LineIO(lambda l: log.info(l), 'replace') as out: + virtualenvs_root = os.path.join(topobjdir, "_virtualenvs") + with LineIO(lambda l: log.info(l), "replace") as out: manager = VirtualenvManager( topsrcdir, - os.path.join(virtualenvs_root, virtualenv_name), out, - os.path.join(topsrcdir, 'build', 'build_virtualenv_packages.txt')) + os.path.join(virtualenvs_root, virtualenv_name), + out, + os.path.join(topsrcdir, "build", "build_virtualenv_packages.txt"), + ) # If we're not in the virtualenv, we need to update the path to include some # necessary modules for find_program. - if 'MOZBUILD_VIRTUALENV' in os.environ: + if "MOZBUILD_VIRTUALENV" in os.environ: python = sys.executable else: + sys.path.insert(0, os.path.join(topsrcdir, "testing", "mozbase", "mozfile")) sys.path.insert( - 0, os.path.join(topsrcdir, 'testing', 'mozbase', 'mozfile')) - sys.path.insert( - 0, os.path.join(topsrcdir, 'third_party', 'python', 'backports')) + 0, os.path.join(topsrcdir, "third_party", "python", "backports") + ) # If we know the Python executable the caller is asking for then verify its # version. If the caller did not ask for a specific executable then find @@ -310,37 +313,44 @@ def virtualenv_python3(env_python, virtualenv_name, build_env, mozconfig, help): if python: found_python = find_program(python) if not found_python: - die('The PYTHON3 environment variable does not contain ' - 'a valid path. Cannot find %s', python) + die( + "The PYTHON3 environment variable does not contain " + "a valid path. Cannot find %s", + python, + ) python = found_python try: version = python_executable_version(python).version except Exception as e: - raise FatalCheckError('could not determine version of PYTHON3 ' - '(%s): %s' % (python, e)) + raise FatalCheckError( + "could not determine version of PYTHON3 " "(%s): %s" % (python, e) + ) else: # Fall back to the search routine. - python, version = find_python3_executable(min_version='3.6.0') + python, version = find_python3_executable(min_version="3.6.0") # The API returns a bytes whereas everything in configure is unicode. if python: python = ensure_text(python) if not python: - raise FatalCheckError('Python 3.6 or newer is required to build. 
' - 'Ensure a `python3.x` executable is in your ' - 'PATH or define PYTHON3 to point to a Python ' - '3.6 executable.') + raise FatalCheckError( + "Python 3.6 or newer is required to build. " + "Ensure a `python3.x` executable is in your " + "PATH or define PYTHON3 to point to a Python " + "3.6 executable." + ) if version < (3, 6, 0): - raise FatalCheckError('Python 3.6 or newer is required to build; ' - '%s is Python %d.%d' % (python, version[0], - version[1])) + raise FatalCheckError( + "Python 3.6 or newer is required to build; " + "%s is Python %d.%d" % (python, version[0], version[1]) + ) log.debug("python3: found executable: %r" % python) if not manager.up_to_date(python): - log.info('Creating Python 3 environment') + log.info("Creating Python 3 environment") manager.build(python) else: log.debug("python3: venv is up to date") @@ -348,33 +358,35 @@ def virtualenv_python3(env_python, virtualenv_name, build_env, mozconfig, help): python = normsep(manager.python_path) if not normsep(sys.executable).startswith(normsep(virtualenvs_root)): - log.debug("python3: executing as %s, should be running as %s" % ( - sys.executable, manager.python_path)) - log.info('Re-executing in the virtualenv') + log.debug( + "python3: executing as %s, should be running as %s" + % (sys.executable, manager.python_path) + ) + log.info("Re-executing in the virtualenv") if env_python: - del os.environ['PYTHON3'] + del os.environ["PYTHON3"] # Homebrew on macOS will change Python's sys.executable to a custom # value which messes with mach's virtualenv handling code. Override # Homebrew's changes with the correct sys.executable value. - os.environ['PYTHONEXECUTABLE'] = python + os.environ["PYTHONEXECUTABLE"] = python # Another quirk on macOS, with the system python, the virtualenv is # not fully operational (missing entries in sys.path) if # __PYVENV_LAUNCHER__ is set. - os.environ.pop('__PYVENV_LAUNCHER__', None) + os.environ.pop("__PYVENV_LAUNCHER__", None) # One would prefer to use os.execl, but that's completely borked on # Windows. sys.exit(subprocess.call([python] + sys.argv)) # We are now in the virtualenv if not distutils.sysconfig.get_python_lib(): - die('Could not determine python site packages directory') + die("Could not determine python site packages directory") # We may have set PYTHONEXECUTABLE above, and that affects python # subprocesses we may invoke as part of configure (e.g. hg), so # unset it. 
- os.environ.pop('PYTHONEXECUTABLE', None) + os.environ.pop("PYTHONEXECUTABLE", None) - str_version = '.'.join(str(v) for v in version) + str_version = ".".join(str(v) for v in version) return namespace( path=python, @@ -384,14 +396,14 @@ def virtualenv_python3(env_python, virtualenv_name, build_env, mozconfig, help): @depends(virtualenv_python3) -@checking('for Python 3', callback=lambda x: '%s (%s)' % (x.path, x.str_version)) +@checking("for Python 3", callback=lambda x: "%s (%s)" % (x.path, x.str_version)) def virtualenv_python3(venv): return venv -set_config('PYTHON3', virtualenv_python3.path) -set_config('PYTHON3_VERSION', virtualenv_python3.str_version) -add_old_configure_assignment('PYTHON3', virtualenv_python3.path) +set_config("PYTHON3", virtualenv_python3.path) +set_config("PYTHON3_VERSION", virtualenv_python3.str_version) +add_old_configure_assignment("PYTHON3", virtualenv_python3.path) # Inject mozconfig options @@ -402,61 +414,64 @@ add_old_configure_assignment('PYTHON3', virtualenv_python3.path) @template def early_options(): - @depends('--help') - @imports('__sandbox__') - @imports(_from='six', _import='itervalues') + @depends("--help") + @imports("__sandbox__") + @imports(_from="six", _import="itervalues") def early_options(_): return set( - option.env - for option in itervalues(__sandbox__._options) - if option.env + option.env for option in itervalues(__sandbox__._options) if option.env ) + return early_options early_options = early_options() -@depends(mozconfig, early_options, 'MOZ_AUTOMATION', '--help') +@depends(mozconfig, early_options, "MOZ_AUTOMATION", "--help") # This gives access to the sandbox. Don't copy this blindly. -@imports('__sandbox__') -@imports('os') -@imports('six') +@imports("__sandbox__") +@imports("os") +@imports("six") def mozconfig_options(mozconfig, early_options, automation, help): - if mozconfig['path']: - if 'MOZ_AUTOMATION_MOZCONFIG' in mozconfig['env']['added']: + if mozconfig["path"]: + if "MOZ_AUTOMATION_MOZCONFIG" in mozconfig["env"]["added"]: if not automation: - log.error('%s directly or indirectly includes an in-tree ' - 'mozconfig.', mozconfig['path']) - log.error('In-tree mozconfigs make strong assumptions about ' - 'and are only meant to be used by Mozilla ' - 'automation.') + log.error( + "%s directly or indirectly includes an in-tree " "mozconfig.", + mozconfig["path"], + ) + log.error( + "In-tree mozconfigs make strong assumptions about " + "and are only meant to be used by Mozilla " + "automation." + ) die("Please don't use them.") helper = __sandbox__._helper - log.info('Adding configure options from %s' % mozconfig['path']) - for arg in mozconfig['configure_args']: - log.info(' %s' % arg) + log.info("Adding configure options from %s" % mozconfig["path"]) + for arg in mozconfig["configure_args"]: + log.info(" %s" % arg) # We could be using imply_option() here, but it has other # contraints that don't really apply to the command-line # emulation that mozconfig provides. 
- helper.add(arg, origin='mozconfig', args=helper._args) + helper.add(arg, origin="mozconfig", args=helper._args) def add(key, value): if key.isupper(): - arg = '%s=%s' % (key, value) - log.info(' %s' % arg) + arg = "%s=%s" % (key, value) + log.info(" %s" % arg) if key not in early_options: - helper.add(arg, origin='mozconfig', args=helper._args) + helper.add(arg, origin="mozconfig", args=helper._args) - for key, value in six.iteritems(mozconfig['env']['added']): + for key, value in six.iteritems(mozconfig["env"]["added"]): add(key, value) os.environ[key] = value - for key, (_, value) in six.iteritems(mozconfig['env']['modified']): + for key, (_, value) in six.iteritems(mozconfig["env"]["modified"]): add(key, value) os.environ[key] = value - for key, value in six.iteritems(mozconfig['vars']['added']): + for key, value in six.iteritems(mozconfig["vars"]["added"]): add(key, value) - for key, (_, value) in six.iteritems(mozconfig['vars']['modified']): + for key, (_, value) in six.iteritems(mozconfig["vars"]["modified"]): add(key, value) @@ -464,179 +479,199 @@ def mozconfig_options(mozconfig, early_options, automation, help): # ================================================ -@depends(check_build_environment, 'MOZ_AUTOMATION', '--help') -@checking('for vcs source checkout') -@imports('os') +@depends(check_build_environment, "MOZ_AUTOMATION", "--help") +@checking("for vcs source checkout") +@imports("os") def vcs_checkout_type(build_env, automation, help): - if os.path.exists(os.path.join(build_env.topsrcdir, '.hg')): - return 'hg' - elif os.path.exists(os.path.join(build_env.topsrcdir, '.git')): - return 'git' + if os.path.exists(os.path.join(build_env.topsrcdir, ".hg")): + return "hg" + elif os.path.exists(os.path.join(build_env.topsrcdir, ".git")): + return "git" elif automation and not help: - raise FatalCheckError('unable to resolve VCS type; must run ' - 'from a source checkout when MOZ_AUTOMATION ' - 'is set') + raise FatalCheckError( + "unable to resolve VCS type; must run " + "from a source checkout when MOZ_AUTOMATION " + "is set" + ) + # Resolve VCS binary for detected repository type. # TODO remove hg.exe once bug 1382940 addresses ambiguous executables case. -hg = check_prog('HG', ('hg.exe', 'hg',), allow_missing=True, - when=depends(vcs_checkout_type)(lambda x: x == 'hg')) -git = check_prog('GIT', ('git',), allow_missing=True, - when=depends(vcs_checkout_type)(lambda x: x == 'git')) +hg = check_prog( + "HG", + ( + "hg.exe", + "hg", + ), + allow_missing=True, + when=depends(vcs_checkout_type)(lambda x: x == "hg"), +) +git = check_prog( + "GIT", + ("git",), + allow_missing=True, + when=depends(vcs_checkout_type)(lambda x: x == "git"), +) @depends_if(hg) -@checking('for Mercurial version') -@imports('os') -@imports('re') +@checking("for Mercurial version") +@imports("os") +@imports("re") def hg_version(hg): # HGPLAIN in Mercurial 1.5+ forces stable output, regardless of set # locale or encoding. env = dict(os.environ) - env['HGPLAIN'] = '1' + env["HGPLAIN"] = "1" - out = check_cmd_output(hg, '--version', env=env) + out = check_cmd_output(hg, "--version", env=env) - match = re.search(r'Mercurial Distributed SCM \(version ([^\)]+)', out) + match = re.search(r"Mercurial Distributed SCM \(version ([^\)]+)", out) if not match: - raise FatalCheckError( - 'unable to determine Mercurial version: %s' % out) + raise FatalCheckError("unable to determine Mercurial version: %s" % out) # The version string may be "unknown" for Mercurial run out of its own # source checkout or for bad builds. 
But LooseVersion handles it. return Version(match.group(1)) + # Resolve Mercurial config items so other checks have easy access. # Do NOT set this in the config because it may contain sensitive data # like API keys. @depends_all(check_build_environment, hg, hg_version) -@imports('os') +@imports("os") def hg_config(build_env, hg, version): env = dict(os.environ) - env['HGPLAIN'] = '1' + env["HGPLAIN"] = "1" # Warnings may get sent to stderr. But check_cmd_output() ignores # stderr if exit code is 0. And the command should always succeed if # `hg version` worked. - out = check_cmd_output(hg, 'config', env=env, cwd=build_env.topsrcdir) + out = check_cmd_output(hg, "config", env=env, cwd=build_env.topsrcdir) config = {} for line in out.strip().splitlines(): - key, value = [s.strip() for s in line.split('=', 1)] + key, value = [s.strip() for s in line.split("=", 1)] config[key] = value return config @depends_if(git) -@checking('for Git version') -@imports('re') +@checking("for Git version") +@imports("re") def git_version(git): - out = check_cmd_output(git, '--version').rstrip() + out = check_cmd_output(git, "--version").rstrip() - match = re.search('git version (.*)$', out) + match = re.search("git version (.*)$", out) if not match: - raise FatalCheckError('unable to determine Git version: %s' % out) + raise FatalCheckError("unable to determine Git version: %s" % out) return Version(match.group(1)) + # Only set VCS_CHECKOUT_TYPE if we resolved the VCS binary. # Require resolved VCS info when running in automation so automation's # environment is more well-defined. -@depends(vcs_checkout_type, hg_version, git_version, 'MOZ_AUTOMATION') +@depends(vcs_checkout_type, hg_version, git_version, "MOZ_AUTOMATION") def exposed_vcs_checkout_type(vcs_checkout_type, hg, git, automation): - if vcs_checkout_type == 'hg': + if vcs_checkout_type == "hg": if hg: - return 'hg' + return "hg" if automation: - raise FatalCheckError('could not resolve Mercurial binary info') + raise FatalCheckError("could not resolve Mercurial binary info") - elif vcs_checkout_type == 'git': + elif vcs_checkout_type == "git": if git: - return 'git' + return "git" if automation: - raise FatalCheckError('could not resolve Git binary info') + raise FatalCheckError("could not resolve Git binary info") elif vcs_checkout_type: - raise FatalCheckError('unhandled VCS type: %s' % vcs_checkout_type) + raise FatalCheckError("unhandled VCS type: %s" % vcs_checkout_type) -set_config('VCS_CHECKOUT_TYPE', exposed_vcs_checkout_type) +set_config("VCS_CHECKOUT_TYPE", exposed_vcs_checkout_type) # Obtain a Repository interface for the current VCS repository. 
@depends(check_build_environment, exposed_vcs_checkout_type, hg, git) -@imports(_from='mozversioncontrol', _import='get_repository_object') +@imports(_from="mozversioncontrol", _import="get_repository_object") def vcs_repository(build_env, vcs_checkout_type, hg, git): - if vcs_checkout_type == 'hg': + if vcs_checkout_type == "hg": return get_repository_object(build_env.topsrcdir, hg=hg) - elif vcs_checkout_type == 'git': + elif vcs_checkout_type == "git": return get_repository_object(build_env.topsrcdir, git=git) elif vcs_checkout_type: - raise FatalCheckError('unhandled VCS type: %s' % vcs_checkout_type) + raise FatalCheckError("unhandled VCS type: %s" % vcs_checkout_type) @depends_if(vcs_repository) -@checking('for sparse checkout') +@checking("for sparse checkout") def vcs_sparse_checkout(repo): return repo.sparse_checkout_present() -set_config('VCS_SPARSE_CHECKOUT', vcs_sparse_checkout) +set_config("VCS_SPARSE_CHECKOUT", vcs_sparse_checkout) # The application/project to build # ============================================================== -option('--enable-application', nargs=1, env='MOZ_BUILD_APP', - help='Application to build. Same as --enable-project.') +option( + "--enable-application", + nargs=1, + env="MOZ_BUILD_APP", + help="Application to build. Same as --enable-project.", +) -@depends('--enable-application') +@depends("--enable-application") def application(app): if app: return app -imply_option('--enable-project', application) +imply_option("--enable-project", application) @depends(check_build_environment) def default_project(build_env): - if build_env.topobjdir.endswith('/js/src'): - return 'js' - return 'browser' + if build_env.topobjdir.endswith("/js/src"): + return "js" + return "browser" -option('--enable-project', nargs=1, default=default_project, - help='Project to build') +option("--enable-project", nargs=1, default=default_project, help="Project to build") # Host and target systems # ============================================================== -option('--host', nargs=1, help='Define the system type performing the build') +option("--host", nargs=1, help="Define the system type performing the build") -option('--target', nargs=1, - help='Define the system type where the resulting executables will be ' - 'used') +option( + "--target", + nargs=1, + help="Define the system type where the resulting executables will be " "used", +) -@imports(_from='mozbuild.configure.constants', _import='CPU') -@imports(_from='mozbuild.configure.constants', _import='CPU_bitness') -@imports(_from='mozbuild.configure.constants', _import='Endianness') -@imports(_from='mozbuild.configure.constants', _import='Kernel') -@imports(_from='mozbuild.configure.constants', _import='OS') -@imports(_from='__builtin__', _import='ValueError') +@imports(_from="mozbuild.configure.constants", _import="CPU") +@imports(_from="mozbuild.configure.constants", _import="CPU_bitness") +@imports(_from="mozbuild.configure.constants", _import="Endianness") +@imports(_from="mozbuild.configure.constants", _import="Kernel") +@imports(_from="mozbuild.configure.constants", _import="OS") +@imports(_from="__builtin__", _import="ValueError") def split_triplet(triplet, allow_msvc=False): # The standard triplet is defined as # CPU_TYPE-VENDOR-OPERATING_SYSTEM @@ -646,8 +681,8 @@ def split_triplet(triplet, allow_msvc=False): # Additionally, some may omit "unknown" when the vendor # is not specified and emit # CPU_TYPE-OPERATING_SYSTEM - vendor = 'unknown' - parts = triplet.split('-', 2) + vendor = "unknown" + parts = 
triplet.split("-", 2) if len(parts) == 3: cpu, vendor, os = parts elif len(parts) == 2: @@ -664,101 +699,101 @@ def split_triplet(triplet, allow_msvc=False): # which can probably be improved/cleaned up because they are based on a # mix of uname and config.guess output, while we now only use the latter, # which presumably has a cleaner and leaner output. Let's refine later. - os = os.replace('/', '_') - if 'android' in os: - canonical_os = 'Android' - canonical_kernel = 'Linux' - elif os.startswith('linux'): - canonical_os = 'GNU' - canonical_kernel = 'Linux' - elif os.startswith('kfreebsd') and os.endswith('-gnu'): - canonical_os = 'GNU' - canonical_kernel = 'kFreeBSD' - elif os.startswith('gnu'): - canonical_os = canonical_kernel = 'GNU' - elif os.startswith('mingw') or (allow_msvc and os == 'windows-msvc'): + os = os.replace("/", "_") + if "android" in os: + canonical_os = "Android" + canonical_kernel = "Linux" + elif os.startswith("linux"): + canonical_os = "GNU" + canonical_kernel = "Linux" + elif os.startswith("kfreebsd") and os.endswith("-gnu"): + canonical_os = "GNU" + canonical_kernel = "kFreeBSD" + elif os.startswith("gnu"): + canonical_os = canonical_kernel = "GNU" + elif os.startswith("mingw") or (allow_msvc and os == "windows-msvc"): # windows-msvc is only opt-in for the caller of this function until # full support in bug 1617793. - canonical_os = canonical_kernel = 'WINNT' - elif os.startswith('darwin'): - canonical_kernel = 'Darwin' - canonical_os = 'OSX' - elif os.startswith('dragonfly'): - canonical_os = canonical_kernel = 'DragonFly' - elif os.startswith('freebsd'): - canonical_os = canonical_kernel = 'FreeBSD' - elif os.startswith('netbsd'): - canonical_os = canonical_kernel = 'NetBSD' - elif os.startswith('openbsd'): - canonical_os = canonical_kernel = 'OpenBSD' - elif os.startswith('solaris'): - canonical_os = canonical_kernel = 'SunOS' + canonical_os = canonical_kernel = "WINNT" + elif os.startswith("darwin"): + canonical_kernel = "Darwin" + canonical_os = "OSX" + elif os.startswith("dragonfly"): + canonical_os = canonical_kernel = "DragonFly" + elif os.startswith("freebsd"): + canonical_os = canonical_kernel = "FreeBSD" + elif os.startswith("netbsd"): + canonical_os = canonical_kernel = "NetBSD" + elif os.startswith("openbsd"): + canonical_os = canonical_kernel = "OpenBSD" + elif os.startswith("solaris"): + canonical_os = canonical_kernel = "SunOS" else: - raise ValueError('Unknown OS: %s' % os) + raise ValueError("Unknown OS: %s" % os) # The CPU granularity is probably not enough. 
Moving more things from # old-configure will tell us if we need more - if cpu.endswith('86') or (cpu.startswith('i') and '86' in cpu): - canonical_cpu = 'x86' - endianness = 'little' - elif cpu in ('x86_64', 'ia64'): + if cpu.endswith("86") or (cpu.startswith("i") and "86" in cpu): + canonical_cpu = "x86" + endianness = "little" + elif cpu in ("x86_64", "ia64"): canonical_cpu = cpu - endianness = 'little' - elif cpu in ('s390', 's390x'): + endianness = "little" + elif cpu in ("s390", "s390x"): canonical_cpu = cpu - endianness = 'big' - elif cpu in ('powerpc64', 'ppc64', 'powerpc64le', 'ppc64le'): - canonical_cpu = 'ppc64' - endianness = 'little' if 'le' in cpu else 'big' - elif cpu in ('powerpc', 'ppc', 'rs6000') or cpu.startswith('powerpc'): - canonical_cpu = 'ppc' - endianness = 'big' - elif cpu in ('Alpha', 'alpha', 'ALPHA'): - canonical_cpu = 'Alpha' - endianness = 'little' - elif cpu.startswith('hppa') or cpu == 'parisc': - canonical_cpu = 'hppa' - endianness = 'big' - elif cpu.startswith('sparc64') or cpu.startswith('sparcv9'): - canonical_cpu = 'sparc64' - endianness = 'big' - elif cpu.startswith('sparc') or cpu == 'sun4u': - canonical_cpu = 'sparc' - endianness = 'big' - elif cpu.startswith('arm'): - canonical_cpu = 'arm' - endianness = 'big' if cpu.startswith(('armeb', 'armbe')) else 'little' - elif cpu in ('m68k'): - canonical_cpu = 'm68k' - endianness = 'big' - elif cpu in ('mips', 'mipsel'): - canonical_cpu = 'mips32' - endianness = 'little' if 'el' in cpu else 'big' - elif cpu in ('mips64', 'mips64el'): - canonical_cpu = 'mips64' - endianness = 'little' if 'el' in cpu else 'big' - elif cpu.startswith('aarch64'): - canonical_cpu = 'aarch64' - endianness = 'little' - elif cpu in ('riscv64', 'riscv64gc'): - canonical_cpu = 'riscv64' - endianness = 'little' - elif cpu == 'sh4': - canonical_cpu = 'sh4' - endianness = 'little' + endianness = "big" + elif cpu in ("powerpc64", "ppc64", "powerpc64le", "ppc64le"): + canonical_cpu = "ppc64" + endianness = "little" if "le" in cpu else "big" + elif cpu in ("powerpc", "ppc", "rs6000") or cpu.startswith("powerpc"): + canonical_cpu = "ppc" + endianness = "big" + elif cpu in ("Alpha", "alpha", "ALPHA"): + canonical_cpu = "Alpha" + endianness = "little" + elif cpu.startswith("hppa") or cpu == "parisc": + canonical_cpu = "hppa" + endianness = "big" + elif cpu.startswith("sparc64") or cpu.startswith("sparcv9"): + canonical_cpu = "sparc64" + endianness = "big" + elif cpu.startswith("sparc") or cpu == "sun4u": + canonical_cpu = "sparc" + endianness = "big" + elif cpu.startswith("arm"): + canonical_cpu = "arm" + endianness = "big" if cpu.startswith(("armeb", "armbe")) else "little" + elif cpu in ("m68k"): + canonical_cpu = "m68k" + endianness = "big" + elif cpu in ("mips", "mipsel"): + canonical_cpu = "mips32" + endianness = "little" if "el" in cpu else "big" + elif cpu in ("mips64", "mips64el"): + canonical_cpu = "mips64" + endianness = "little" if "el" in cpu else "big" + elif cpu.startswith("aarch64"): + canonical_cpu = "aarch64" + endianness = "little" + elif cpu in ("riscv64", "riscv64gc"): + canonical_cpu = "riscv64" + endianness = "little" + elif cpu == "sh4": + canonical_cpu = "sh4" + endianness = "little" else: - raise ValueError('Unknown CPU type: %s' % cpu) + raise ValueError("Unknown CPU type: %s" % cpu) # Toolchains, most notably for cross compilation may use cpu-os # prefixes. We need to be more specific about the LLVM target on Mac # so cross-language LTO will work correctly. 
- if os.startswith('darwin'): - toolchain = '%s-apple-%s' % (cpu, os) - elif canonical_cpu == 'aarch64' and canonical_os == 'WINNT': - toolchain = 'aarch64-windows-msvc' + if os.startswith("darwin"): + toolchain = "%s-apple-%s" % (cpu, os) + elif canonical_cpu == "aarch64" and canonical_os == "WINNT": + toolchain = "aarch64-windows-msvc" else: - toolchain = '%s-%s' % (cpu, os) + toolchain = "%s-%s" % (cpu, os) return namespace( alias=triplet, @@ -777,52 +812,53 @@ def split_triplet(triplet, allow_msvc=False): # This defines a fake target/host namespace for when running with --help # If either --host or --target is passed on the command line, then fall # back to the real deal. -@depends('--help', '--host', '--target') +@depends("--help", "--host", "--target") def help_host_target(help, host, target): if help and not host and not target: return namespace( - alias='unknown-unknown-unknown', - cpu='unknown', - bitness='unknown', - kernel='unknown', - os='unknown', - endianness='unknown', - raw_cpu='unknown', - raw_os='unknown', - toolchain='unknown-unknown', + alias="unknown-unknown-unknown", + cpu="unknown", + bitness="unknown", + kernel="unknown", + os="unknown", + endianness="unknown", + raw_cpu="unknown", + raw_os="unknown", + toolchain="unknown-unknown", ) def config_sub(shell, triplet): - config_sub = os.path.join(os.path.dirname(__file__), '..', - 'autoconf', 'config.sub') + config_sub = os.path.join(os.path.dirname(__file__), "..", "autoconf", "config.sub") return check_cmd_output(shell, config_sub, triplet).strip() -@depends('--host', shell) -@checking('for host system type', lambda h: h.alias) -@imports('os') -@imports('sys') -@imports(_from='__builtin__', _import='ValueError') +@depends("--host", shell) +@checking("for host system type", lambda h: h.alias) +@imports("os") +@imports("sys") +@imports(_from="__builtin__", _import="ValueError") def real_host(value, shell): - if not value and sys.platform == 'win32': - arch = (os.environ.get('PROCESSOR_ARCHITEW6432') or - os.environ.get('PROCESSOR_ARCHITECTURE')) - if arch == 'AMD64': - return split_triplet('x86_64-pc-mingw32') - elif arch == 'x86': - return split_triplet('i686-pc-mingw32') + if not value and sys.platform == "win32": + arch = os.environ.get("PROCESSOR_ARCHITEW6432") or os.environ.get( + "PROCESSOR_ARCHITECTURE" + ) + if arch == "AMD64": + return split_triplet("x86_64-pc-mingw32") + elif arch == "x86": + return split_triplet("i686-pc-mingw32") if not value: - config_guess = os.path.join(os.path.dirname(__file__), '..', - 'autoconf', 'config.guess') + config_guess = os.path.join( + os.path.dirname(__file__), "..", "autoconf", "config.guess" + ) # Ensure that config.guess is determining the host triplet, not the target # triplet env = os.environ.copy() - env.pop('CC_FOR_BUILD', None) - env.pop('HOST_CC', None) - env.pop('CC', None) + env.pop("CC_FOR_BUILD", None) + env.pop("HOST_CC", None) + env.pop("CC", None) host = check_cmd_output(shell, config_guess, env=env).strip() try: @@ -843,9 +879,9 @@ def real_host(value, shell): host = help_host_target | real_host -@depends('--target', real_host, shell, '--enable-project', '--enable-application') -@checking('for target system type', lambda t: t.alias) -@imports(_from='__builtin__', _import='ValueError') +@depends("--target", real_host, shell, "--enable-project", "--enable-application") +@checking("for target system type", lambda t: t.alias) +@imports(_from="__builtin__", _import="ValueError") def real_target(value, host, shell, project, application): # Because --enable-project 
is implied by --enable-application, and # implied options are not currently handled during --help, which is @@ -858,26 +894,27 @@ def real_target(value, host, shell, project, application): elif project: project = project[0] if not value: - if project == 'mobile/android': - if host.raw_os == 'mingw32': + if project == "mobile/android": + if host.raw_os == "mingw32": log.warning( - 'Building Firefox for Android on Windows is not fully ' - 'supported. See https://bugzilla.mozilla.org/show_bug.cgi?' - 'id=1169873 for details.') - return split_triplet('arm-unknown-linux-androideabi') + "Building Firefox for Android on Windows is not fully " + "supported. See https://bugzilla.mozilla.org/show_bug.cgi?" + "id=1169873 for details." + ) + return split_triplet("arm-unknown-linux-androideabi") return host # If --target was only given a cpu arch, expand it with the # non-cpu part of the host. For mobile/android, expand it with # unknown-linux-android. target = value[0] - if '-' not in target: - if project == 'mobile/android': - rest = 'unknown-linux-android' - if target.startswith('arm'): - rest += 'eabi' + if "-" not in target: + if project == "mobile/android": + rest = "unknown-linux-android" + if target.startswith("arm"): + rest += "eabi" else: - cpu, rest = host.alias.split('-', 1) - target = '-'.join((target, rest)) + cpu, rest = host.alias.split("-", 1) + target = "-".join((target, rest)) try: return split_triplet(target) except ValueError: @@ -893,14 +930,14 @@ target = help_host_target | real_target @depends(host, target) -@checking('whether cross compiling') +@checking("whether cross compiling") def cross_compiling(host, target): return host != target -set_config('CROSS_COMPILE', cross_compiling) -set_define('CROSS_COMPILE', cross_compiling) -add_old_configure_assignment('CROSS_COMPILE', cross_compiling) +set_config("CROSS_COMPILE", cross_compiling) +set_define("CROSS_COMPILE", cross_compiling) +add_old_configure_assignment("CROSS_COMPILE", cross_compiling) @depends(target) @@ -909,43 +946,42 @@ def have_64_bit(target): return True -set_config('HAVE_64BIT_BUILD', have_64_bit) -set_define('HAVE_64BIT_BUILD', have_64_bit) -add_old_configure_assignment('HAVE_64BIT_BUILD', have_64_bit) +set_config("HAVE_64BIT_BUILD", have_64_bit) +set_define("HAVE_64BIT_BUILD", have_64_bit) +add_old_configure_assignment("HAVE_64BIT_BUILD", have_64_bit) @depends(host) def host_os_kernel_major_version(host): - versions = host.raw_os.split('.') - version = ''.join(x for x in versions[0] if x.isdigit()) + versions = host.raw_os.split(".") + version = "".join(x for x in versions[0] if x.isdigit()) return version -set_config('HOST_MAJOR_VERSION', host_os_kernel_major_version) +set_config("HOST_MAJOR_VERSION", host_os_kernel_major_version) # Autoconf needs these set @depends(host) def host_for_sub_configure(host): - return '--host=%s' % host.alias + return "--host=%s" % host.alias @depends(target) def target_for_sub_configure(target): target_alias = target.alias - return '--target=%s' % target_alias + return "--target=%s" % target_alias # These variables are for compatibility with the current moz.builds and # old-configure. Eventually, we'll want to canonicalize better. 
@depends(target) def target_variables(target): - if target.kernel == 'kFreeBSD': - os_target = 'GNU/kFreeBSD' - os_arch = 'GNU_kFreeBSD' - elif target.kernel == 'Darwin' or (target.kernel == 'Linux' and - target.os == 'GNU'): + if target.kernel == "kFreeBSD": + os_target = "GNU/kFreeBSD" + os_arch = "GNU_kFreeBSD" + elif target.kernel == "Darwin" or (target.kernel == "Linux" and target.os == "GNU"): os_target = target.kernel os_arch = target.kernel else: @@ -955,28 +991,26 @@ def target_variables(target): return namespace( OS_TARGET=os_target, OS_ARCH=os_arch, - INTEL_ARCHITECTURE=target.cpu in ('x86', 'x86_64') or None, + INTEL_ARCHITECTURE=target.cpu in ("x86", "x86_64") or None, ) -set_config('OS_TARGET', target_variables.OS_TARGET) -add_old_configure_assignment('OS_TARGET', - target_variables.OS_TARGET) -set_config('OS_ARCH', target_variables.OS_ARCH) -add_old_configure_assignment('OS_ARCH', - target_variables.OS_ARCH) -set_config('CPU_ARCH', target.cpu) -add_old_configure_assignment('CPU_ARCH', target.cpu) -set_config('INTEL_ARCHITECTURE', target_variables.INTEL_ARCHITECTURE) -set_config('TARGET_CPU', target.raw_cpu) -set_config('TARGET_OS', target.raw_os) -set_config('TARGET_ENDIANNESS', target.endianness) +set_config("OS_TARGET", target_variables.OS_TARGET) +add_old_configure_assignment("OS_TARGET", target_variables.OS_TARGET) +set_config("OS_ARCH", target_variables.OS_ARCH) +add_old_configure_assignment("OS_ARCH", target_variables.OS_ARCH) +set_config("CPU_ARCH", target.cpu) +add_old_configure_assignment("CPU_ARCH", target.cpu) +set_config("INTEL_ARCHITECTURE", target_variables.INTEL_ARCHITECTURE) +set_config("TARGET_CPU", target.raw_cpu) +set_config("TARGET_OS", target.raw_os) +set_config("TARGET_ENDIANNESS", target.endianness) @depends(host) def host_variables(host): - if host.kernel == 'kFreeBSD': - os_arch = 'GNU_kFreeBSD' + if host.kernel == "kFreeBSD": + os_arch = "GNU_kFreeBSD" else: os_arch = host.kernel return namespace( @@ -984,193 +1018,200 @@ def host_variables(host): ) -set_config('HOST_CPU_ARCH', host.cpu) -set_config('HOST_OS_ARCH', host_variables.HOST_OS_ARCH) -add_old_configure_assignment('HOST_OS_ARCH', - host_variables.HOST_OS_ARCH) +set_config("HOST_CPU_ARCH", host.cpu) +set_config("HOST_OS_ARCH", host_variables.HOST_OS_ARCH) +add_old_configure_assignment("HOST_OS_ARCH", host_variables.HOST_OS_ARCH) @depends(target) def target_is_windows(target): - if target.kernel == 'WINNT': + if target.kernel == "WINNT": return True -set_define('_WINDOWS', target_is_windows) -set_define('WIN32', target_is_windows) -set_define('XP_WIN', target_is_windows) +set_define("_WINDOWS", target_is_windows) +set_define("WIN32", target_is_windows) +set_define("XP_WIN", target_is_windows) @depends(target) def target_is_unix(target): - if target.kernel != 'WINNT': + if target.kernel != "WINNT": return True -set_define('XP_UNIX', target_is_unix) +set_define("XP_UNIX", target_is_unix) @depends(target) def target_is_darwin(target): - if target.kernel == 'Darwin': + if target.kernel == "Darwin": return True -set_define('XP_DARWIN', target_is_darwin) +set_define("XP_DARWIN", target_is_darwin) @depends(target) def target_is_osx(target): - if target.kernel == 'Darwin' and target.os == 'OSX': + if target.kernel == "Darwin" and target.os == "OSX": return True -set_define('XP_MACOSX', target_is_osx) +set_define("XP_MACOSX", target_is_osx) @depends(target) def target_is_linux(target): - if target.kernel == 'Linux': + if target.kernel == "Linux": return True -set_define('XP_LINUX', target_is_linux) 
+set_define("XP_LINUX", target_is_linux) @depends(target) def target_is_android(target): - if target.os == 'Android': + if target.os == "Android": return True -set_define('ANDROID', target_is_android) +set_define("ANDROID", target_is_android) @depends(target) def target_is_openbsd(target): - if target.kernel == 'OpenBSD': + if target.kernel == "OpenBSD": return True -set_define('XP_OPENBSD', target_is_openbsd) +set_define("XP_OPENBSD", target_is_openbsd) + @depends(target) def target_is_netbsd(target): - if target.kernel == 'NetBSD': + if target.kernel == "NetBSD": return True -set_define('XP_NETBSD', target_is_netbsd) +set_define("XP_NETBSD", target_is_netbsd) + @depends(target) def target_is_freebsd(target): - if target.kernel == 'FreeBSD': + if target.kernel == "FreeBSD": return True -set_define('XP_FREEBSD', target_is_freebsd) +set_define("XP_FREEBSD", target_is_freebsd) + @depends(target) def target_is_solaris(target): - if target.kernel == 'SunOS': + if target.kernel == "SunOS": return True -set_define('XP_SOLARIS', target_is_solaris) +set_define("XP_SOLARIS", target_is_solaris) @depends(target) def target_is_sparc(target): - if target.cpu == 'sparc64': + if target.cpu == "sparc64": return True -set_define('SPARC64', target_is_sparc) + +set_define("SPARC64", target_is_sparc) -@depends('--enable-project', check_build_environment, '--help') -@imports(_from='os.path', _import='exists') +@depends("--enable-project", check_build_environment, "--help") +@imports(_from="os.path", _import="exists") def include_project_configure(project, build_env, help): if not project: - die('--enable-project is required.') + die("--enable-project is required.") base_dir = build_env.topsrcdir - path = os.path.join(base_dir, project[0], 'moz.configure') + path = os.path.join(base_dir, project[0], "moz.configure") if not exists(path): - die('Cannot find project %s', project[0]) + die("Cannot find project %s", project[0]) return path @depends(include_project_configure, check_build_environment) def build_project(include_project_configure, build_env): - ret = os.path.dirname(os.path.relpath(include_project_configure, - build_env.topsrcdir)) + ret = os.path.dirname( + os.path.relpath(include_project_configure, build_env.topsrcdir) + ) return ret -set_config('MOZ_BUILD_APP', build_project) -set_define('MOZ_BUILD_APP', build_project) -add_old_configure_assignment('MOZ_BUILD_APP', build_project) +set_config("MOZ_BUILD_APP", build_project) +set_define("MOZ_BUILD_APP", build_project) +add_old_configure_assignment("MOZ_BUILD_APP", build_project) -option(env='MOZILLA_OFFICIAL', - help='Build an official release') +option(env="MOZILLA_OFFICIAL", help="Build an official release") -@depends('MOZILLA_OFFICIAL') +@depends("MOZILLA_OFFICIAL") def mozilla_official(official): if official: return True -set_config('MOZILLA_OFFICIAL', mozilla_official) -set_define('MOZILLA_OFFICIAL', mozilla_official) -add_old_configure_assignment('MOZILLA_OFFICIAL', mozilla_official) +set_config("MOZILLA_OFFICIAL", mozilla_official) +set_define("MOZILLA_OFFICIAL", mozilla_official) +add_old_configure_assignment("MOZILLA_OFFICIAL", mozilla_official) # Allow specifying custom paths to the version files used by the milestone() function below. 
-option('--with-version-file-path', - nargs=1, - help='Specify a custom path to app version files instead of auto-detecting', - default=None) +option( + "--with-version-file-path", + nargs=1, + help="Specify a custom path to app version files instead of auto-detecting", + default=None, +) + -@depends('--with-version-file-path') +@depends("--with-version-file-path") def version_path(path): return path + # set RELEASE_OR_BETA and NIGHTLY_BUILD variables depending on the cycle we're in # The logic works like this: # - if we have "a1" in GRE_MILESTONE, we're building Nightly (define NIGHTLY_BUILD) # - otherwise, if we have "a" in GRE_MILESTONE, we're building Nightly or Aurora # - otherwise, we're building Release/Beta (define RELEASE_OR_BETA) -@depends(check_build_environment, build_project, version_path, '--help') -@imports(_from='__builtin__', _import='open') -@imports('os') -@imports('re') +@depends(check_build_environment, build_project, version_path, "--help") +@imports(_from="__builtin__", _import="open") +@imports("os") +@imports("re") def milestone(build_env, build_project, version_path, _): versions = [] - paths = ['config/milestone.txt'] - if build_project == 'js': + paths = ["config/milestone.txt"] + if build_project == "js": paths = paths * 3 else: paths += [ - 'browser/config/version.txt', - 'browser/config/version_display.txt', + "browser/config/version.txt", + "browser/config/version_display.txt", ] if version_path: version_path = version_path[0] else: - version_path = os.path.join(build_project, 'config') - for f in ('version.txt', 'version_display.txt'): + version_path = os.path.join(build_project, "config") + for f in ("version.txt", "version_display.txt"): f = os.path.join(version_path, f) if not os.path.exists(os.path.join(build_env.topsrcdir, f)): break paths.append(f) for p in paths: - with open(os.path.join(build_env.topsrcdir, p), 'r') as fh: + with open(os.path.join(build_env.topsrcdir, p), "r") as fh: content = fh.read().splitlines() if not content: - die('Could not find a version number in {}'.format(p)) + die("Could not find a version number in {}".format(p)) versions.append(content[-1]) milestone, firefox_version, firefox_version_display = versions[:3] @@ -1185,26 +1226,28 @@ def milestone(build_env, build_project, version_path, _): is_nightly = is_release_or_beta = is_early_beta_or_earlier = None - if 'a1' in milestone: + if "a1" in milestone: is_nightly = True - elif 'a' not in milestone: + elif "a" not in milestone: is_release_or_beta = True - major_version = milestone.split('.')[0] + major_version = milestone.split(".")[0] m = re.search(r"([ab]\d+)", milestone) - ab_patch = m.group(1) if m else '' + ab_patch = m.group(1) if m else "" - defines = os.path.join(build_env.topsrcdir, 'build', 'defines.sh') - with open(defines, 'r') as fh: + defines = os.path.join(build_env.topsrcdir, "build", "defines.sh") + with open(defines, "r") as fh: for line in fh.read().splitlines(): line = line.strip() - if not line or line.startswith('#'): + if not line or line.startswith("#"): continue - name, _, value = line.partition('=') + name, _, value = line.partition("=") name = name.strip() value = value.strip() - if name != 'EARLY_BETA_OR_EARLIER': - die('Only the EARLY_BETA_OR_EARLIER variable can be set in build/defines.sh') + if name != "EARLY_BETA_OR_EARLIER": + die( + "Only the EARLY_BETA_OR_EARLIER variable can be set in build/defines.sh" + ) if value: is_early_beta_or_earlier = True @@ -1213,39 +1256,41 @@ def milestone(build_env, build_project, version_path, _): # # Only 
expose major milestone and alpha version in the symbolversion # string; as the name suggests, we use it for symbol versioning on Linux. - return namespace(version=milestone, - uaversion='%s.0' % major_version, - symbolversion='%s%s' % (major_version, ab_patch), - is_nightly=is_nightly, - is_release_or_beta=is_release_or_beta, - is_early_beta_or_earlier=is_early_beta_or_earlier, - app_version=app_version, - app_version_display=app_version_display) - - -set_config('GRE_MILESTONE', milestone.version) -set_config('NIGHTLY_BUILD', milestone.is_nightly) -set_define('NIGHTLY_BUILD', milestone.is_nightly) -set_config('RELEASE_OR_BETA', milestone.is_release_or_beta) -set_define('RELEASE_OR_BETA', milestone.is_release_or_beta) -add_old_configure_assignment('RELEASE_OR_BETA', - milestone.is_release_or_beta) -set_config('EARLY_BETA_OR_EARLIER', milestone.is_early_beta_or_earlier) -set_define('EARLY_BETA_OR_EARLIER', milestone.is_early_beta_or_earlier) -add_old_configure_assignment('EARLY_BETA_OR_EARLIER', - milestone.is_early_beta_or_earlier) -set_define('MOZILLA_VERSION', depends(milestone)(lambda m: '"%s"' % m.version)) -set_config('MOZILLA_VERSION', milestone.version) -set_define('MOZILLA_VERSION_U', milestone.version) -set_define('MOZILLA_UAVERSION', depends(milestone)(lambda m: '"%s"' % m.uaversion)) -set_config('MOZILLA_SYMBOLVERSION', milestone.symbolversion) + return namespace( + version=milestone, + uaversion="%s.0" % major_version, + symbolversion="%s%s" % (major_version, ab_patch), + is_nightly=is_nightly, + is_release_or_beta=is_release_or_beta, + is_early_beta_or_earlier=is_early_beta_or_earlier, + app_version=app_version, + app_version_display=app_version_display, + ) + + +set_config("GRE_MILESTONE", milestone.version) +set_config("NIGHTLY_BUILD", milestone.is_nightly) +set_define("NIGHTLY_BUILD", milestone.is_nightly) +set_config("RELEASE_OR_BETA", milestone.is_release_or_beta) +set_define("RELEASE_OR_BETA", milestone.is_release_or_beta) +add_old_configure_assignment("RELEASE_OR_BETA", milestone.is_release_or_beta) +set_config("EARLY_BETA_OR_EARLIER", milestone.is_early_beta_or_earlier) +set_define("EARLY_BETA_OR_EARLIER", milestone.is_early_beta_or_earlier) +add_old_configure_assignment( + "EARLY_BETA_OR_EARLIER", milestone.is_early_beta_or_earlier +) +set_define("MOZILLA_VERSION", depends(milestone)(lambda m: '"%s"' % m.version)) +set_config("MOZILLA_VERSION", milestone.version) +set_define("MOZILLA_VERSION_U", milestone.version) +set_define("MOZILLA_UAVERSION", depends(milestone)(lambda m: '"%s"' % m.uaversion)) +set_config("MOZILLA_SYMBOLVERSION", milestone.symbolversion) # JS configure still wants to look at these. -add_old_configure_assignment('MOZILLA_VERSION', milestone.version) -add_old_configure_assignment('MOZILLA_SYMBOLVERSION', milestone.symbolversion) +add_old_configure_assignment("MOZILLA_VERSION", milestone.version) +add_old_configure_assignment("MOZILLA_SYMBOLVERSION", milestone.symbolversion) -set_config('MOZ_APP_VERSION', milestone.app_version) -set_config('MOZ_APP_VERSION_DISPLAY', milestone.app_version_display) -add_old_configure_assignment('MOZ_APP_VERSION', milestone.app_version) +set_config("MOZ_APP_VERSION", milestone.app_version) +set_config("MOZ_APP_VERSION_DISPLAY", milestone.app_version_display) +add_old_configure_assignment("MOZ_APP_VERSION", milestone.app_version) # Dummy function for availability in toolkit/moz.configure. Overridden in @@ -1258,35 +1303,40 @@ def fennec_nightly(is_nightly): # The app update channel is 'default' when not supplied. 
The value is used in # the application's confvars.sh (and is made available to a project specific # moz.configure). -option('--enable-update-channel', - nargs=1, - help='Select application update channel', - default='default') +option( + "--enable-update-channel", + nargs=1, + help="Select application update channel", + default="default", +) -@depends('--enable-update-channel') +@depends("--enable-update-channel") def update_channel(channel): - if not channel or channel[0] == '': - return 'default' + if not channel or channel[0] == "": + return "default" return channel[0].lower() -set_config('MOZ_UPDATE_CHANNEL', update_channel) -set_define('MOZ_UPDATE_CHANNEL', update_channel) -add_old_configure_assignment('MOZ_UPDATE_CHANNEL', update_channel) +set_config("MOZ_UPDATE_CHANNEL", update_channel) +set_define("MOZ_UPDATE_CHANNEL", update_channel) +add_old_configure_assignment("MOZ_UPDATE_CHANNEL", update_channel) -option(env='MOZBUILD_STATE_PATH', nargs=1, - help='Path to a persistent state directory for the build system ' - 'and related tools') +option( + env="MOZBUILD_STATE_PATH", + nargs=1, + help="Path to a persistent state directory for the build system " + "and related tools", +) -@depends('MOZBUILD_STATE_PATH', '--help') -@imports('os') +@depends("MOZBUILD_STATE_PATH", "--help") +@imports("os") def mozbuild_state_path(path, _): if path: return path[0] - return os.path.expanduser(os.path.join('~', '.mozbuild')) + return os.path.expanduser(os.path.join("~", ".mozbuild")) # A template providing a shorthand for setting a variable. The created @@ -1298,14 +1348,12 @@ def mozbuild_state_path(path, _): # add_old_configure_assignment. util.configure would be an appropriate place for # this, but it uses add_old_configure_assignment, which is defined in this file. @template -def project_flag(env=None, set_for_old_configure=False, - set_as_define=False, **kwargs): +def project_flag(env=None, set_for_old_configure=False, set_as_define=False, **kwargs): if not env: - configure_error( - "A project_flag must be passed a variable name to set.") + configure_error("A project_flag must be passed a variable name to set.") - opt = option(env=env, possible_origins=('implied',), **kwargs) + opt = option(env=env, possible_origins=("implied",), **kwargs) @depends(opt.option) def option_implementation(value): @@ -1320,29 +1368,41 @@ def project_flag(env=None, set_for_old_configure=False, if set_for_old_configure: add_old_configure_assignment(env, option_implementation) + # milestone.is_nightly corresponds to cases NIGHTLY_BUILD is set. + @depends(milestone) def enabled_in_nightly(milestone): return milestone.is_nightly + # Branding # ============================================================== -option('--with-app-basename', env='MOZ_APP_BASENAME', nargs=1, - help='Typically stays consistent for multiple branded versions of a ' - 'given application (e.g. Aurora and Firefox both use "Firefox"), but ' - 'may vary for full rebrandings (e.g. Iceweasel). Used for ' - 'application.ini\'s "Name" field, which controls profile location in ' - 'the absence of a "Profile" field (see below), and various system ' - 'integration hooks (Unix remoting, Windows MessageWindow name, etc.') - -@depends('--with-app-basename', target_is_android) +option( + "--with-app-basename", + env="MOZ_APP_BASENAME", + nargs=1, + help="Typically stays consistent for multiple branded versions of a " + 'given application (e.g. Aurora and Firefox both use "Firefox"), but ' + "may vary for full rebrandings (e.g. Iceweasel). 
Used for " + 'application.ini\'s "Name" field, which controls profile location in ' + 'the absence of a "Profile" field (see below), and various system ' + "integration hooks (Unix remoting, Windows MessageWindow name, etc.", +) + + +@depends("--with-app-basename", target_is_android) def moz_app_basename(value, target_is_android): if value: return value[0] if target_is_android: - return 'Fennec' - return 'Firefox' + return "Fennec" + return "Firefox" + -set_config('MOZ_APP_BASENAME', moz_app_basename, - when=depends(build_project)(lambda p: p != 'js')) +set_config( + "MOZ_APP_BASENAME", + moz_app_basename, + when=depends(build_project)(lambda p: p != "js"), +) diff --git a/build/moz.configure/java.configure b/build/moz.configure/java.configure index b58e9817b3aa17..f45f3b77fb4aed 100644 --- a/build/moz.configure/java.configure +++ b/build/moz.configure/java.configure @@ -7,28 +7,37 @@ # Java detection # ======================================================== -option('--with-java-bin-path', nargs=1, - help='Location of Java binaries (java, jarsigner, keytool)') +option( + "--with-java-bin-path", + nargs=1, + help="Location of Java binaries (java, jarsigner, keytool)", +) -@depends(host, '--with-java-bin-path') -@imports(_from='os', _import='environ') +@depends(host, "--with-java-bin-path") +@imports(_from="os", _import="environ") def java_search_paths(host, path): if path: # Look for javac and jar in the specified path. return path # With no path specified, look for javac and jar in $JAVA_HOME (if set), # JDK registery on Windows, and $PATH. - if 'JAVA_HOME' in environ: - return [os.path.join(environ['JAVA_HOME'], 'bin')] - if host.os == 'WINNT': - for x in get_registry_values(r'HKEY_LOCAL_MACHINE\SOFTWARE\JavaSoft\Java Development Kit\1.8\JavaHome', get_32_and_64_bit=True): - return [os.path.join(x[0], 'bin')] - if host.os == 'OSX': - home = check_cmd_output('/usr/libexec/java_home', '-v', '1.8', onerror=lambda: '').rstrip() + if "JAVA_HOME" in environ: + return [os.path.join(environ["JAVA_HOME"], "bin")] + if host.os == "WINNT": + for x in get_registry_values( + r"HKEY_LOCAL_MACHINE\SOFTWARE\JavaSoft\Java Development Kit\1.8\JavaHome", + get_32_and_64_bit=True, + ): + return [os.path.join(x[0], "bin")] + if host.os == "OSX": + home = check_cmd_output( + "/usr/libexec/java_home", "-v", "1.8", onerror=lambda: "" + ).rstrip() if home: - return [os.path.join(home, 'bin')] - return [environ.get('PATH')] + return [os.path.join(home, "bin")] + return [environ.get("PATH")] + # Finds the given java tool, failing with a custom error message if we can't # find it. @@ -36,27 +45,35 @@ def java_search_paths(host, path): @template def check_java_tool(tool): - check = check_prog(tool.upper(), (tool,), paths=java_search_paths, - allow_missing=True) + check = check_prog( + tool.upper(), (tool,), paths=java_search_paths, allow_missing=True + ) @depends(check) def require_tool(result): if result is None: - die("The program %s was not found. Set $JAVA_HOME to your Java " - "SDK directory or use '--with-java-bin-path={java-bin-dir}'" - % tool) + die( + "The program %s was not found. 
Set $JAVA_HOME to your Java " + "SDK directory or use '--with-java-bin-path={java-bin-dir}'" % tool + ) return result return require_tool -check_java_tool('java') -check_java_tool('jarsigner') -check_java_tool('keytool') +check_java_tool("java") +check_java_tool("jarsigner") +check_java_tool("keytool") # Java Code Coverage # ======================================================== -option('--enable-java-coverage', env='MOZ_JAVA_CODE_COVERAGE', help='Enable Java code coverage') +option( + "--enable-java-coverage", + env="MOZ_JAVA_CODE_COVERAGE", + help="Enable Java code coverage", +) -set_config('MOZ_JAVA_CODE_COVERAGE', depends('--enable-java-coverage')(lambda v: bool(v))) +set_config( + "MOZ_JAVA_CODE_COVERAGE", depends("--enable-java-coverage")(lambda v: bool(v)) +) diff --git a/build/moz.configure/keyfiles.configure b/build/moz.configure/keyfiles.configure index 5d51cccea28096..242a773aac2144 100644 --- a/build/moz.configure/keyfiles.configure +++ b/build/moz.configure/keyfiles.configure @@ -7,17 +7,18 @@ @template def keyfile(desc, default=None, help=None, callback=lambda x: x): - help = help or ('Use the secret key contained in the given keyfile ' - 'for %s requests' % desc) - name = desc.lower().replace(' ', '-') - no_key = callback('no-%s-key' % name) + help = help or ( + "Use the secret key contained in the given keyfile " "for %s requests" % desc + ) + name = desc.lower().replace(" ", "-") + no_key = callback("no-%s-key" % name) - option('--with-%s-keyfile' % name, nargs=1, default=default, help=help) + option("--with-%s-keyfile" % name, nargs=1, default=default, help=help) - @depends('--with-%s-keyfile' % name) - @checking('for the %s key' % desc, lambda x: x and x is not no_key) - @imports(_from='__builtin__', _import='open') - @imports(_from='__builtin__', _import='IOError') + @depends("--with-%s-keyfile" % name) + @checking("for the %s key" % desc, lambda x: x and x is not no_key) + @imports(_from="__builtin__", _import="open") + @imports(_from="__builtin__", _import="IOError") def keyfile(value): if value: try: @@ -36,29 +37,32 @@ def keyfile(desc, default=None, help=None, callback=lambda x: x): @template def simple_keyfile(desc, default=None): value = keyfile(desc, default=default) - set_config('MOZ_%s_KEY' % desc.upper().replace(' ', '_'), value) + set_config("MOZ_%s_KEY" % desc.upper().replace(" ", "_"), value) @template def id_and_secret_keyfile(desc, default=None): def id_and_secret(value): - if value.startswith('no-') and value.endswith('-key'): - id = value[:-3] + 'clientid' + if value.startswith("no-") and value.endswith("-key"): + id = value[:-3] + "clientid" secret = value - elif ' ' in value: - id, secret = value.split(' ', 1) + elif " " in value: + id, secret = value.split(" ", 1) else: - raise FatalCheckError('%s key file has an invalid format.' % desc) + raise FatalCheckError("%s key file has an invalid format." 
% desc) return namespace( id=id, secret=secret, ) - content = keyfile(desc, help='Use the client id and secret key contained ' - 'in the given keyfile for %s requests' % desc, - default=default, - callback=id_and_secret) + content = keyfile( + desc, + help="Use the client id and secret key contained " + "in the given keyfile for %s requests" % desc, + default=default, + callback=id_and_secret, + ) - name = desc.upper().replace(' ', '_') - set_config('MOZ_%s_CLIENTID' % name, content.id) - set_config('MOZ_%s_KEY' % name, content.secret) + name = desc.upper().replace(" ", "_") + set_config("MOZ_%s_CLIENTID" % name, content.id) + set_config("MOZ_%s_KEY" % name, content.secret) diff --git a/build/moz.configure/lto-pgo.configure b/build/moz.configure/lto-pgo.configure index c4436d24db3612..2f32b1158807fd 100644 --- a/build/moz.configure/lto-pgo.configure +++ b/build/moz.configure/lto-pgo.configure @@ -6,121 +6,134 @@ # PGO # ============================================================== -llvm_profdata = check_prog('LLVM_PROFDATA', ['llvm-profdata'], - allow_missing=True, - paths=toolchain_search_path) - -option('--enable-profile-generate', - env='MOZ_PROFILE_GENERATE', - nargs='?', - choices=('cross',), - help='Build a PGO instrumented binary') - -imply_option('MOZ_PGO', - depends_if('--enable-profile-generate')(lambda _: True)) - -set_config('MOZ_PROFILE_GENERATE', - depends_if('--enable-profile-generate')(lambda _: True)) - -set_define('MOZ_PROFILE_GENERATE', - depends_if('--enable-profile-generate')(lambda _: True)) - -add_old_configure_assignment('MOZ_PROFILE_GENERATE', 1, - when='--enable-profile-generate') - -option('--enable-profile-use', - env='MOZ_PROFILE_USE', - nargs='?', - choices=('cross',), - help='Use a generated profile during the build') - -option('--with-pgo-profile-path', - help='Path to the directory with unmerged profile data to use during the build', - nargs=1) - -imply_option('MOZ_PGO', - depends_if('--enable-profile-use')(lambda _: True)) - -set_config('MOZ_PROFILE_USE', - depends_if('--enable-profile-use')(lambda _: True)) - - -@depends('--with-pgo-profile-path', '--enable-profile-use', llvm_profdata, check_build_environment) -@imports('os') +llvm_profdata = check_prog( + "LLVM_PROFDATA", ["llvm-profdata"], allow_missing=True, paths=toolchain_search_path +) + +option( + "--enable-profile-generate", + env="MOZ_PROFILE_GENERATE", + nargs="?", + choices=("cross",), + help="Build a PGO instrumented binary", +) + +imply_option("MOZ_PGO", depends_if("--enable-profile-generate")(lambda _: True)) + +set_config( + "MOZ_PROFILE_GENERATE", depends_if("--enable-profile-generate")(lambda _: True) +) + +set_define( + "MOZ_PROFILE_GENERATE", depends_if("--enable-profile-generate")(lambda _: True) +) + +add_old_configure_assignment( + "MOZ_PROFILE_GENERATE", 1, when="--enable-profile-generate" +) + +option( + "--enable-profile-use", + env="MOZ_PROFILE_USE", + nargs="?", + choices=("cross",), + help="Use a generated profile during the build", +) + +option( + "--with-pgo-profile-path", + help="Path to the directory with unmerged profile data to use during the build", + nargs=1, +) + +imply_option("MOZ_PGO", depends_if("--enable-profile-use")(lambda _: True)) + +set_config("MOZ_PROFILE_USE", depends_if("--enable-profile-use")(lambda _: True)) + + +@depends( + "--with-pgo-profile-path", + "--enable-profile-use", + llvm_profdata, + check_build_environment, +) +@imports("os") def pgo_profile_path(path, pgo_use, profdata, build_env): topobjdir = build_env.topobjdir - if 
topobjdir.endswith('/js/src'): + if topobjdir.endswith("/js/src"): topobjdir = topobjdir[:-7] if not path: - return os.path.join(topobjdir, 'instrumented', 'merged.profdata') + return os.path.join(topobjdir, "instrumented", "merged.profdata") if path and not pgo_use: - die('Pass --enable-profile-use to use --with-pgo-profile-path.') + die("Pass --enable-profile-use to use --with-pgo-profile-path.") if path and not profdata: - die('LLVM_PROFDATA must be set to process the pgo profile.') + die("LLVM_PROFDATA must be set to process the pgo profile.") if not os.path.isfile(path[0]): - die('Argument to --with-pgo-profile-path must be a file.') + die("Argument to --with-pgo-profile-path must be a file.") if not os.path.isabs(path[0]): - die('Argument to --with-pgo-profile-path must be an absolute path.') + die("Argument to --with-pgo-profile-path must be an absolute path.") return path[0] -set_config('PGO_PROFILE_PATH', pgo_profile_path) +set_config("PGO_PROFILE_PATH", pgo_profile_path) + @depends(c_compiler, pgo_profile_path, target_is_windows) -@imports('multiprocessing') -@imports(_from='__builtin__', _import='min') +@imports("multiprocessing") +@imports(_from="__builtin__", _import="min") def pgo_flags(compiler, profdata, target_is_windows): - if compiler.type == 'gcc': + if compiler.type == "gcc": return namespace( - gen_cflags=['-fprofile-generate'], - gen_ldflags=['-fprofile-generate'], - use_cflags=['-fprofile-use', '-fprofile-correction', - '-Wcoverage-mismatch'], - use_ldflags=['-fprofile-use'], + gen_cflags=["-fprofile-generate"], + gen_ldflags=["-fprofile-generate"], + use_cflags=["-fprofile-use", "-fprofile-correction", "-Wcoverage-mismatch"], + use_ldflags=["-fprofile-use"], ) - if compiler.type in ('clang-cl', 'clang'): - prefix = '' - if compiler.type == 'clang-cl': - prefix = '/clang:' + if compiler.type in ("clang-cl", "clang"): + prefix = "" + if compiler.type == "clang-cl": + prefix = "/clang:" gen_ldflags = None else: - gen_ldflags = ['-fprofile-generate'] + gen_ldflags = ["-fprofile-generate"] - gen_cflags = [prefix + '-fprofile-generate'] + gen_cflags = [prefix + "-fprofile-generate"] if target_is_windows: # native llvm-profdata.exe on Windows can't read profile data # if name compression is enabled (which cross-compiling enables # by default) - gen_cflags += ['-mllvm', '-enable-name-compression=false'] + gen_cflags += ["-mllvm", "-enable-name-compression=false"] return namespace( gen_cflags=gen_cflags, gen_ldflags=gen_ldflags, - use_cflags=[prefix + '-fprofile-use=%s' % profdata, - # Some error messages about mismatched profile data - # come in via -Wbackend-plugin, so disable those too. - '-Wno-error=backend-plugin'], + use_cflags=[ + prefix + "-fprofile-use=%s" % profdata, + # Some error messages about mismatched profile data + # come in via -Wbackend-plugin, so disable those too. 
+ "-Wno-error=backend-plugin", + ], use_ldflags=[], ) -set_config('PROFILE_GEN_CFLAGS', pgo_flags.gen_cflags) -set_config('PROFILE_GEN_LDFLAGS', pgo_flags.gen_ldflags) -set_config('PROFILE_USE_CFLAGS', pgo_flags.use_cflags) -set_config('PROFILE_USE_LDFLAGS', pgo_flags.use_ldflags) +set_config("PROFILE_GEN_CFLAGS", pgo_flags.gen_cflags) +set_config("PROFILE_GEN_LDFLAGS", pgo_flags.gen_ldflags) +set_config("PROFILE_USE_CFLAGS", pgo_flags.use_cflags) +set_config("PROFILE_USE_LDFLAGS", pgo_flags.use_ldflags) -option('--with-pgo-jarlog', - help='Use the provided jarlog file when packaging during a profile-use ' - 'build', - nargs=1) +option( + "--with-pgo-jarlog", + help="Use the provided jarlog file when packaging during a profile-use " "build", + nargs=1, +) -set_config('PGO_JARLOG_PATH', depends_if('--with-pgo-jarlog')(lambda p: p)) +set_config("PGO_JARLOG_PATH", depends_if("--with-pgo-jarlog")(lambda p: p)) -@depends('MOZ_PGO', '--enable-profile-use', '--enable-profile-generate', - c_compiler) +@depends("MOZ_PGO", "--enable-profile-use", "--enable-profile-generate", c_compiler) def moz_pgo_rust(pgo, profile_use, profile_generate, c_compiler): if not pgo: return @@ -130,38 +143,51 @@ def moz_pgo_rust(pgo, profile_use, profile_generate, c_compiler): return if profile_use and profile_generate: - die('Cannot build with --enable-profile-use and --enable-profile-generate.') + die("Cannot build with --enable-profile-use and --enable-profile-generate.") - want_cross = (len(profile_use) and profile_use[0] == 'cross') \ - or (len(profile_generate) and profile_generate[0] == 'cross') + want_cross = (len(profile_use) and profile_use[0] == "cross") or ( + len(profile_generate) and profile_generate[0] == "cross" + ) if not want_cross: return - if c_compiler.type == 'gcc': - die('Cannot use cross-language PGO with GCC.') + if c_compiler.type == "gcc": + die("Cannot use cross-language PGO with GCC.") return True -set_config('MOZ_PGO_RUST', moz_pgo_rust) + +set_config("MOZ_PGO_RUST", moz_pgo_rust) # LTO # ============================================================== -option('--enable-lto', - env='MOZ_LTO', - nargs='?', - choices=('full', 'thin', 'cross'), - help='Enable LTO') - -option(env='MOZ_LD64_KNOWN_GOOD', - nargs=1, - help='Indicate that ld64 is free of symbol aliasing bugs.') - -imply_option('MOZ_LD64_KNOWN_GOOD', depends_if('MOZ_AUTOMATION')(lambda _: True)) - -@depends('--enable-lto', c_compiler, 'MOZ_LD64_KNOWN_GOOD', target, '--enable-profile-generate') -@imports('multiprocessing') +option( + "--enable-lto", + env="MOZ_LTO", + nargs="?", + choices=("full", "thin", "cross"), + help="Enable LTO", +) + +option( + env="MOZ_LD64_KNOWN_GOOD", + nargs=1, + help="Indicate that ld64 is free of symbol aliasing bugs.", +) + +imply_option("MOZ_LD64_KNOWN_GOOD", depends_if("MOZ_AUTOMATION")(lambda _: True)) + + +@depends( + "--enable-lto", + c_compiler, + "MOZ_LD64_KNOWN_GOOD", + target, + "--enable-profile-generate", +) +@imports("multiprocessing") def lto(value, c_compiler, ld64_known_good, target, instrumented_build): cflags = [] ldflags = [] @@ -170,41 +196,48 @@ def lto(value, c_compiler, ld64_known_good, target, instrumented_build): if value: if instrumented_build: - log.warning('Disabling LTO because --enable-profile-generate is specified') + log.warning("Disabling LTO because --enable-profile-generate is specified") return enabled = True # `cross` implies `thin`, but with Rust code participating in LTO # as well. Make that a little more explicit. 
- if len(value) and value[0].lower() == 'cross': - if c_compiler.type == 'gcc': - die('Cross-language LTO is not supported with GCC.') + if len(value) and value[0].lower() == "cross": + if c_compiler.type == "gcc": + die("Cross-language LTO is not supported with GCC.") rust_lto = True - value = ['thin'] - - if target.kernel == 'Darwin' and target.os == 'OSX' \ - and len(value) and value[0].lower() == 'cross' and not ld64_known_good: - die('The Mac linker is known to have a bug that affects cross-language ' - 'LTO. If you know that your linker is free from this bug, please ' - 'set the environment variable `MOZ_LD64_KNOWN_GOOD=1` and re-run ' - 'configure.') - - if c_compiler.type == 'clang': - if len(value) and value[0].lower() == 'full': + value = ["thin"] + + if ( + target.kernel == "Darwin" + and target.os == "OSX" + and len(value) + and value[0].lower() == "cross" + and not ld64_known_good + ): + die( + "The Mac linker is known to have a bug that affects cross-language " + "LTO. If you know that your linker is free from this bug, please " + "set the environment variable `MOZ_LD64_KNOWN_GOOD=1` and re-run " + "configure." + ) + + if c_compiler.type == "clang": + if len(value) and value[0].lower() == "full": cflags.append("-flto") ldflags.append("-flto") else: cflags.append("-flto=thin") ldflags.append("-flto=thin") - elif c_compiler.type == 'clang-cl': - if len(value) and value[0].lower() == 'full': + elif c_compiler.type == "clang-cl": + if len(value) and value[0].lower() == "full": cflags.append("-flto") else: cflags.append("-flto=thin") # With clang-cl, -flto can only be used with -c or -fuse-ld=lld. # AC_TRY_LINKs during configure don't have -c, so pass -fuse-ld=lld. - cflags.append("-fuse-ld=lld"); + cflags.append("-fuse-ld=lld") # Explicitly set the CPU to optimize for so the linker doesn't # choose a poor default. Rust compilation by default uses the @@ -220,19 +253,19 @@ def lto(value, c_compiler, ld64_known_good, target, instrumented_build): # specifically schedule code for the pentium4's deep pipeline, so # that probably contributes to it being an OK default for our # purposes. - if target.cpu == 'x86': - ldflags.append('-mllvm:-mcpu=pentium4') + if target.cpu == "x86": + ldflags.append("-mllvm:-mcpu=pentium4") # This is also the CPU that Rust uses. The LLVM source code # recommends this as the "generic 64-bit specific x86 processor model": # # https://github.com/llvm/llvm-project/blob/e7694f34ab6a12b8bb480cbfcb396d0a64fe965f/llvm/lib/Target/X86/X86.td#L1165-L1187 - if target.cpu == 'x86_64': - ldflags.append('-mllvm:-mcpu=x86-64') + if target.cpu == "x86_64": + ldflags.append("-mllvm:-mcpu=x86-64") # We do not need special flags for arm64. Hooray for fixed-length # instruction sets. else: num_cores = multiprocessing.cpu_count() - if len(value) and value[0].lower() == 'full': + if len(value) and value[0].lower() == "full": cflags.append("-flto") else: cflags.append("-flto=thin") @@ -244,12 +277,12 @@ def lto(value, c_compiler, ld64_known_good, target, instrumented_build): # Tell LTO not to inline functions above a certain size, to mitigate # binary size growth while still getting good performance. # (For hot functions, PGO will put a multiplier on this limit.) 
- if target.os == 'WINNT': - ldflags.append('-mllvm:-import-instr-limit=10'); - elif target.os == 'OSX': - ldflags.append('-Wl,-mllvm,-import-instr-limit=10'); - elif c_compiler.type == 'clang': - ldflags.append('-Wl,-plugin-opt=-import-instr-limit=10'); + if target.os == "WINNT": + ldflags.append("-mllvm:-import-instr-limit=10") + elif target.os == "OSX": + ldflags.append("-Wl,-mllvm,-import-instr-limit=10") + elif c_compiler.type == "clang": + ldflags.append("-Wl,-plugin-opt=-import-instr-limit=10") return namespace( enabled=enabled, @@ -259,11 +292,11 @@ def lto(value, c_compiler, ld64_known_good, target, instrumented_build): ) -add_old_configure_assignment('MOZ_LTO', lto.enabled) -set_config('MOZ_LTO', lto.enabled) -set_define('MOZ_LTO', lto.enabled) -set_config('MOZ_LTO_CFLAGS', lto.cflags) -set_config('MOZ_LTO_LDFLAGS', lto.ldflags) -set_config('MOZ_LTO_RUST_CROSS', lto.rust_lto) -add_old_configure_assignment('MOZ_LTO_CFLAGS', lto.cflags) -add_old_configure_assignment('MOZ_LTO_LDFLAGS', lto.ldflags) +add_old_configure_assignment("MOZ_LTO", lto.enabled) +set_config("MOZ_LTO", lto.enabled) +set_define("MOZ_LTO", lto.enabled) +set_config("MOZ_LTO_CFLAGS", lto.cflags) +set_config("MOZ_LTO_LDFLAGS", lto.ldflags) +set_config("MOZ_LTO_RUST_CROSS", lto.rust_lto) +add_old_configure_assignment("MOZ_LTO_CFLAGS", lto.cflags) +add_old_configure_assignment("MOZ_LTO_LDFLAGS", lto.ldflags) diff --git a/build/moz.configure/memory.configure b/build/moz.configure/memory.configure index b0f8548fb78b86..855706b457daca 100644 --- a/build/moz.configure/memory.configure +++ b/build/moz.configure/memory.configure @@ -7,55 +7,68 @@ @depends(target) def jemalloc_default(target): - return target.kernel in ('Darwin', 'Linux', 'WINNT') + return target.kernel in ("Darwin", "Linux", "WINNT") -option('--enable-jemalloc', env='MOZ_MEMORY', default=jemalloc_default, - help='{Replace|Do not replace} memory allocator with jemalloc') +option( + "--enable-jemalloc", + env="MOZ_MEMORY", + default=jemalloc_default, + help="{Replace|Do not replace} memory allocator with jemalloc", +) -set_config('MOZ_MEMORY', True, when='--enable-jemalloc') -set_define('MOZ_MEMORY', True, when='--enable-jemalloc') -add_old_configure_assignment('MOZ_MEMORY', True, when='--enable-jemalloc') +set_config("MOZ_MEMORY", True, when="--enable-jemalloc") +set_define("MOZ_MEMORY", True, when="--enable-jemalloc") +add_old_configure_assignment("MOZ_MEMORY", True, when="--enable-jemalloc") @depends(milestone, build_project) def replace_malloc_default(milestone, build_project): - if build_project == 'memory': + if build_project == "memory": return True - if milestone.is_early_beta_or_earlier and build_project != 'js': + if milestone.is_early_beta_or_earlier and build_project != "js": return True -option('--enable-replace-malloc', default=replace_malloc_default, - when='--enable-jemalloc', - help='{Enable|Disable} ability to dynamically replace the malloc implementation') +option( + "--enable-replace-malloc", + default=replace_malloc_default, + when="--enable-jemalloc", + help="{Enable|Disable} ability to dynamically replace the malloc implementation", +) -set_config('MOZ_REPLACE_MALLOC', True, when='--enable-replace-malloc') -set_define('MOZ_REPLACE_MALLOC', True, when='--enable-replace-malloc') +set_config("MOZ_REPLACE_MALLOC", True, when="--enable-replace-malloc") +set_define("MOZ_REPLACE_MALLOC", True, when="--enable-replace-malloc") -@depends(build_project, when='--enable-replace-malloc') +@depends(build_project, when="--enable-replace-malloc") def 
replace_malloc_static(build_project): # Default to statically linking replace-malloc libraries that can be # statically linked, except when building with --enable-project=memory. - if build_project != 'memory': + if build_project != "memory": return True -set_config('MOZ_REPLACE_MALLOC_STATIC', replace_malloc_static) +set_config("MOZ_REPLACE_MALLOC_STATIC", replace_malloc_static) # PHC (Probabilistic Heap Checker) # ============================================================== # In general, it only makes sense for PHC to run on the platforms that have a # crash reporter. -@depends(milestone, target, replace_malloc_default, '--enable-replace-malloc', - when='--enable-jemalloc') +@depends( + milestone, + target, + replace_malloc_default, + "--enable-replace-malloc", + when="--enable-jemalloc", +) def phc_default(milestone, target, replace_malloc_default, replace_malloc): - if not replace_malloc_default or \ - (replace_malloc.origin != 'default' and not replace_malloc): + if not replace_malloc_default or ( + replace_malloc.origin != "default" and not replace_malloc + ): return False # Nightly or early beta only because PHC has a non-negligible performance cost. if not milestone.is_early_beta_or_earlier: @@ -65,18 +78,21 @@ def phc_default(milestone, target, replace_malloc_default, replace_malloc): # # XXX: PHC is implemented but not yet enabled on Mac. Bug 1576515 is about # enabling it on Mac, but it is blocked by bug 1035892. - return (target.os == 'GNU' and target.kernel == 'Linux' and - target.bitness == 64) or \ - (target.kernel == 'WINNT' and target.bitness == 64) + return ( + target.os == "GNU" and target.kernel == "Linux" and target.bitness == 64 + ) or (target.kernel == "WINNT" and target.bitness == 64) -option('--enable-phc', env='MOZ_PHC', default=phc_default, - when='--enable-jemalloc', - help='{Enable|Disable} PHC (Probabilistic Memory Checker). ' - 'Also enables replace-malloc and frame pointers') -imply_option('--enable-replace-malloc', True, when='--enable-phc') -imply_option('--enable-frame-pointers', True, when='--enable-phc') +option( + "--enable-phc", + env="MOZ_PHC", + default=phc_default, + when="--enable-jemalloc", + help="{Enable|Disable} PHC (Probabilistic Memory Checker). " + "Also enables replace-malloc and frame pointers", +) +imply_option("--enable-replace-malloc", True, when="--enable-phc") +imply_option("--enable-frame-pointers", True, when="--enable-phc") -set_config('MOZ_PHC', True, when='--enable-phc') - +set_config("MOZ_PHC", True, when="--enable-phc") diff --git a/build/moz.configure/node.configure b/build/moz.configure/node.configure index 7e87a24424d38e..1c8d55fa070f9e 100644 --- a/build/moz.configure/node.configure +++ b/build/moz.configure/node.configure @@ -4,45 +4,49 @@ # License, v. 2.0. If a copy of the MPL was not distributed with this # file, You can obtain one at http://mozilla.org/MPL/2.0/. 
-option('--disable-nodejs', - help='Require Node.js to build') -option(env='NODEJS', nargs=1, help='Path to nodejs') +option("--disable-nodejs", help="Require Node.js to build") +option(env="NODEJS", nargs=1, help="Path to nodejs") -@depends('--enable-nodejs', 'NODEJS') -@checking('for nodejs', - callback=lambda x: '%s (%s)' % (x.path, x.str_version) if x else 'no') -@imports(_from='mozbuild.nodeutil', _import='find_node_executable') -@imports(_from='mozbuild.nodeutil', _import='NODE_MIN_VERSION') +@depends("--enable-nodejs", "NODEJS") +@checking( + "for nodejs", callback=lambda x: "%s (%s)" % (x.path, x.str_version) if x else "no" +) +@imports(_from="mozbuild.nodeutil", _import="find_node_executable") +@imports(_from="mozbuild.nodeutil", _import="NODE_MIN_VERSION") def nodejs(require, env_node): node_exe = env_node[0] if env_node else None nodejs, version = find_node_executable(node_exe) - MAYBE_FILE_A_BUG = ''' + MAYBE_FILE_A_BUG = """ Executing `mach bootstrap --no-system-changes` should install a compatible version in ~/.mozbuild on most platforms. If you believe this is a bug, is a good way to file. More details: - ''' + """ if not nodejs: - msg = ('could not find Node.js executable later than %s; ensure ' - '`node` or `nodejs` is in PATH or set NODEJS in environment ' - 'to point to an executable.%s' % (NODE_MIN_VERSION, MAYBE_FILE_A_BUG) - ) + msg = ( + "could not find Node.js executable later than %s; ensure " + "`node` or `nodejs` is in PATH or set NODEJS in environment " + "to point to an executable.%s" % (NODE_MIN_VERSION, MAYBE_FILE_A_BUG) + ) if require: raise FatalCheckError(msg) else: log.warning(msg) - log.warning('(This will become an error in the near future.)') + log.warning("(This will become an error in the near future.)") return if not version: - msg = 'NODEJS must point to node %s or newer; found node location: %s. %s' % ( - NODE_MIN_VERSION, nodejs, MAYBE_FILE_A_BUG) + msg = "NODEJS must point to node %s or newer; found node location: %s. %s" % ( + NODE_MIN_VERSION, + nodejs, + MAYBE_FILE_A_BUG, + ) if require: raise FatalCheckError(msg) @@ -53,8 +57,8 @@ def nodejs(require, env_node): return namespace( path=nodejs, version=version, - str_version='.'.join(str(v) for v in version), + str_version=".".join(str(v) for v in version), ) -set_config('NODEJS', depends_if(nodejs)(lambda p: p.path)) +set_config("NODEJS", depends_if(nodejs)(lambda p: p.path)) diff --git a/build/moz.configure/nspr.configure b/build/moz.configure/nspr.configure index 0d6f73fff89d40..2b21a66f03b269 100644 --- a/build/moz.configure/nspr.configure +++ b/build/moz.configure/nspr.configure @@ -6,54 +6,66 @@ # Top-level configure defaults to building NSPR from source. Standalone JS # doesn't. 
-option('--enable-nspr-build', when=js_standalone, - help='{Build|Do not build} NSPR from source tree') +option( + "--enable-nspr-build", + when=js_standalone, + help="{Build|Do not build} NSPR from source tree", +) -@depends('--enable-nspr-build', when=js_standalone) + +@depends("--enable-nspr-build", when=js_standalone) def enable_nspr_build(enable): if enable: return enable -option('--with-system-nspr', help='Use system NSPR') -@depends(enable_nspr_build, '--with-system-nspr', js_standalone) +option("--with-system-nspr", help="Use system NSPR") + + +@depends(enable_nspr_build, "--with-system-nspr", js_standalone) def build_nspr(nspr_build, system_nspr, js_standalone): - if nspr_build is not None and nspr_build.origin != 'default': + if nspr_build is not None and nspr_build.origin != "default": if nspr_build and system_nspr: - die('Cannot use both --enable-nspr-build and --with-system-nspr') + die("Cannot use both --enable-nspr-build and --with-system-nspr") if js_standalone: return nspr_build return not system_nspr -set_config('MOZ_BUILD_NSPR', True, when=build_nspr) -set_config('MOZ_SYSTEM_NSPR', True, when='--with-system-nspr') -@depends(build_nspr, '--with-system-nspr', js_standalone) +set_config("MOZ_BUILD_NSPR", True, when=build_nspr) +set_config("MOZ_SYSTEM_NSPR", True, when="--with-system-nspr") + + +@depends(build_nspr, "--with-system-nspr", js_standalone) def js_without_nspr(build_nspr, system_nspr, js_standalone): if js_standalone: return not build_nspr and not system_nspr -set_config('JS_WITHOUT_NSPR', True, when=js_without_nspr) -set_define('JS_WITHOUT_NSPR', True, when=js_without_nspr) + +set_config("JS_WITHOUT_NSPR", True, when=js_without_nspr) +set_define("JS_WITHOUT_NSPR", True, when=js_without_nspr) + @depends(js_standalone) def nspr_minver(js_standalone): if js_standalone: - return 'nspr >= 4.10' - return 'nspr >= 4.26' + return "nspr >= 4.10" + return "nspr >= 4.26" + + +nspr_pkg = pkg_check_modules("NSPR", nspr_minver, when="--with-system-nspr") -nspr_pkg = pkg_check_modules('NSPR', nspr_minver, when='--with-system-nspr') @depends_if(nspr_pkg) def nspr_pkg(nspr_pkg): def extract(prefix, list): for item in list: if item.startswith(prefix): - return item[len(prefix):] - return '' + return item[len(prefix) :] + return "" - include_dir = extract('-I', nspr_pkg.cflags) - lib_dir = extract('-L', nspr_pkg.libs) + include_dir = extract("-I", nspr_pkg.cflags) + lib_dir = extract("-L", nspr_pkg.libs) return namespace( cflags=nspr_pkg.cflags, include_dir=include_dir, @@ -61,42 +73,45 @@ def nspr_pkg(nspr_pkg): lib_dir=lib_dir, ) -@depends('--with-system-nspr', nspr_minver) + +@depends("--with-system-nspr", nspr_minver) def pkgconf_requires_private(system_nspr, nspr_minver): if not system_nspr: - return '' - return 'Requires.private: %s' % nspr_minver + return "" + return "Requires.private: %s" % nspr_minver + -set_config('PKGCONF_REQUIRES_PRIVATE', pkgconf_requires_private) +set_config("PKGCONF_REQUIRES_PRIVATE", pkgconf_requires_private) # pkg_check_modules takes care of NSPR_CFLAGS and NSPR_LIBS when using --with-system-nspr. 
@depends(check_build_environment, c_compiler, fold_libs, when=build_nspr) def nspr_config(build_env, c_compiler, fold_libs): - libs = ['nspr4', 'plc4', 'plds4'] - if c_compiler.type == 'clang-cl': - lib_dir = os.path.join(build_env.dist, 'lib') - libs = [os.path.join(lib_dir, '%s.lib' % lib) for lib in libs] + libs = ["nspr4", "plc4", "plds4"] + if c_compiler.type == "clang-cl": + lib_dir = os.path.join(build_env.dist, "lib") + libs = [os.path.join(lib_dir, "%s.lib" % lib) for lib in libs] else: - lib_dir = os.path.join(build_env.dist, 'lib' if fold_libs else 'bin') - libs = ['-L%s' % lib_dir] + ['-l%s' % lib for lib in libs] + lib_dir = os.path.join(build_env.dist, "lib" if fold_libs else "bin") + libs = ["-L%s" % lib_dir] + ["-l%s" % lib for lib in libs] - include_dir = os.path.join(build_env.dist, 'include', 'nspr') + include_dir = os.path.join(build_env.dist, "include", "nspr") return namespace( - cflags=['-I%s' % include_dir], + cflags=["-I%s" % include_dir], include_dir=include_dir, libs=libs, lib_dir=lib_dir, ) -set_config('NSPR_CFLAGS', nspr_config.cflags, when=nspr_config) -set_config('NSPR_LIBS', nspr_config.libs, when=nspr_config) -set_config('NSPR_INCLUDE_DIR', nspr_config.include_dir, when=nspr_config) -set_config('NSPR_LIB_DIR', nspr_config.lib_dir, when=nspr_config) -set_config('NSPR_INCLUDE_DIR', nspr_pkg.include_dir, when=nspr_pkg) -set_config('NSPR_LIB_DIR', nspr_pkg.lib_dir, when=nspr_pkg) +set_config("NSPR_CFLAGS", nspr_config.cflags, when=nspr_config) +set_config("NSPR_LIBS", nspr_config.libs, when=nspr_config) + +set_config("NSPR_INCLUDE_DIR", nspr_config.include_dir, when=nspr_config) +set_config("NSPR_LIB_DIR", nspr_config.lib_dir, when=nspr_config) +set_config("NSPR_INCLUDE_DIR", nspr_pkg.include_dir, when=nspr_pkg) +set_config("NSPR_LIB_DIR", nspr_pkg.lib_dir, when=nspr_pkg) -add_old_configure_assignment('NSPR_CFLAGS', nspr_config.cflags, when=nspr_config) -add_old_configure_assignment('NSPR_LIBS', nspr_config.libs, when=nspr_config) -add_old_configure_assignment('NSPR_CFLAGS', nspr_pkg.cflags, when=nspr_pkg) -add_old_configure_assignment('NSPR_LIBS', nspr_pkg.libs, when=nspr_pkg) +add_old_configure_assignment("NSPR_CFLAGS", nspr_config.cflags, when=nspr_config) +add_old_configure_assignment("NSPR_LIBS", nspr_config.libs, when=nspr_config) +add_old_configure_assignment("NSPR_CFLAGS", nspr_pkg.cflags, when=nspr_pkg) +add_old_configure_assignment("NSPR_LIBS", nspr_pkg.libs, when=nspr_pkg) diff --git a/build/moz.configure/nss.configure b/build/moz.configure/nss.configure index b239f0532d3e1a..f3ac215885d432 100644 --- a/build/moz.configure/nss.configure +++ b/build/moz.configure/nss.configure @@ -4,18 +4,20 @@ # License, v. 2.0. If a copy of the MPL was not distributed with this # file, You can obtain one at http://mozilla.org/MPL/2.0/. 
-option('--with-system-nss', help='Use system NSS') +option("--with-system-nss", help="Use system NSS") -imply_option('--with-system-nspr', True, when='--with-system-nss') +imply_option("--with-system-nspr", True, when="--with-system-nss") -nss_pkg = pkg_check_modules('NSS', 'nss >= 3.59', when='--with-system-nss', config=False) +nss_pkg = pkg_check_modules( + "NSS", "nss >= 3.59", when="--with-system-nss", config=False +) -set_config('MOZ_SYSTEM_NSS', True, when='--with-system-nss') +set_config("MOZ_SYSTEM_NSS", True, when="--with-system-nss") @depends(nss_pkg, check_build_environment) def nss_config(nss_pkg, build_env): - cflags = ['-I%s' % os.path.join(build_env.dist, 'include', 'nss')] + cflags = ["-I%s" % os.path.join(build_env.dist, "include", "nss")] libs = None if nss_pkg: cflags = list(nss_pkg.cflags) + cflags @@ -23,6 +25,6 @@ def nss_config(nss_pkg, build_env): return namespace(cflags=cflags, libs=libs) -set_config('NSS_CFLAGS', nss_config.cflags) -set_config('NSS_LIBS', nss_config.libs) -add_old_configure_assignment('NSS_CFLAGS', nss_config.cflags) +set_config("NSS_CFLAGS", nss_config.cflags) +set_config("NSS_LIBS", nss_config.libs) +add_old_configure_assignment("NSS_CFLAGS", nss_config.cflags) diff --git a/build/moz.configure/old.configure b/build/moz.configure/old.configure index 8c807c0794fab7..4c4987e1d4e9ff 100644 --- a/build/moz.configure/old.configure +++ b/build/moz.configure/old.configure @@ -5,113 +5,115 @@ # file, You can obtain one at http://mozilla.org/MPL/2.0/. -m4 = check_prog('M4', ('m4',)) +m4 = check_prog("M4", ("m4",)) @depends(mozconfig) def prepare_mozconfig(mozconfig): - if mozconfig['path']: + if mozconfig["path"]: items = {} - for key, value in mozconfig['vars']['added'].items(): - items[key] = (value, 'added') - for key, (old, value) in mozconfig['vars']['modified'].items(): - items[key] = (value, 'modified') - for t in ('env', 'vars'): - for key in mozconfig[t]['removed'].keys(): - items[key] = (None, 'removed ' + t) + for key, value in mozconfig["vars"]["added"].items(): + items[key] = (value, "added") + for key, (old, value) in mozconfig["vars"]["modified"].items(): + items[key] = (value, "modified") + for t in ("env", "vars"): + for key in mozconfig[t]["removed"].keys(): + items[key] = (None, "removed " + t) return items -@depends('OLD_CONFIGURE', build_project) +@depends("OLD_CONFIGURE", build_project) def old_configure(old_configure, build_project): # os.path.abspath in the sandbox will ensure forward slashes on Windows, # which is actually necessary because this path actually ends up literally # as $0, and backslashes there breaks autoconf's detection of the source # directory. 
old_configure = os.path.abspath(old_configure[0]) - if build_project == 'js': + if build_project == "js": old_configure_dir = os.path.dirname(old_configure) - if not old_configure_dir.endswith('/js/src'): - old_configure = os.path.join(old_configure_dir, 'js', 'src', - os.path.basename(old_configure)) + if not old_configure_dir.endswith("/js/src"): + old_configure = os.path.join( + old_configure_dir, "js", "src", os.path.basename(old_configure) + ) return old_configure @depends(prepare_mozconfig, old_configure_assignments) -@imports(_from='__builtin__', _import='open') -@imports(_from='__builtin__', _import='print') -@imports(_from='__builtin__', _import='sorted') -@imports(_from='mozbuild.shellutil', _import='quote') +@imports(_from="__builtin__", _import="open") +@imports(_from="__builtin__", _import="print") +@imports(_from="__builtin__", _import="sorted") +@imports(_from="mozbuild.shellutil", _import="quote") def prepare_configure(mozconfig, old_configure_assignments): - with open('old-configure.vars', 'w') as out: - log.debug('Injecting the following to old-configure:') + with open("old-configure.vars", "w") as out: + log.debug("Injecting the following to old-configure:") def inject(command): - print(command, file=out) # noqa Python 2vs3 - log.debug('| %s', command) + print(command, file=out) # noqa Python 2vs3 + log.debug("| %s", command) if mozconfig: - inject('# start of mozconfig values') + inject("# start of mozconfig values") for key, (value, action) in sorted(mozconfig.items()): - if action.startswith('removed '): - inject("unset %s # from %s" % ( - key, action[len('removed '):])) + if action.startswith("removed "): + inject("unset %s # from %s" % (key, action[len("removed ") :])) else: inject("%s=%s # %s" % (key, quote(value), action)) - inject('# end of mozconfig values') + inject("# end of mozconfig values") for k, v in old_configure_assignments: - inject('%s=%s' % (k, quote(v))) + inject("%s=%s" % (k, quote(v))) @template def old_configure_options(*options): for opt in options: - option(opt, nargs='*', help='Help missing for old configure options') + option(opt, nargs="*", help="Help missing for old configure options") @dependable def all_options(): return list(options) - return depends(host_for_sub_configure, target_for_sub_configure, all_options, *options) + return depends( + host_for_sub_configure, target_for_sub_configure, all_options, *options + ) @old_configure_options( - '--cache-file', - '--datadir', - '--enable-crashreporter', - '--enable-dbus', - '--enable-debug-js-modules', - '--enable-dump-painting', - '--enable-extensions', - '--enable-libproxy', - '--enable-logrefcnt', - '--enable-necko-wifi', - '--enable-negotiateauth', - '--enable-official-branding', - '--enable-parental-controls', - '--enable-sandbox', - '--enable-system-cairo', - '--enable-system-extension-dirs', - '--enable-system-pixman', - '--enable-universalchardet', - '--enable-updater', - '--enable-xul', - '--enable-zipwriter', - '--includedir', - '--libdir', - '--prefix', - '--with-android-max-sdk', - '--with-android-min-sdk', - '--with-branding', - '--with-distribution-id', - '--with-macbundlename-prefix', - '--with-system-libevent', - '--with-system-png', - '--with-user-appdir', - '--x-includes', - '--x-libraries', + "--cache-file", + "--datadir", + "--enable-crashreporter", + "--enable-dbus", + "--enable-debug-js-modules", + "--enable-dump-painting", + "--enable-extensions", + "--enable-libproxy", + "--enable-logrefcnt", + "--enable-necko-wifi", + "--enable-negotiateauth", + 
"--enable-official-branding", + "--enable-parental-controls", + "--enable-sandbox", + "--enable-system-cairo", + "--enable-system-extension-dirs", + "--enable-system-pixman", + "--enable-universalchardet", + "--enable-updater", + "--enable-xul", + "--enable-zipwriter", + "--includedir", + "--libdir", + "--prefix", + "--with-android-max-sdk", + "--with-android-min-sdk", + "--with-branding", + "--with-distribution-id", + "--with-macbundlename-prefix", + "--with-system-libevent", + "--with-system-png", + "--with-user-appdir", + "--x-includes", + "--x-libraries", ) def prepare_configure_options(host, target, all_options, *options): # old-configure only supports the options listed in @old_configure_options @@ -120,7 +122,7 @@ def prepare_configure_options(host, target, all_options, *options): options = [ value.format(name) for name, value in zip(all_options, options) - if value.origin != 'default' + if value.origin != "default" ] + [host, target] return namespace(options=options, all_options=all_options) @@ -131,36 +133,57 @@ def old_configure_for(old_configure_path, extra_env=None): if extra_env is None: extra_env = dependable(None) - @depends(prepare_configure, prepare_configure_options, altered_path, extra_env, - check_build_environment, old_configure_path, 'MOZILLABUILD', awk, m4, shell) - @imports(_from='__builtin__', _import='compile') - @imports(_from='__builtin__', _import='open') - @imports(_from='__builtin__', _import='OSError') - @imports('glob') - @imports('itertools') - @imports('logging') - @imports('os') - @imports('subprocess') - @imports('sys') - @imports(_from='mozbuild.shellutil', _import='quote') - @imports(_from='mozbuild.shellutil', _import='split') - @imports(_from='tempfile', _import='NamedTemporaryFile') - @imports(_from='subprocess', _import='CalledProcessError') - @imports(_from='six', _import='exec_') - @imports(_from='six', _import='iteritems') - @imports(_from='six', _import='string_types') - def old_configure(prepare_configure, prepare_configure_options, altered_path, extra_env, - build_env, old_configure, mozillabuild, awk, m4, shell): + @depends( + prepare_configure, + prepare_configure_options, + altered_path, + extra_env, + check_build_environment, + old_configure_path, + "MOZILLABUILD", + awk, + m4, + shell, + ) + @imports(_from="__builtin__", _import="compile") + @imports(_from="__builtin__", _import="open") + @imports(_from="__builtin__", _import="OSError") + @imports("glob") + @imports("itertools") + @imports("logging") + @imports("os") + @imports("subprocess") + @imports("sys") + @imports(_from="mozbuild.shellutil", _import="quote") + @imports(_from="mozbuild.shellutil", _import="split") + @imports(_from="tempfile", _import="NamedTemporaryFile") + @imports(_from="subprocess", _import="CalledProcessError") + @imports(_from="six", _import="exec_") + @imports(_from="six", _import="iteritems") + @imports(_from="six", _import="string_types") + def old_configure( + prepare_configure, + prepare_configure_options, + altered_path, + extra_env, + build_env, + old_configure, + mozillabuild, + awk, + m4, + shell, + ): # Use prepare_configure to make lint happy prepare_configure refresh = True if os.path.exists(old_configure): mtime = os.path.getmtime(old_configure) - aclocal = os.path.join(build_env.topsrcdir, 'build', 'autoconf', - '*.m4') + aclocal = os.path.join(build_env.topsrcdir, "build", "autoconf", "*.m4") for input in itertools.chain( - (old_configure + '.in', - os.path.join(os.path.dirname(old_configure), 'aclocal.m4')), + ( + old_configure + ".in", + 
os.path.join(os.path.dirname(old_configure), "aclocal.m4"), + ), glob.iglob(aclocal), ): if os.path.getmtime(input) > mtime: @@ -169,40 +192,54 @@ def old_configure_for(old_configure_path, extra_env=None): refresh = False if refresh: - autoconf = os.path.join(build_env.topsrcdir, 'build', 'autoconf', 'autoconf.sh') - log.info('Refreshing %s with %s', old_configure, autoconf) + autoconf = os.path.join( + build_env.topsrcdir, "build", "autoconf", "autoconf.sh" + ) + log.info("Refreshing %s with %s", old_configure, autoconf) env = dict(os.environ) - env['M4'] = m4 - env['AWK'] = awk - env['AC_MACRODIR'] = os.path.join(build_env.topsrcdir, 'build', 'autoconf') + env["M4"] = m4 + env["AWK"] = awk + env["AC_MACRODIR"] = os.path.join(build_env.topsrcdir, "build", "autoconf") try: - script = subprocess.check_output([ - shell, autoconf, - '--localdir=%s' % os.path.dirname(old_configure), - old_configure + '.in'], env=env) + script = subprocess.check_output( + [ + shell, + autoconf, + "--localdir=%s" % os.path.dirname(old_configure), + old_configure + ".in", + ], + env=env, + ) except CalledProcessError as exc: # Autoconf on win32 may break due to a bad $PATH. Let the user know # their $PATH is suspect. if mozillabuild: mozillabuild_path = normsep(mozillabuild[0]) - sh_path = normsep(find_program('sh')) + sh_path = normsep(find_program("sh")) if mozillabuild_path not in sh_path: - log.warning("The '{}msys/bin' directory is not first in $PATH. " - "This may cause autoconf to fail. ($PATH is currently " - "set to: {})".format(mozillabuild_path, os.environ[ - 'PATH'])) - die('autoconf exited with return code {}'.format(exc.returncode)) + log.warning( + "The '{}msys/bin' directory is not first in $PATH. " + "This may cause autoconf to fail. ($PATH is currently " + "set to: {})".format(mozillabuild_path, os.environ["PATH"]) + ) + die("autoconf exited with return code {}".format(exc.returncode)) if not script: - die('Generated old-configure is empty! Check that your autoconf 2.13 program works!') + die( + "Generated old-configure is empty! Check that your autoconf 2.13 program works!" + ) # Make old-configure append to config.log, where we put our own log. # This could be done with a m4 macro, but it's way easier this way - script = script.replace(b'>./config.log', b'>>${CONFIG_LOG=./config.log}') - - with NamedTemporaryFile(mode='wb', prefix=os.path.basename(old_configure), - dir=os.path.dirname(old_configure), delete=False) as fh: + script = script.replace(b">./config.log", b">>${CONFIG_LOG=./config.log}") + + with NamedTemporaryFile( + mode="wb", + prefix=os.path.basename(old_configure), + dir=os.path.dirname(old_configure), + delete=False, + ) as fh: fh.write(script) try: @@ -213,37 +250,40 @@ def old_configure_for(old_configure_path, extra_env=None): os.remove(old_configure) os.rename(fh.name, old_configure) except OSError as e: - die('Failed re-creating old-configure: %s' % e.message) + die("Failed re-creating old-configure: %s" % e.message) cmd = [shell, old_configure] + prepare_configure_options.options env = dict(os.environ) # For debugging purpose, in case it's not what we'd expect. - log.debug('Running %s', quote(*cmd)) + log.debug("Running %s", quote(*cmd)) # Our logging goes to config.log, the same file old.configure uses. # We can't share the handle on the file, so close it. 
- logger = logging.getLogger('moz.configure') + logger = logging.getLogger("moz.configure") config_log = None for handler in logger.handlers: if isinstance(handler, logging.FileHandler): config_log = handler config_log.close() logger.removeHandler(config_log) - env['CONFIG_LOG'] = config_log.baseFilename + env["CONFIG_LOG"] = config_log.baseFilename log_size = os.path.getsize(config_log.baseFilename) break if altered_path: - env['PATH'] = altered_path + env["PATH"] = altered_path if extra_env: env.update(extra_env) - env['OLD_CONFIGURE_VARS'] = os.path.join(build_env.topobjdir, 'old-configure.vars') - proc = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.STDOUT, - env=env) + env["OLD_CONFIGURE_VARS"] = os.path.join( + build_env.topobjdir, "old-configure.vars" + ) + proc = subprocess.Popen( + cmd, stdout=subprocess.PIPE, stderr=subprocess.STDOUT, env=env + ) while True: line = proc.stdout.readline() if not line: @@ -254,53 +294,58 @@ def old_configure_for(old_configure_path, extra_env=None): if ret: with log.queue_debug(): if config_log: - with open(config_log.baseFilename, 'r') as fh: + with open(config_log.baseFilename, "r") as fh: fh.seek(log_size) for line in fh: log.debug(line.rstrip()) - log.error('old-configure failed') + log.error("old-configure failed") sys.exit(ret) if config_log: # Create a new handler in append mode - handler = logging.FileHandler(config_log.baseFilename, mode='a', delay=True) + handler = logging.FileHandler(config_log.baseFilename, mode="a", delay=True) handler.setFormatter(config_log.formatter) logger.addHandler(handler) raw_config = { - 'split': split, - 'unique_list': unique_list, + "split": split, + "unique_list": unique_list, } - with open('config.data', 'r') as fh: - code = compile(fh.read(), 'config.data', 'exec') + with open("config.data", "r") as fh: + code = compile(fh.read(), "config.data", "exec") exec_(code, raw_config) # Ensure all the flags known to old-configure appear in the # @old_configure_options above. all_options = set(prepare_configure_options.all_options) - for flag in raw_config['flags']: + for flag in raw_config["flags"]: if flag not in all_options: - die('Missing option in `@old_configure_options` in %s: %s', - __file__, flag) + die( + "Missing option in `@old_configure_options` in %s: %s", + __file__, + flag, + ) # If the code execution above fails, we want to keep the file around for # debugging. - os.remove('config.data') - - return namespace(**{ - c: [ - (k[1:-1], v[1:-1] if isinstance(v, string_types) else v) - for k, v in raw_config[c] - ] - for c in ('substs', 'defines') - }) + os.remove("config.data") + + return namespace( + **{ + c: [ + (k[1:-1], v[1:-1] if isinstance(v, string_types) else v) + for k, v in raw_config[c] + ] + for c in ("substs", "defines") + } + ) return old_configure old_configure = old_configure_for(old_configure) -set_config('OLD_CONFIGURE_SUBSTS', old_configure.substs) -set_config('OLD_CONFIGURE_DEFINES', old_configure.defines) +set_config("OLD_CONFIGURE_SUBSTS", old_configure.substs) +set_config("OLD_CONFIGURE_DEFINES", old_configure.defines) # Assuming no other option is declared after this function, handle the @@ -309,17 +354,18 @@ set_config('OLD_CONFIGURE_DEFINES', old_configure.defines) # them. We only do so for options that haven't been declared so far, # which should be a proxy for the options that old-configure handles # and that we don't know anything about. 
-@depends('--help') -@imports('__sandbox__') -@imports(_from='mozbuild.configure.options', _import='Option') +@depends("--help") +@imports("__sandbox__") +@imports(_from="mozbuild.configure.options", _import="Option") def remaining_mozconfig_options(_): helper = __sandbox__._helper for arg in list(helper): - if helper._origins[arg] != 'mozconfig': + if helper._origins[arg] != "mozconfig": continue - name = arg.split('=', 1)[0] + name = arg.split("=", 1)[0] if name.isupper() and name not in __sandbox__._options: - option = Option(env=name, nargs='*', help=name) + option = Option(env=name, nargs="*", help=name) helper.handle(option) + # Please do not add anything after remaining_mozconfig_options() diff --git a/build/moz.configure/pkg.configure b/build/moz.configure/pkg.configure index cbc34409a629dd..20d90f17fdd726 100644 --- a/build/moz.configure/pkg.configure +++ b/build/moz.configure/pkg.configure @@ -7,17 +7,17 @@ @depends(toolchain_prefix, when=compile_environment) def pkg_config(prefixes): - return tuple('{}pkg-config'.format(p) - for p in (prefixes or ()) + ('',)) + return tuple("{}pkg-config".format(p) for p in (prefixes or ()) + ("",)) -pkg_config = check_prog('PKG_CONFIG', pkg_config, allow_missing=True) +pkg_config = check_prog("PKG_CONFIG", pkg_config, allow_missing=True) @depends_if(pkg_config) -@checking('for pkg-config version') +@checking("for pkg-config version") def pkg_config_version(pkg_config): - return Version(check_cmd_output(pkg_config, '--version').rstrip()) + return Version(check_cmd_output(pkg_config, "--version").rstrip()) + # Locates the given module using pkg-config. # - `var` determines the name of variables to set when the package is found. @@ -34,38 +34,41 @@ def pkg_config_version(pkg_config): @template -def pkg_check_modules(var, package_desc, when=always, - allow_missing=False, config=True): +def pkg_check_modules(var, package_desc, when=always, allow_missing=False, config=True): if isinstance(package_desc, (tuple, list)): - package_desc = ' '.join(package_desc) + package_desc = " ".join(package_desc) package_desc = dependable(package_desc) allow_missing = dependable(allow_missing) - @depends(when, '--enable-compile-environment') + @depends(when, "--enable-compile-environment") def when_and_compile_environment(when, compile_environment): return when and compile_environment - @depends(pkg_config, pkg_config_version, - when=when_and_compile_environment) + @depends(pkg_config, pkg_config_version, when=when_and_compile_environment) def check_pkg_config(pkg_config, version): - min_version = '0.9.0' + min_version = "0.9.0" if pkg_config is None: - die("*** The pkg-config script could not be found. Make sure it is\n" + die( + "*** The pkg-config script could not be found. Make sure it is\n" "*** in your path, or set the PKG_CONFIG environment variable\n" - "*** to the full path to pkg-config.") + "*** to the full path to pkg-config." + ) if version < min_version: - die("*** Your version of pkg-config is too old. You need version %s or newer.", - min_version) + die( + "*** Your version of pkg-config is too old. You need version %s or newer.", + min_version, + ) @depends(pkg_config, package_desc, allow_missing, when=when_and_compile_environment) - @imports('sys') - @imports(_from='mozbuild.configure.util', _import='LineIO') + @imports("sys") + @imports(_from="mozbuild.configure.util", _import="LineIO") def package(pkg_config, package_desc, allow_missing): # package_desc may start as a depends function, so we can't use # @checking here. log.info("checking for %s... 
" % package_desc) retcode, stdout, stderr = get_cmd_output( - pkg_config, '--errors-to-stdout', '--print-errors', package_desc) + pkg_config, "--errors-to-stdout", "--print-errors", package_desc + ) if retcode == 0: log.info("yes") return True @@ -77,24 +80,24 @@ def pkg_check_modules(var, package_desc, when=always, sys.exit(1) @depends(pkg_config, package_desc, when=package) - @checking('%s_CFLAGS' % var, callback=lambda t: ' '.join(t)) + @checking("%s_CFLAGS" % var, callback=lambda t: " ".join(t)) def pkg_cflags(pkg_config, package_desc): - flags = check_cmd_output(pkg_config, '--cflags', package_desc) + flags = check_cmd_output(pkg_config, "--cflags", package_desc) return tuple(flags.split()) @depends(pkg_config, package_desc, when=package) - @checking('%s_LIBS' % var, callback=lambda t: ' '.join(t)) + @checking("%s_LIBS" % var, callback=lambda t: " ".join(t)) def pkg_libs(pkg_config, package_desc): - libs = check_cmd_output(pkg_config, '--libs', package_desc) + libs = check_cmd_output(pkg_config, "--libs", package_desc) # Remove evil flags like -Wl,--export-dynamic - return tuple(libs.replace('-Wl,--export-dynamic', '').split()) + return tuple(libs.replace("-Wl,--export-dynamic", "").split()) @depends(pkg_cflags, pkg_libs, when=package) def pkg_info(cflags, libs): return namespace(cflags=cflags, libs=libs) if config: - set_config('%s_CFLAGS' % var, pkg_cflags) - set_config('%s_LIBS' % var, pkg_libs) + set_config("%s_CFLAGS" % var, pkg_cflags) + set_config("%s_LIBS" % var, pkg_libs) return pkg_info diff --git a/build/moz.configure/rust.configure b/build/moz.configure/rust.configure index 0d6d2faf9377ba..f4fcdd60e9325e 100644 --- a/build/moz.configure/rust.configure +++ b/build/moz.configure/rust.configure @@ -7,15 +7,25 @@ # Rust is required by `rust_compiler` below. We allow_missing here # to propagate failures to the better error message there. -option(env='RUSTC', nargs=1, help='Path to the rust compiler') -option(env='CARGO', nargs=1, help='Path to the Cargo package manager') - -rustc = check_prog('_RUSTC', ['rustc'], what='rustc', - paths=toolchain_search_path, input='RUSTC', - allow_missing=True) -cargo = check_prog('_CARGO', ['cargo'], what='cargo', - paths=toolchain_search_path, input='CARGO', - allow_missing=True) +option(env="RUSTC", nargs=1, help="Path to the rust compiler") +option(env="CARGO", nargs=1, help="Path to the Cargo package manager") + +rustc = check_prog( + "_RUSTC", + ["rustc"], + what="rustc", + paths=toolchain_search_path, + input="RUSTC", + allow_missing=True, +) +cargo = check_prog( + "_CARGO", + ["cargo"], + what="cargo", + paths=toolchain_search_path, + input="CARGO", + allow_missing=True, +) @template @@ -41,29 +51,28 @@ def unwrap_rustup(prog, name): # "+stable" file. We'll examine the error output to try and distinguish # between failing rustup and failing rustc. @depends(prog, dependable(name)) - @imports(_from='__builtin__', _import='open') - @imports('os') + @imports(_from="__builtin__", _import="open") + @imports("os") def unwrap(prog, name): if not prog: return def from_rustup_which(): - out = check_cmd_output('rustup', 'which', name, - executable=prog).rstrip() + out = check_cmd_output("rustup", "which", name, executable=prog).rstrip() # If for some reason the above failed to return something, keep the # PROG we found originally. 
if out: - log.info('Actually using \'%s\'', out) + log.info("Actually using '%s'", out) return out - log.info('No `rustup which` output, using \'%s\'', prog) + log.info("No `rustup which` output, using '%s'", prog) return prog - (retcode, stdout, stderr) = get_cmd_output(prog, '+stable') + (retcode, stdout, stderr) = get_cmd_output(prog, "+stable") - if name == 'cargo' and retcode != 101: + if name == "cargo" and retcode != 101: prog = from_rustup_which() - elif name == 'rustc': + elif name == "rustc": if retcode == 0: prog = from_rustup_which() elif "+stable" in stderr: @@ -81,51 +90,56 @@ def unwrap_rustup(prog, name): return unwrap -rustc = unwrap_rustup(rustc, 'rustc') -cargo = unwrap_rustup(cargo, 'cargo') + +rustc = unwrap_rustup(rustc, "rustc") +cargo = unwrap_rustup(cargo, "cargo") -set_config('CARGO', cargo) -set_config('RUSTC', rustc) +set_config("CARGO", cargo) +set_config("RUSTC", rustc) @depends_if(rustc) -@checking('rustc version', lambda info: info.version) +@checking("rustc version", lambda info: info.version) def rustc_info(rustc): if not rustc: return - out = check_cmd_output(rustc, '--version', '--verbose').splitlines() - info = dict((s.strip() for s in line.split(':', 1)) for line in out[1:]) + out = check_cmd_output(rustc, "--version", "--verbose").splitlines() + info = dict((s.strip() for s in line.split(":", 1)) for line in out[1:]) return namespace( - version=Version(info.get('release', '0')), - commit=info.get('commit-hash', 'unknown'), - host=info['host'], - llvm_version=Version(info.get('LLVM version', '0')), + version=Version(info.get("release", "0")), + commit=info.get("commit-hash", "unknown"), + host=info["host"], + llvm_version=Version(info.get("LLVM version", "0")), ) -set_config('RUSTC_VERSION', depends(rustc_info)( - lambda info: str(info.version) if info else None)) + +set_config( + "RUSTC_VERSION", + depends(rustc_info)(lambda info: str(info.version) if info else None), +) + @depends_if(cargo) -@checking('cargo version', lambda info: info.version) -@imports('re') +@checking("cargo version", lambda info: info.version) +@imports("re") def cargo_info(cargo): if not cargo: return - out = check_cmd_output(cargo, '--version', '--verbose').splitlines() - info = dict((s.strip() for s in line.split(':', 1)) for line in out[1:]) - version = info.get('release') + out = check_cmd_output(cargo, "--version", "--verbose").splitlines() + info = dict((s.strip() for s in line.split(":", 1)) for line in out[1:]) + version = info.get("release") # Older versions of cargo didn't support --verbose, in which case, they # only output a not-really-pleasant-to-parse output. Fortunately, they # don't error out, so we can just try some regexp matching on the output # we already got. if version is None: - VERSION_FORMAT = r'^cargo (\d\.\d+\.\d+).*' + VERSION_FORMAT = r"^cargo (\d\.\d+\.\d+).*" m = re.search(VERSION_FORMAT, out[0]) # Fail fast if cargo changes its output on us. 
if not m: - die('Could not determine cargo version from output: %s', out) + die("Could not determine cargo version from output: %s", out) version = m.group(1) return namespace( @@ -134,35 +148,42 @@ def cargo_info(cargo): @depends(rustc_info, cargo_info, build_project, c_compiler, target) -@imports(_from='mozboot.util', _import='MINIMUM_RUST_VERSION') -@imports(_from='textwrap', _import='dedent') +@imports(_from="mozboot.util", _import="MINIMUM_RUST_VERSION") +@imports(_from="textwrap", _import="dedent") def rust_compiler(rustc_info, cargo_info, build_project, c_compiler, target): if not rustc_info: - die(dedent('''\ + die( + dedent( + """\ Rust compiler not found. To compile rust language sources, you must have 'rustc' in your path. See https://www.rust-lang.org/ for more information. You can install rust by running './mach bootstrap' or by directly running the installer from https://rustup.rs/ - ''')) - if build_project == 'tools/crashreporter': - rustc_min_version = Version('1.38.0') - elif target.os == 'WINNT' and c_compiler.type != 'clang-cl': - rustc_min_version = Version('1.44.0') + """ + ) + ) + if build_project == "tools/crashreporter": + rustc_min_version = Version("1.38.0") + elif target.os == "WINNT" and c_compiler.type != "clang-cl": + rustc_min_version = Version("1.44.0") else: rustc_min_version = Version(MINIMUM_RUST_VERSION) cargo_min_version = rustc_min_version version = rustc_info.version - is_nightly = 'nightly' in version.version + is_nightly = "nightly" in version.version is_version_number_match = ( - version.major == rustc_min_version.major and - version.minor == rustc_min_version.minor and - version.patch == rustc_min_version.patch) + version.major == rustc_min_version.major + and version.minor == rustc_min_version.minor + and version.patch == rustc_min_version.patch + ) if version < rustc_min_version or (is_version_number_match and is_nightly): - die(dedent('''\ + die( + dedent( + """\ Rust compiler {} is too old. To compile Rust language sources please install at least @@ -175,48 +196,60 @@ def rust_compiler(rustc_info, cargo_info, build_project, c_compiler, target): If you have the 'rustup' tool installed you can upgrade to the latest release by typing 'rustup update'. The installer is available from https://rustup.rs/ - '''.format(version, rustc_min_version, rustc_min_version))) + """.format( + version, rustc_min_version, rustc_min_version + ) + ) + ) if not cargo_info: - die(dedent('''\ + die( + dedent( + """\ Cargo package manager not found. To compile Rust language sources, you must have 'cargo' in your path. See https://www.rust-lang.org/ for more information. You can install cargo by running './mach bootstrap' or by directly running the installer from https://rustup.rs/ - ''')) + """ + ) + ) version = cargo_info.version if version < cargo_min_version: - die(dedent('''\ + die( + dedent( + """\ Cargo package manager {} is too old. To compile Rust language sources please install at least version {} of 'cargo' and make sure it is first in your path. You can verify this by typing 'cargo --version'. 
- ''').format(version, cargo_min_version)) + """ + ).format(version, cargo_min_version) + ) return True @depends(rustc, when=rust_compiler) -@imports(_from='__builtin__', _import='ValueError') +@imports(_from="__builtin__", _import="ValueError") def rust_supported_targets(rustc): - out = check_cmd_output(rustc, '--print', 'target-list').splitlines() + out = check_cmd_output(rustc, "--print", "target-list").splitlines() data = {} for t in out: try: info = split_triplet(t) except ValueError: - if t.startswith('thumb'): - cpu, rest = t.split('-', 1) - retry = '-'.join(('arm', rest)) - elif t.endswith('-windows-msvc'): - retry = t[:-len('windows-msvc')] + 'mingw32' - elif t.endswith('-windows-gnu'): - retry = t[:-len('windows-gnu')] + 'mingw32' + if t.startswith("thumb"): + cpu, rest = t.split("-", 1) + retry = "-".join(("arm", rest)) + elif t.endswith("-windows-msvc"): + retry = t[: -len("windows-msvc")] + "mingw32" + elif t.endswith("-windows-gnu"): + retry = t[: -len("windows-gnu")] + "mingw32" else: continue try: @@ -228,7 +261,9 @@ def rust_supported_targets(rustc): return data -def detect_rustc_target(host_or_target, compiler_info, arm_target, rust_supported_targets): +def detect_rustc_target( + host_or_target, compiler_info, arm_target, rust_supported_targets +): # Rust's --target options are similar to, but not exactly the same # as, the autoconf-derived targets we use. An example would be that # Rust uses distinct target triples for targetting the GNU C++ ABI @@ -239,7 +274,8 @@ def detect_rustc_target(host_or_target, compiler_info, arm_target, rust_supporte # We correlate the autoconf-derived targets with the list of targets # rustc gives us with --print target-list. candidates = rust_supported_targets.get( - (host_or_target.cpu, host_or_target.endianness, host_or_target.os), []) + (host_or_target.cpu, host_or_target.endianness, host_or_target.os), [] + ) def find_candidate(candidates): if len(candidates) == 1: @@ -250,20 +286,25 @@ def detect_rustc_target(host_or_target, compiler_info, arm_target, rust_supporte # We have multiple candidates. There are two cases where we can try to # narrow further down using extra information from the build system. # - For windows targets, correlate with the C compiler type - if host_or_target.kernel == 'WINNT': - if compiler_info.type in ('gcc', 'clang'): - suffix = 'windows-gnu' + if host_or_target.kernel == "WINNT": + if compiler_info.type in ("gcc", "clang"): + suffix = "windows-gnu" else: - suffix = 'windows-msvc' - narrowed = [c for c in candidates if c.rust_target.endswith('-{}'.format(suffix))] + suffix = "windows-msvc" + narrowed = [ + c for c in candidates if c.rust_target.endswith("-{}".format(suffix)) + ] if len(narrowed) == 1: return narrowed[0].rust_target elif narrowed: candidates = narrowed - vendor_aliases = {'pc': ('w64', 'windows')} - narrowed = [c for c in candidates - if host_or_target.vendor in vendor_aliases.get(c.target.vendor, ())] + vendor_aliases = {"pc": ("w64", "windows")} + narrowed = [ + c + for c in candidates + if host_or_target.vendor in vendor_aliases.get(c.target.vendor, ()) + ] if len(narrowed) == 1: return narrowed[0].rust_target @@ -274,39 +315,40 @@ def detect_rustc_target(host_or_target, compiler_info, arm_target, rust_supporte # Also, sadly, the only interface to check the rust target cpu features # is --print target-spec-json, and it's unstable, so we have to rely on # our own knowledge of what each arm target means. 
- if host_or_target.cpu == 'arm' and host_or_target.endianness == 'little': + if host_or_target.cpu == "arm" and host_or_target.endianness == "little": prefixes = [] if arm_target.arm_arch >= 7: - if arm_target.thumb2 and arm_target.fpu == 'neon': - prefixes.append('thumbv7neon') + if arm_target.thumb2 and arm_target.fpu == "neon": + prefixes.append("thumbv7neon") if arm_target.thumb2: - prefixes.append('thumbv7a') - prefixes.append('armv7') + prefixes.append("thumbv7a") + prefixes.append("armv7") if arm_target.arm_arch >= 6: - prefixes.append('armv6') - if host_or_target.os != 'Android': + prefixes.append("armv6") + if host_or_target.os != "Android": # arm-* rust targets are armv6... except arm-linux-androideabi - prefixes.append('arm') + prefixes.append("arm") if arm_target.arm_arch >= 5: - prefixes.append('armv5te') - if host_or_target.os == 'Android': + prefixes.append("armv5te") + if host_or_target.os == "Android": # arm-* rust targets are armv6... except arm-linux-androideabi - prefixes.append('arm') + prefixes.append("arm") if arm_target.arm_arch >= 4: - prefixes.append('armv4t') + prefixes.append("armv4t") # rust freebsd targets are the only ones that don't have a 'hf' suffix # for hard-float. Technically, that means if the float abi ever is not # hard-float, this will pick a wrong target, but since rust only # supports hard-float, let's assume that means freebsd only support # hard-float. - if arm_target.float_abi == 'hard' and host_or_target.os != 'FreeBSD': - suffix = 'hf' + if arm_target.float_abi == "hard" and host_or_target.os != "FreeBSD": + suffix = "hf" else: - suffix = '' + suffix = "" for p in prefixes: for c in candidates: - if c.rust_target.startswith('{}-'.format(p)) and \ - c.rust_target.endswith(suffix): + if c.rust_target.startswith( + "{}-".format(p) + ) and c.rust_target.endswith(suffix): return c.rust_target # See if we can narrow down on the exact alias @@ -325,18 +367,16 @@ def detect_rustc_target(host_or_target, compiler_info, arm_target, rust_supporte # See if we can narrow down with the raw OS and raw CPU narrowed = [ - c for c in candidates - if c.target.raw_os == host_or_target.raw_os and - c.target.raw_cpu == host_or_target.raw_cpu + c + for c in candidates + if c.target.raw_os == host_or_target.raw_os + and c.target.raw_cpu == host_or_target.raw_cpu ] if len(narrowed) == 1: return narrowed[0].rust_target # Finally, see if the vendor can be used to disambiguate. 
- narrowed = [ - c for c in candidates - if c.target.vendor == host_or_target.vendor - ] + narrowed = [c for c in candidates if c.target.vendor == host_or_target.vendor] if len(narrowed) == 1: return narrowed[0].rust_target @@ -345,28 +385,27 @@ def detect_rustc_target(host_or_target, compiler_info, arm_target, rust_supporte rustc_target = find_candidate(candidates) if rustc_target is None: - die("Don't know how to translate {} for rustc".format( - host_or_target.alias)) + die("Don't know how to translate {} for rustc".format(host_or_target.alias)) return rustc_target -@imports('os') -@imports(_from='six', _import='ensure_binary') -@imports(_from='tempfile', _import='mkstemp') -@imports(_from='textwrap', _import='dedent') -@imports(_from='mozbuild.configure.util', _import='LineIO') +@imports("os") +@imports(_from="six", _import="ensure_binary") +@imports(_from="tempfile", _import="mkstemp") +@imports(_from="textwrap", _import="dedent") +@imports(_from="mozbuild.configure.util", _import="LineIO") def assert_rust_compile(host_or_target, rustc_target, rustc): # Check to see whether our rustc has a reasonably functional stdlib # for our chosen target. - target_arg = '--target=' + rustc_target - in_fd, in_path = mkstemp(prefix='conftest', suffix='.rs', text=True) - out_fd, out_path = mkstemp(prefix='conftest', suffix='.rlib') + target_arg = "--target=" + rustc_target + in_fd, in_path = mkstemp(prefix="conftest", suffix=".rs", text=True) + out_fd, out_path = mkstemp(prefix="conftest", suffix=".rlib") os.close(out_fd) try: source = 'pub extern fn hello() { println!("Hello world"); }' - log.debug('Creating `%s` with content:', in_path) - with LineIO(lambda l: log.debug('| %s', l)) as out: + log.debug("Creating `%s` with content:", in_path) + with LineIO(lambda l: log.debug("| %s", l)) as out: out.write(source) os.write(in_fd, ensure_binary(source)) @@ -374,20 +413,29 @@ def assert_rust_compile(host_or_target, rustc_target, rustc): cmd = [ rustc, - '--crate-type', 'staticlib', + "--crate-type", + "staticlib", target_arg, - '-o', out_path, + "-o", + out_path, in_path, ] def failed(): - die(dedent('''\ + die( + dedent( + """\ Cannot compile for {} with {} The target may be unsupported, or you may not have a rust std library for that target installed. 
Try: rustup target add {} - '''.format(host_or_target.alias, rustc, rustc_target))) + """.format( + host_or_target.alias, rustc, rustc_target + ) + ) + ) + check_cmd_output(*cmd, onerror=failed) if not os.path.exists(out_path) or os.path.getsize(out_path) == 0: failed() @@ -396,20 +444,31 @@ def assert_rust_compile(host_or_target, rustc_target, rustc): os.remove(out_path) -@depends(rustc, host, host_c_compiler, rustc_info.host, rust_supported_targets, - arm_target, when=rust_compiler) -@checking('for rust host triplet') -@imports(_from='textwrap', _import='dedent') -def rust_host_triple(rustc, host, compiler_info, rustc_host, rust_supported_targets, - arm_target): - rustc_target = detect_rustc_target(host, compiler_info, arm_target, - rust_supported_targets) +@depends( + rustc, + host, + host_c_compiler, + rustc_info.host, + rust_supported_targets, + arm_target, + when=rust_compiler, +) +@checking("for rust host triplet") +@imports(_from="textwrap", _import="dedent") +def rust_host_triple( + rustc, host, compiler_info, rustc_host, rust_supported_targets, arm_target +): + rustc_target = detect_rustc_target( + host, compiler_info, arm_target, rust_supported_targets + ) if rustc_target != rustc_host: if host.alias == rustc_target: configure_host = host.alias else: - configure_host = '{}/{}'.format(host.alias, rustc_target) - die(dedent('''\ + configure_host = "{}/{}".format(host.alias, rustc_target) + die( + dedent( + """\ The rust compiler host ({rustc}) is not suitable for the configure host ({configure}). You can solve this by: @@ -417,59 +476,79 @@ def rust_host_triple(rustc, host, compiler_info, rustc_host, rust_supported_targ mozconfig and adding "ac_add_options --host={rustc}". * Or, install the rust toolchain for {configure}, if supported, by running "rustup default stable-{rustc_target}" - '''.format(rustc=rustc_host, configure=configure_host, rustc_target=rustc_target))) + """.format( + rustc=rustc_host, + configure=configure_host, + rustc_target=rustc_target, + ) + ) + ) assert_rust_compile(host, rustc_target, rustc) return rustc_target -@depends(rustc, target, c_compiler, rust_supported_targets, arm_target, when=rust_compiler) -@checking('for rust target triplet') -def rust_target_triple(rustc, target, compiler_info, rust_supported_targets, arm_target): - rustc_target = detect_rustc_target(target, compiler_info, arm_target, rust_supported_targets) + +@depends( + rustc, target, c_compiler, rust_supported_targets, arm_target, when=rust_compiler +) +@checking("for rust target triplet") +def rust_target_triple( + rustc, target, compiler_info, rust_supported_targets, arm_target +): + rustc_target = detect_rustc_target( + target, compiler_info, arm_target, rust_supported_targets + ) assert_rust_compile(target, rustc_target, rustc) return rustc_target -set_config('RUST_TARGET', rust_target_triple) -set_config('RUST_HOST_TARGET', rust_host_triple) +set_config("RUST_TARGET", rust_target_triple) +set_config("RUST_HOST_TARGET", rust_host_triple) # This is used for putting source info into symbol files. -set_config('RUSTC_COMMIT', depends(rustc_info)(lambda i: i.commit)) +set_config("RUSTC_COMMIT", depends(rustc_info)(lambda i: i.commit)) # Rustdoc is required by Rust tests below. 
-option(env='RUSTDOC', nargs=1, help='Path to the rustdoc program') +option(env="RUSTDOC", nargs=1, help="Path to the rustdoc program") -rustdoc = check_prog('RUSTDOC', ['rustdoc'], paths=toolchain_search_path, - input='RUSTDOC', allow_missing=True) +rustdoc = check_prog( + "RUSTDOC", + ["rustdoc"], + paths=toolchain_search_path, + input="RUSTDOC", + allow_missing=True, +) # This option is separate from --enable-tests because Rust tests are particularly # expensive in terms of compile time (especially for code in libxul). -option('--enable-rust-tests', - help='Enable building and running of Rust tests during `make check`') +option( + "--enable-rust-tests", + help="Enable building and running of Rust tests during `make check`", +) -@depends('--enable-rust-tests', rustdoc) +@depends("--enable-rust-tests", rustdoc) def rust_tests(enable_rust_tests, rustdoc): if enable_rust_tests and not rustdoc: - die('--enable-rust-tests requires rustdoc') + die("--enable-rust-tests requires rustdoc") return bool(enable_rust_tests) -set_config('MOZ_RUST_TESTS', rust_tests) +set_config("MOZ_RUST_TESTS", rust_tests) @depends(target, c_compiler, rustc) -@imports('os') +@imports("os") def rustc_natvis_ldflags(target, compiler_info, rustc): - if target.kernel == 'WINNT' and compiler_info.type == 'clang-cl': - sysroot = check_cmd_output(rustc, '--print', 'sysroot').strip() - etc = os.path.join(sysroot, 'lib/rustlib/etc') + if target.kernel == "WINNT" and compiler_info.type == "clang-cl": + sysroot = check_cmd_output(rustc, "--print", "sysroot").strip() + etc = os.path.join(sysroot, "lib/rustlib/etc") ldflags = [] if os.path.isdir(etc): for f in os.listdir(etc): - if f.endswith('.natvis'): - ldflags.append('-NATVIS:' + normsep(os.path.join(etc, f))) + if f.endswith(".natvis"): + ldflags.append("-NATVIS:" + normsep(os.path.join(etc, f))) return ldflags -set_config('RUSTC_NATVIS_LDFLAGS', rustc_natvis_ldflags) +set_config("RUSTC_NATVIS_LDFLAGS", rustc_natvis_ldflags) diff --git a/build/moz.configure/toolchain.configure b/build/moz.configure/toolchain.configure index 661701e238c72f..d7d3886163ea8e 100755 --- a/build/moz.configure/toolchain.configure +++ b/build/moz.configure/toolchain.configure @@ -4,41 +4,41 @@ # License, v. 2.0. If a copy of the MPL was not distributed with this # file, You can obtain one at http://mozilla.org/MPL/2.0/. 
-imply_option('--enable-release', mozilla_official) -imply_option('--enable-release', depends_if('MOZ_AUTOMATION')(lambda x: True)) +imply_option("--enable-release", mozilla_official) +imply_option("--enable-release", depends_if("MOZ_AUTOMATION")(lambda x: True)) -option('--enable-release', - default=milestone.is_release_or_beta, - help='{Build|Do not build} with more conservative, release ' - 'engineering-oriented options.{ This may slow down builds.|}') +option( + "--enable-release", + default=milestone.is_release_or_beta, + help="{Build|Do not build} with more conservative, release " + "engineering-oriented options.{ This may slow down builds.|}", +) -@depends('--enable-release') +@depends("--enable-release") def developer_options(value): if not value: return True -add_old_configure_assignment('DEVELOPER_OPTIONS', developer_options) -set_config('DEVELOPER_OPTIONS', developer_options) +add_old_configure_assignment("DEVELOPER_OPTIONS", developer_options) +set_config("DEVELOPER_OPTIONS", developer_options) # Code optimization # ============================================================== -option('--disable-optimize', - nargs='?', - help='Disable optimizations via compiler flags') +option("--disable-optimize", nargs="?", help="Disable optimizations via compiler flags") -@depends('--enable-optimize', '--help') +@depends("--enable-optimize", "--help") def moz_optimize(option, _): flags = None if len(option): - val = '2' + val = "2" flags = option[0] elif option: - val = '1' + val = "1" else: val = None @@ -48,22 +48,25 @@ def moz_optimize(option, _): ) -set_config('MOZ_OPTIMIZE', moz_optimize.optimize) -add_old_configure_assignment('MOZ_OPTIMIZE', moz_optimize.optimize) -add_old_configure_assignment('MOZ_CONFIGURE_OPTIMIZE_FLAGS', moz_optimize.flags) +set_config("MOZ_OPTIMIZE", moz_optimize.optimize) +add_old_configure_assignment("MOZ_OPTIMIZE", moz_optimize.optimize) +add_old_configure_assignment("MOZ_CONFIGURE_OPTIMIZE_FLAGS", moz_optimize.flags) # yasm detection # ============================================================== -yasm = check_prog('YASM', ['yasm'], allow_missing=True) +yasm = check_prog("YASM", ["yasm"], allow_missing=True) @depends_if(yasm) -@checking('yasm version') +@checking("yasm version") def yasm_version(yasm): - version = check_cmd_output( - yasm, '--version', - onerror=lambda: die('Failed to get yasm version.') - ).splitlines()[0].split()[1] + version = ( + check_cmd_output( + yasm, "--version", onerror=lambda: die("Failed to get yasm version.") + ) + .splitlines()[0] + .split()[1] + ) return Version(version) @@ -71,172 +74,201 @@ def yasm_version(yasm): def yasm_asflags(yasm, target): if yasm: asflags = { - ('OSX', 'x86'): ['-f', 'macho32'], - ('OSX', 'x86_64'): ['-f', 'macho64'], - ('WINNT', 'x86'): ['-f', 'win32'], - ('WINNT', 'x86_64'): ['-f', 'x64'], + ("OSX", "x86"): ["-f", "macho32"], + ("OSX", "x86_64"): ["-f", "macho64"], + ("WINNT", "x86"): ["-f", "win32"], + ("WINNT", "x86_64"): ["-f", "x64"], }.get((target.os, target.cpu), None) if asflags is None: # We're assuming every x86 platform we support that's # not Windows or Mac is ELF. 
- if target.cpu == 'x86': - asflags = ['-f', 'elf32'] - elif target.cpu == 'x86_64': - asflags = ['-f', 'elf64'] + if target.cpu == "x86": + asflags = ["-f", "elf32"] + elif target.cpu == "x86_64": + asflags = ["-f", "elf64"] if asflags: - asflags += ['-rnasm', '-pnasm'] + asflags += ["-rnasm", "-pnasm"] return asflags -set_config('YASM_ASFLAGS', yasm_asflags) +set_config("YASM_ASFLAGS", yasm_asflags) # Android NDK # ============================================================== -@depends('--disable-compile-environment', target) +@depends("--disable-compile-environment", target) def compiling_android(compile_env, target): - return compile_env and target.os == 'Android' + return compile_env and target.os == "Android" -include('android-ndk.configure', when=compiling_android) +include("android-ndk.configure", when=compiling_android) with only_when(target_is_osx): # MacOS deployment target version # ============================================================== # This needs to happen before any compilation test is done. - option('--enable-macos-target', env='MACOSX_DEPLOYMENT_TARGET', nargs=1, - default='10.9', help='Set the minimum MacOS version needed at runtime') - + option( + "--enable-macos-target", + env="MACOSX_DEPLOYMENT_TARGET", + nargs=1, + default="10.9", + help="Set the minimum MacOS version needed at runtime", + ) - @depends('--enable-macos-target') - @imports(_from='os', _import='environ') + @depends("--enable-macos-target") + @imports(_from="os", _import="environ") def macos_target(value): if value: # Ensure every compiler process we spawn uses this value. - environ['MACOSX_DEPLOYMENT_TARGET'] = value[0] + environ["MACOSX_DEPLOYMENT_TARGET"] = value[0] return value[0] - - set_config('MACOSX_DEPLOYMENT_TARGET', macos_target) - add_old_configure_assignment('MACOSX_DEPLOYMENT_TARGET', macos_target) + set_config("MACOSX_DEPLOYMENT_TARGET", macos_target) + add_old_configure_assignment("MACOSX_DEPLOYMENT_TARGET", macos_target) @depends(host) def host_is_osx(host): - if host.os == 'OSX': + if host.os == "OSX": return True with only_when(host_is_osx | target_is_osx): # MacOS SDK # ========= - option('--with-macos-sdk', env='MACOS_SDK_DIR', nargs=1, - help='Location of platform SDK to use') + option( + "--with-macos-sdk", + env="MACOS_SDK_DIR", + nargs=1, + help="Location of platform SDK to use", + ) - @depends('--with-macos-sdk', host) - @imports(_from='__builtin__', _import='open') - @imports(_from='os.path', _import='isdir') - @imports('plistlib') + @depends("--with-macos-sdk", host) + @imports(_from="__builtin__", _import="open") + @imports(_from="os.path", _import="isdir") + @imports("plistlib") def macos_sdk(sdk, host): - sdk_min_version = Version('10.11') - sdk_max_version = Version('11.0') + sdk_min_version = Version("10.11") + sdk_max_version = Version("11.0") if sdk: sdk = sdk[0] - elif host.os == 'OSX': - sdk = check_cmd_output('xcrun', '--show-sdk-path', onerror=lambda: '').rstrip() + elif host.os == "OSX": + sdk = check_cmd_output( + "xcrun", "--show-sdk-path", onerror=lambda: "" + ).rstrip() if not sdk: - die('Could not find the macOS SDK. Please use --with-macos-sdk to give ' - 'the path to a macOS SDK.') + die( + "Could not find the macOS SDK. Please use --with-macos-sdk to give " + "the path to a macOS SDK." + ) else: - die('Need a macOS SDK when targeting macOS. Please use --with-macos-sdk ' - 'to give the path to a macOS SDK.') + die( + "Need a macOS SDK when targeting macOS. Please use --with-macos-sdk " + "to give the path to a macOS SDK." 
+ ) if not isdir(sdk): - die('SDK not found in %s. When using --with-macos-sdk, you must specify a ' - 'valid SDK. SDKs are installed when the optional cross-development ' - 'tools are selected during the Xcode/Developer Tools installation.' - % sdk) - with open(os.path.join(sdk, 'SDKSettings.plist'), 'rb') as plist: + die( + "SDK not found in %s. When using --with-macos-sdk, you must specify a " + "valid SDK. SDKs are installed when the optional cross-development " + "tools are selected during the Xcode/Developer Tools installation." + % sdk + ) + with open(os.path.join(sdk, "SDKSettings.plist"), "rb") as plist: obj = plistlib.load(plist) if not obj: - die('Error parsing SDKSettings.plist in the SDK directory: %s' % sdk) - if 'Version' not in obj: - die('Error finding Version information in SDKSettings.plist from the SDK: %s' % sdk) - version = Version(obj['Version']) + die("Error parsing SDKSettings.plist in the SDK directory: %s" % sdk) + if "Version" not in obj: + die( + "Error finding Version information in SDKSettings.plist from the SDK: %s" + % sdk + ) + version = Version(obj["Version"]) if version < sdk_min_version: - die('SDK version "%s" is too old. Please upgrade to at least %s. ' - 'You may need to point to it using --with-macos-sdk= in your ' - 'mozconfig.' % (version, sdk_min_version)) + die( + 'SDK version "%s" is too old. Please upgrade to at least %s. ' + "You may need to point to it using --with-macos-sdk= in your " + "mozconfig." % (version, sdk_min_version) + ) if version > sdk_max_version: - die('SDK version "%s" is unsupported. Please downgrade to version ' - '%s. You may need to point to it using --with-macos-sdk= in ' - 'your mozconfig.' % (version, sdk_max_version)) + die( + 'SDK version "%s" is unsupported. Please downgrade to version ' + "%s. You may need to point to it using --with-macos-sdk= in " + "your mozconfig." % (version, sdk_max_version) + ) return sdk - set_config('MACOS_SDK_DIR', macos_sdk) + set_config("MACOS_SDK_DIR", macos_sdk) with only_when(target_is_osx): with only_when(cross_compiling): - option('--with-macos-private-frameworks', - env="MACOS_PRIVATE_FRAMEWORKS_DIR", nargs=1, - help='Location of private frameworks to use') + option( + "--with-macos-private-frameworks", + env="MACOS_PRIVATE_FRAMEWORKS_DIR", + nargs=1, + help="Location of private frameworks to use", + ) - @depends_if('--with-macos-private-frameworks') - @imports(_from='os.path', _import='isdir') + @depends_if("--with-macos-private-frameworks") + @imports(_from="os.path", _import="isdir") def macos_private_frameworks(value): if value and not isdir(value[0]): - die('PrivateFrameworks not found not found in %s. When using ' - '--with-macos-private-frameworks, you must specify a valid ' - 'directory', value[0]) + die( + "PrivateFrameworks not found not found in %s. 
When using " + "--with-macos-private-frameworks, you must specify a valid " + "directory", + value[0], + ) return value[0] @depends(macos_private_frameworks) def macos_private_frameworks(value): if value: return value - return '/System/Library/PrivateFrameworks' + return "/System/Library/PrivateFrameworks" - set_config('MACOS_PRIVATE_FRAMEWORKS_DIR', macos_private_frameworks) + set_config("MACOS_PRIVATE_FRAMEWORKS_DIR", macos_private_frameworks) # Compiler wrappers # ============================================================== -option('--with-compiler-wrapper', env='COMPILER_WRAPPER', nargs=1, - help='Enable compiling with wrappers such as distcc and ccache') +option( + "--with-compiler-wrapper", + env="COMPILER_WRAPPER", + nargs=1, + help="Enable compiling with wrappers such as distcc and ccache", +) -option('--with-ccache', env='CCACHE', nargs='?', - help='Enable compiling with ccache') +option("--with-ccache", env="CCACHE", nargs="?", help="Enable compiling with ccache") -@depends_if('--with-ccache') +@depends_if("--with-ccache") def ccache(value): if len(value): return value # If --with-ccache was given without an explicit value, we default to # 'ccache'. - return 'ccache' + return "ccache" -ccache = check_prog('CCACHE', progs=(), input=ccache) +ccache = check_prog("CCACHE", progs=(), input=ccache) -option(env='CCACHE_PREFIX', - nargs=1, - help='Compiler prefix to use when using ccache') +option(env="CCACHE_PREFIX", nargs=1, help="Compiler prefix to use when using ccache") -ccache_prefix = depends_if('CCACHE_PREFIX')(lambda prefix: prefix[0]) -set_config('CCACHE_PREFIX', ccache_prefix) +ccache_prefix = depends_if("CCACHE_PREFIX")(lambda prefix: prefix[0]) +set_config("CCACHE_PREFIX", ccache_prefix) # Distinguish ccache from sccache. @depends_if(ccache) def ccache_is_sccache(ccache): - return check_cmd_output(ccache, '--version').startswith('sccache') + return check_cmd_output(ccache, "--version").startswith("sccache") @depends(ccache, ccache_is_sccache) @@ -248,59 +280,70 @@ def using_ccache(ccache, ccache_is_sccache): def using_sccache(ccache, ccache_is_sccache): return ccache and ccache_is_sccache -option(env='RUSTC_WRAPPER', nargs=1, - help='Wrap rust compilation with given tool') -@depends(ccache, ccache_is_sccache, 'RUSTC_WRAPPER') -@imports(_from='textwrap', _import='dedent') -@imports('os') +option(env="RUSTC_WRAPPER", nargs=1, help="Wrap rust compilation with given tool") + + +@depends(ccache, ccache_is_sccache, "RUSTC_WRAPPER") +@imports(_from="textwrap", _import="dedent") +@imports("os") def check_sccache_version(ccache, ccache_is_sccache, rustc_wrapper): - sccache_min_version = Version('0.2.13') + sccache_min_version = Version("0.2.13") def check_version(path): - out = check_cmd_output(path, '--version') + out = check_cmd_output(path, "--version") version = Version(out.rstrip().split()[-1]) if version < sccache_min_version: - die(dedent('''\ + die( + dedent( + """\ sccache %s or later is required. sccache in use at %s has version %s. Please upgrade or acquire a new version with |./mach bootstrap|. 
- '''), sccache_min_version, path, version) + """ + ), + sccache_min_version, + path, + version, + ) if ccache and ccache_is_sccache: check_version(ccache) - if (rustc_wrapper and - (os.path.splitext(os.path.basename(rustc_wrapper[0]))[0].lower() == - 'sccache')): + if rustc_wrapper and ( + os.path.splitext(os.path.basename(rustc_wrapper[0]))[0].lower() == "sccache" + ): check_version(rustc_wrapper[0]) -set_config('MOZ_USING_CCACHE', using_ccache) -set_config('MOZ_USING_SCCACHE', using_sccache) -option(env='SCCACHE_VERBOSE_STATS', - help='Print verbose sccache stats after build') +set_config("MOZ_USING_CCACHE", using_ccache) +set_config("MOZ_USING_SCCACHE", using_sccache) +option(env="SCCACHE_VERBOSE_STATS", help="Print verbose sccache stats after build") -@depends(using_sccache, 'SCCACHE_VERBOSE_STATS') + +@depends(using_sccache, "SCCACHE_VERBOSE_STATS") def sccache_verbose_stats(using_sccache, verbose_stats): return using_sccache and bool(verbose_stats) -set_config('SCCACHE_VERBOSE_STATS', sccache_verbose_stats) +set_config("SCCACHE_VERBOSE_STATS", sccache_verbose_stats) -@depends('--with-compiler-wrapper', ccache) -@imports(_from='mozbuild.shellutil', _import='split', _as='shell_split') +@depends("--with-compiler-wrapper", ccache) +@imports(_from="mozbuild.shellutil", _import="split", _as="shell_split") def compiler_wrapper(wrapper, ccache): if wrapper: raw_wrapper = wrapper[0] wrapper = shell_split(raw_wrapper) wrapper_program = find_program(wrapper[0]) if not wrapper_program: - die('Cannot find `%s` from the given compiler wrapper `%s`', - wrapper[0], raw_wrapper) + die( + "Cannot find `%s` from the given compiler wrapper `%s`", + wrapper[0], + raw_wrapper, + ) wrapper[0] = wrapper_program if ccache: @@ -317,35 +360,39 @@ def using_compiler_wrapper(compiler_wrapper): return True -set_config('MOZ_USING_COMPILER_WRAPPER', using_compiler_wrapper) +set_config("MOZ_USING_COMPILER_WRAPPER", using_compiler_wrapper) # GC rooting and hazard analysis. # ============================================================== -option(env='MOZ_HAZARD', help='Build for the GC rooting hazard analysis') +option(env="MOZ_HAZARD", help="Build for the GC rooting hazard analysis") -@depends('MOZ_HAZARD') +@depends("MOZ_HAZARD") def hazard_analysis(value): if value: return True -set_config('MOZ_HAZARD', hazard_analysis) +set_config("MOZ_HAZARD", hazard_analysis) # Cross-compilation related things. # ============================================================== -option('--with-toolchain-prefix', env='TOOLCHAIN_PREFIX', nargs=1, - help='Prefix for the target toolchain') +option( + "--with-toolchain-prefix", + env="TOOLCHAIN_PREFIX", + nargs=1, + help="Prefix for the target toolchain", +) -@depends('--with-toolchain-prefix', target, cross_compiling) +@depends("--with-toolchain-prefix", target, cross_compiling) def toolchain_prefix(value, target, cross_compiling): if value: return tuple(value) if cross_compiling: - return ('%s-' % target.toolchain, '%s-' % target.alias) + return ("%s-" % target.toolchain, "%s-" % target.alias) @depends(toolchain_prefix, target) @@ -353,35 +400,34 @@ def first_toolchain_prefix(toolchain_prefix, target): # Pass TOOLCHAIN_PREFIX down to the build system if it was given from the # command line/environment (in which case there's only one value in the tuple), # or when cross-compiling for Android or OSX. 
- if toolchain_prefix and (target.os in ('Android', 'OSX') or len(toolchain_prefix) == 1): + if toolchain_prefix and ( + target.os in ("Android", "OSX") or len(toolchain_prefix) == 1 + ): return toolchain_prefix[0] -set_config('TOOLCHAIN_PREFIX', first_toolchain_prefix) -add_old_configure_assignment('TOOLCHAIN_PREFIX', first_toolchain_prefix) +set_config("TOOLCHAIN_PREFIX", first_toolchain_prefix) +add_old_configure_assignment("TOOLCHAIN_PREFIX", first_toolchain_prefix) # Compilers # ============================================================== -include('compilers-util.configure') +include("compilers-util.configure") def try_preprocess(compiler, language, source, onerror=None): - return try_invoke_compiler(compiler, language, source, ['-E'], onerror) - - -@imports(_from='mozbuild.configure.constants', _import='CompilerType') -@imports(_from='mozbuild.configure.constants', - _import='CPU_preprocessor_checks') -@imports(_from='mozbuild.configure.constants', - _import='kernel_preprocessor_checks') -@imports(_from='mozbuild.configure.constants', - _import='OS_preprocessor_checks') -@imports(_from='six', _import='iteritems') -@imports(_from='textwrap', _import='dedent') -@imports(_from='__builtin__', _import='Exception') + return try_invoke_compiler(compiler, language, source, ["-E"], onerror) + + +@imports(_from="mozbuild.configure.constants", _import="CompilerType") +@imports(_from="mozbuild.configure.constants", _import="CPU_preprocessor_checks") +@imports(_from="mozbuild.configure.constants", _import="kernel_preprocessor_checks") +@imports(_from="mozbuild.configure.constants", _import="OS_preprocessor_checks") +@imports(_from="six", _import="iteritems") +@imports(_from="textwrap", _import="dedent") +@imports(_from="__builtin__", _import="Exception") def get_compiler_info(compiler, language): - '''Returns information about the given `compiler` (command line in the + """Returns information about the given `compiler` (command line in the form of a list or tuple), in the given `language`. The returned information includes: @@ -389,12 +435,13 @@ def get_compiler_info(compiler, language): - the compiler version - the compiler supported language - the compiler supported language version - ''' + """ # Xcode clang versions are different from the underlying llvm version (they # instead are aligned with the Xcode version). Fortunately, we can tell # apart plain clang from Xcode clang, and convert the Xcode clang version # into the more or less corresponding plain clang version. - check = dedent('''\ + check = dedent( + """\ #if defined(_MSC_VER) && defined(__clang__) && defined(_MT) %COMPILER "clang-cl" %VERSION __clang_major__.__clang_minor__.__clang_patchlevel__ @@ -414,109 +461,112 @@ def get_compiler_info(compiler, language): #elif __STDC_VERSION__ %STDC_VERSION __STDC_VERSION__ #endif - ''') + """ + ) # While we're doing some preprocessing, we might as well do some more # preprocessor-based tests at the same time, to check the toolchain # matches what we want. 
for name, preprocessor_checks in ( - ('CPU', CPU_preprocessor_checks), - ('KERNEL', kernel_preprocessor_checks), - ('OS', OS_preprocessor_checks), + ("CPU", CPU_preprocessor_checks), + ("KERNEL", kernel_preprocessor_checks), + ("OS", OS_preprocessor_checks), ): for n, (value, condition) in enumerate(iteritems(preprocessor_checks)): - check += dedent('''\ + check += dedent( + """\ #%(if)s %(condition)s %%%(name)s "%(value)s" - ''' % { - 'if': 'elif' if n else 'if', - 'condition': condition, - 'name': name, - 'value': value, - }) - check += '#endif\n' + """ + % { + "if": "elif" if n else "if", + "condition": condition, + "name": name, + "value": value, + } + ) + check += "#endif\n" # Also check for endianness. The advantage of living in modern times is # that all the modern compilers we support now have __BYTE_ORDER__ defined # by the preprocessor. - check += dedent('''\ + check += dedent( + """\ #if __BYTE_ORDER__ == __ORDER_LITTLE_ENDIAN__ %ENDIANNESS "little" #elif __BYTE_ORDER__ == __ORDER_BIG_ENDIAN__ %ENDIANNESS "big" #endif - ''') + """ + ) result = try_preprocess(compiler, language, check) if not result: - raise FatalCheckError( - 'Unknown compiler or compiler not supported.') + raise FatalCheckError("Unknown compiler or compiler not supported.") # Metadata emitted by preprocessors such as GCC with LANG=ja_JP.utf-8 may # have non-ASCII characters. Treat the output as bytearray. data = {} for line in result.splitlines(): - if line.startswith('%'): - k, _, v = line.partition(' ') - k = k.lstrip('%') - data[k] = v.replace(' ', '').lstrip('"').rstrip('"') - log.debug('%s = %s', k, data[k]) + if line.startswith("%"): + k, _, v = line.partition(" ") + k = k.lstrip("%") + data[k] = v.replace(" ", "").lstrip('"').rstrip('"') + log.debug("%s = %s", k, data[k]) try: - type = CompilerType(data['COMPILER']) + type = CompilerType(data["COMPILER"]) except Exception: - raise FatalCheckError( - 'Unknown compiler or compiler not supported.') + raise FatalCheckError("Unknown compiler or compiler not supported.") - cplusplus = int(data.get('cplusplus', '0L').rstrip('L')) - stdc_version = int(data.get('STDC_VERSION', '0L').rstrip('L')) + cplusplus = int(data.get("cplusplus", "0L").rstrip("L")) + stdc_version = int(data.get("STDC_VERSION", "0L").rstrip("L")) - version = data.get('VERSION') + version = data.get("VERSION") if version: version = Version(version) - if data.get('XCODE'): + if data.get("XCODE"): # Derived from https://en.wikipedia.org/wiki/Xcode#Toolchain_versions # with enough granularity for major.minor version checks further # down the line - if version < '9.1': - version = Version('4.0.0.or.less') - elif version < '10.0': - version = Version('5.0.2') - elif version < '10.0.1': - version = Version('6.0.1') - elif version < '11.0': - version = Version('7.0.0') - elif version < '11.0.3': - version = Version('8.0.0') - elif version < '12.0': - version = Version('9.0.0') + if version < "9.1": + version = Version("4.0.0.or.less") + elif version < "10.0": + version = Version("5.0.2") + elif version < "10.0.1": + version = Version("6.0.1") + elif version < "11.0": + version = Version("7.0.0") + elif version < "11.0.3": + version = Version("8.0.0") + elif version < "12.0": + version = Version("9.0.0") else: - version = Version('10.0.0.or.more') + version = Version("10.0.0.or.more") return namespace( type=type, version=version, - cpu=data.get('CPU'), - kernel=data.get('KERNEL'), - endianness=data.get('ENDIANNESS'), - os=data.get('OS'), - language='C++' if cplusplus else 'C', + cpu=data.get("CPU"), + 
kernel=data.get("KERNEL"), + endianness=data.get("ENDIANNESS"), + os=data.get("OS"), + language="C++" if cplusplus else "C", language_version=cplusplus if cplusplus else stdc_version, ) def same_arch_different_bits(): return ( - ('x86', 'x86_64'), - ('ppc', 'ppc64'), - ('sparc', 'sparc64'), + ("x86", "x86_64"), + ("ppc", "ppc64"), + ("sparc", "sparc64"), ) -@imports(_from='mozbuild.shellutil', _import='quote') -@imports(_from='mozbuild.configure.constants', - _import='OS_preprocessor_checks') +@imports(_from="mozbuild.shellutil", _import="quote") +@imports(_from="mozbuild.configure.constants", _import="OS_preprocessor_checks") def check_compiler(compiler, language, target): info = get_compiler_info(compiler, language) @@ -526,66 +576,72 @@ def check_compiler(compiler, language, target): # -------------------------------------------------------------------- if language != info.language: raise FatalCheckError( - '`%s` is not a %s compiler.' % (quote(*compiler), language)) + "`%s` is not a %s compiler." % (quote(*compiler), language) + ) # Note: We do a strict version check because there sometimes are backwards # incompatible changes in the standard, and not all code that compiles as # C99 compiles as e.g. C11 (as of writing, this is true of libnestegg, for # example) - if info.language == 'C' and info.language_version != 199901: - if info.type == 'clang-cl': - flags.append('-Xclang') - flags.append('-std=gnu99') + if info.language == "C" and info.language_version != 199901: + if info.type == "clang-cl": + flags.append("-Xclang") + flags.append("-std=gnu99") cxx17_version = 201703 - if info.language == 'C++': + if info.language == "C++": if info.language_version != cxx17_version: # MSVC headers include C++17 features, but don't guard them # with appropriate checks. - if info.type == 'clang-cl': - flags.append('-Xclang') - flags.append('-std=c++17') + if info.type == "clang-cl": + flags.append("-Xclang") + flags.append("-std=c++17") else: - flags.append('-std=gnu++17') + flags.append("-std=gnu++17") # Check compiler target # -------------------------------------------------------------------- has_target = False - if info.type == 'clang': + if info.type == "clang": # Add the target explicitly when the target is aarch64 macosx, because # the clang target is named differently, and we need to work around # https://github.com/rust-lang/rust-bindgen/issues/1871 and # https://github.com/alexcrichton/cc-rs/issues/542 so we always want # the target on the command line, even if the compiler would default to # that. - if target.os == 'OSX' and target.cpu == 'aarch64': - if '--target=arm64-apple-darwin' not in compiler: - flags.append('--target=arm64-apple-darwin') + if target.os == "OSX" and target.cpu == "aarch64": + if "--target=arm64-apple-darwin" not in compiler: + flags.append("--target=arm64-apple-darwin") has_target = True - elif not info.kernel or info.kernel != target.kernel or \ - not info.endianness or info.endianness != target.endianness: - flags.append('--target=%s' % target.toolchain) + elif ( + not info.kernel + or info.kernel != target.kernel + or not info.endianness + or info.endianness != target.endianness + ): + flags.append("--target=%s" % target.toolchain) has_target = True # Add target flag when there is an OS mismatch (e.g. building for Android on # Linux). However, only do this if the target OS is in our whitelist, to # keep things the same on other platforms. 
elif target.os in OS_preprocessor_checks and ( - not info.os or info.os != target.os): - flags.append('--target=%s' % target.toolchain) + not info.os or info.os != target.os + ): + flags.append("--target=%s" % target.toolchain) has_target = True if not has_target and (not info.cpu or info.cpu != target.cpu): same_arch = same_arch_different_bits() if (target.cpu, info.cpu) in same_arch: - flags.append('-m32') + flags.append("-m32") elif (info.cpu, target.cpu) in same_arch: - flags.append('-m64') - elif info.type == 'clang-cl' and target.cpu == 'aarch64': - flags.append('--target=%s' % target.toolchain) - elif info.type == 'clang': - flags.append('--target=%s' % target.toolchain) + flags.append("-m64") + elif info.type == "clang-cl" and target.cpu == "aarch64": + flags.append("--target=%s" % target.toolchain) + elif info.type == "clang": + flags.append("--target=%s" % target.toolchain) return namespace( type=info.type, @@ -598,102 +654,133 @@ def check_compiler(compiler, language, target): ) -@imports(_from='__builtin__', _import='open') -@imports('json') -@imports('os') +@imports(_from="__builtin__", _import="open") +@imports("json") +@imports("os") def get_vc_paths(topsrcdir): def vswhere(args): - program_files = (os.environ.get('PROGRAMFILES(X86)') or - os.environ.get('PROGRAMFILES')) + program_files = os.environ.get("PROGRAMFILES(X86)") or os.environ.get( + "PROGRAMFILES" + ) if not program_files: return [] - vswhere = os.path.join(program_files, 'Microsoft Visual Studio', - 'Installer', 'vswhere.exe') + vswhere = os.path.join( + program_files, "Microsoft Visual Studio", "Installer", "vswhere.exe" + ) if not os.path.exists(vswhere): return [] - return json.loads(check_cmd_output(vswhere, '-format', 'json', *args)) - - for install in vswhere(['-products', '*', '-requires', 'Microsoft.VisualStudio.Component.VC.Tools.x86.x64']): - path = install['installationPath'] - tools_version = open(os.path.join( - path, r'VC\Auxiliary\Build\Microsoft.VCToolsVersion.default.txt'), 'r').read().strip() - tools_path = os.path.join( - path, r'VC\Tools\MSVC', tools_version) - yield (Version(install['installationVersion']), tools_path) + return json.loads(check_cmd_output(vswhere, "-format", "json", *args)) + + for install in vswhere( + [ + "-products", + "*", + "-requires", + "Microsoft.VisualStudio.Component.VC.Tools.x86.x64", + ] + ): + path = install["installationPath"] + tools_version = ( + open( + os.path.join( + path, r"VC\Auxiliary\Build\Microsoft.VCToolsVersion.default.txt" + ), + "r", + ) + .read() + .strip() + ) + tools_path = os.path.join(path, r"VC\Tools\MSVC", tools_version) + yield (Version(install["installationVersion"]), tools_path) @depends(host) def host_is_windows(host): - if host.kernel == 'WINNT': + if host.kernel == "WINNT": return True -option('--with-visual-studio-version', nargs=1, - choices=('2017',), when=host_is_windows, - help='Select a specific Visual Studio version to use') +option( + "--with-visual-studio-version", + nargs=1, + choices=("2017",), + when=host_is_windows, + help="Select a specific Visual Studio version to use", +) -@depends('--with-visual-studio-version', when=host_is_windows) +@depends("--with-visual-studio-version", when=host_is_windows) def vs_major_version(value): if value: - return {'2017': 15}[value[0]] - - -option(env='VC_PATH', nargs=1, when=host_is_windows, - help='Path to the Microsoft Visual C/C++ compiler') - - -@depends(host, vs_major_version, check_build_environment, 'VC_PATH', - '--with-visual-studio-version', when=host_is_windows) 
-@imports(_from='__builtin__', _import='sorted') -@imports(_from='operator', _import='itemgetter') -def vc_compiler_paths_for_version(host, vs_major_version, env, vc_path, vs_release_name): + return {"2017": 15}[value[0]] + + +option( + env="VC_PATH", + nargs=1, + when=host_is_windows, + help="Path to the Microsoft Visual C/C++ compiler", +) + + +@depends( + host, + vs_major_version, + check_build_environment, + "VC_PATH", + "--with-visual-studio-version", + when=host_is_windows, +) +@imports(_from="__builtin__", _import="sorted") +@imports(_from="operator", _import="itemgetter") +def vc_compiler_paths_for_version( + host, vs_major_version, env, vc_path, vs_release_name +): if vc_path and vs_release_name: - die('VC_PATH and --with-visual-studio-version cannot be used together.') + die("VC_PATH and --with-visual-studio-version cannot be used together.") if vc_path: # Use an arbitrary version, it doesn't matter. - all_versions = [(Version('15'), vc_path[0])] + all_versions = [(Version("15"), vc_path[0])] else: all_versions = sorted(get_vc_paths(env.topsrcdir), key=itemgetter(0)) if not all_versions: return if vs_major_version: - versions = [d for (v, d) in all_versions if v.major == - vs_major_version] + versions = [d for (v, d) in all_versions if v.major == vs_major_version] if not versions: - die('Visual Studio %s could not be found!' % vs_release_name) + die("Visual Studio %s could not be found!" % vs_release_name) path = versions[0] else: # Choose the newest version. path = all_versions[-1][1] host_dir = { - 'x86_64': 'HostX64', - 'x86': 'HostX86', + "x86_64": "HostX64", + "x86": "HostX86", }.get(host.cpu) if host_dir: - path = os.path.join(path, 'bin', host_dir) + path = os.path.join(path, "bin", host_dir) return { - 'x64': [os.path.join(path, 'x64')], + "x64": [os.path.join(path, "x64")], # The cross toolchains require DLLs from the native x64 toolchain. 
- 'x86': [os.path.join(path, 'x86'), os.path.join(path, 'x64')], - 'arm64': [os.path.join(path, 'arm64'), os.path.join(path, 'x64')], + "x86": [os.path.join(path, "x86"), os.path.join(path, "x64")], + "arm64": [os.path.join(path, "arm64"), os.path.join(path, "x64")], } @template def vc_compiler_path_for(host_or_target): - @depends(host_or_target, vc_compiler_paths_for_version, - when=host_is_windows) + @depends(host_or_target, vc_compiler_paths_for_version, when=host_is_windows) def vc_compiler_path(target, paths): vc_target = { - 'x86': 'x86', - 'x86_64': 'x64', - 'arm': 'arm', - 'aarch64': 'arm64' + "x86": "x86", + "x86_64": "x64", + "arm": "arm", + "aarch64": "arm64", }.get(target.cpu) if not paths: return return paths.get(vc_target) + return vc_compiler_path @@ -702,32 +789,42 @@ host_vc_compiler_path = vc_compiler_path_for(host) @dependable -@imports('os') -@imports(_from='os', _import='environ') +@imports("os") +@imports(_from="os", _import="environ") def original_path(): - return environ['PATH'].split(os.pathsep) + return environ["PATH"].split(os.pathsep) @template def toolchain_search_path_for(host_or_target): arch_alias, vc_path = { - host: ('host', host_vc_compiler_path), - target: ('target', vc_compiler_path), + host: ("host", host_vc_compiler_path), + target: ("target", vc_compiler_path), }[host_or_target] - @depends(dependable(arch_alias), vc_path, original_path, developer_options, - mozbuild_state_path) - @imports('os') - @imports(_from='os', _import='environ') - def toolchain_search_path(arch_alias, vc_compiler_path, original_path, - developer_options, mozbuild_state_path): + @depends( + dependable(arch_alias), + vc_path, + original_path, + developer_options, + mozbuild_state_path, + ) + @imports("os") + @imports(_from="os", _import="environ") + def toolchain_search_path( + arch_alias, + vc_compiler_path, + original_path, + developer_options, + mozbuild_state_path, + ): result = list(original_path) if vc_compiler_path: # The second item, if there is one, is necessary to have in $PATH for # Windows to load the required DLLs from there. if len(vc_compiler_path) > 1: - environ['PATH'] = os.pathsep.join(result + vc_compiler_path[1:]) + environ["PATH"] = os.pathsep.join(result + vc_compiler_path[1:]) # The first item is where the programs are going to be result.append(vc_compiler_path[0]) @@ -735,35 +832,42 @@ def toolchain_search_path_for(host_or_target): # Also add in the location to which `mach bootstrap` or # `mach artifact toolchain` installs clang, cbindgen, etc. bootstrapped = [ - os.path.join(mozbuild_state_path, *rest) for rest in ( - ['clang', 'bin'], - ['cbindgen'], - ['dump_syms'], - ['nasm'], - ['lucetc'], - )] + os.path.join(mozbuild_state_path, *rest) + for rest in ( + ["clang", "bin"], + ["cbindgen"], + ["dump_syms"], + ["nasm"], + ["lucetc"], + ) + ] # Also add the rustup install directory for cargo/rustc. 
- cargo_home = environ.get('CARGO_HOME', '') + cargo_home = environ.get("CARGO_HOME", "") if cargo_home: cargo_home = os.path.abspath(cargo_home) else: - cargo_home = os.path.expanduser(os.path.join('~', '.cargo')) - rustup_path = os.path.join(cargo_home, 'bin') + cargo_home = os.path.expanduser(os.path.join("~", ".cargo")) + rustup_path = os.path.join(cargo_home, "bin") result.append(rustup_path) if developer_options: - log.debug('Prioritizing mozbuild state dir in {} toolchain path because ' - 'you are not building in release mode.'.format(arch_alias)) + log.debug( + "Prioritizing mozbuild state dir in {} toolchain path because " + "you are not building in release mode.".format(arch_alias) + ) search_path = bootstrapped + result else: - log.debug('Prioritizing system over mozbuild state dir in {} ' - 'toolchain path because you are building in ' - 'release mode.'.format(arch_alias)) + log.debug( + "Prioritizing system over mozbuild state dir in {} " + "toolchain path because you are building in " + "release mode.".format(arch_alias) + ) search_path = result + bootstrapped - log.debug('Search path for {} toolchain: {}'.format(arch_alias, search_path)) + log.debug("Search path for {} toolchain: {}".format(arch_alias, search_path)) return search_path + return toolchain_search_path @@ -774,10 +878,10 @@ host_toolchain_search_path = toolchain_search_path_for(host) # As a workaround until bug 1516228 and bug 1516253 are fixed, set the PATH # variable for the build to contain the toolchain search path. @depends(toolchain_search_path, host_toolchain_search_path) -@imports('os') -@imports(_from='os', _import='environ') +@imports("os") +@imports(_from="os", _import="environ") def altered_path(toolchain_search_path, host_toolchain_search_path): - path = environ['PATH'].split(os.pathsep) + path = environ["PATH"].split(os.pathsep) altered_path = list(toolchain_search_path) for p in host_toolchain_search_path: if p not in altered_path: @@ -788,42 +892,45 @@ def altered_path(toolchain_search_path, host_toolchain_search_path): return os.pathsep.join(altered_path) -set_config('PATH', altered_path) +set_config("PATH", altered_path) @template def default_c_compilers(host_or_target, other_c_compiler=None): - '''Template defining the set of default C compilers for the host and + """Template defining the set of default C compilers for the host and target platforms. `host_or_target` is either `host` or `target` (the @depends functions from init.configure. `other_c_compiler` is the `target` C compiler when `host_or_target` is `host`. 
- ''' + """ assert host_or_target in {host, target} other_c_compiler = () if other_c_compiler is None else (other_c_compiler,) @depends(host_or_target, target, toolchain_prefix, *other_c_compiler) - def default_c_compilers(host_or_target, target, toolchain_prefix, - *other_c_compiler): - if host_or_target.kernel == 'WINNT': - supported = types = ('clang-cl', 'clang') - elif host_or_target.kernel == 'Darwin': - types = ('clang',) - supported = ('clang', 'gcc') + def default_c_compilers( + host_or_target, target, toolchain_prefix, *other_c_compiler + ): + if host_or_target.kernel == "WINNT": + supported = types = ("clang-cl", "clang") + elif host_or_target.kernel == "Darwin": + types = ("clang",) + supported = ("clang", "gcc") else: - supported = types = ('clang', 'gcc') + supported = types = ("clang", "gcc") info = other_c_compiler[0] if other_c_compiler else None if info and info.type in supported: # When getting default C compilers for the host, we prioritize the # same compiler as the target C compiler. prioritized = info.compiler - if info.type == 'gcc': + if info.type == "gcc": same_arch = same_arch_different_bits() - if (target.cpu != host_or_target.cpu and - (target.cpu, host_or_target.cpu) not in same_arch and - (host_or_target.cpu, target.cpu) not in same_arch): + if ( + target.cpu != host_or_target.cpu + and (target.cpu, host_or_target.cpu) not in same_arch + and (host_or_target.cpu, target.cpu) not in same_arch + ): # If the target C compiler is GCC, and it can't be used with # -m32/-m64 for the host, it's probably toolchain-prefixed, # so we prioritize a raw 'gcc' instead. @@ -831,13 +938,13 @@ def default_c_compilers(host_or_target, other_c_compiler=None): types = [prioritized] + [t for t in types if t != info.type] - gcc = ('gcc',) + gcc = ("gcc",) if toolchain_prefix and host_or_target is target: - gcc = tuple('%sgcc' % p for p in toolchain_prefix) + gcc + gcc = tuple("%sgcc" % p for p in toolchain_prefix) + gcc result = [] for type in types: - if type == 'gcc': + if type == "gcc": result.extend(gcc) else: result.append(type) @@ -849,7 +956,7 @@ def default_c_compilers(host_or_target, other_c_compiler=None): @template def default_cxx_compilers(c_compiler, other_c_compiler=None, other_cxx_compiler=None): - '''Template defining the set of default C++ compilers for the host and + """Template defining the set of default C++ compilers for the host and target platforms. `c_compiler` is the @depends function returning a Compiler instance for the desired platform. @@ -861,7 +968,7 @@ def default_cxx_compilers(c_compiler, other_c_compiler=None, other_cxx_compiler= We also factor in the target C++ compiler when getting the default host C++ compiler, using the target C++ compiler if the host and target C compilers are the same. 
- ''' + """ assert (other_c_compiler is None) == (other_cxx_compiler is None) if other_c_compiler is not None: @@ -879,11 +986,11 @@ def default_cxx_compilers(c_compiler, other_c_compiler=None, other_cxx_compiler= dir = os.path.dirname(c_compiler.compiler) file = os.path.basename(c_compiler.compiler) - if c_compiler.type == 'gcc': - return (os.path.join(dir, file.replace('gcc', 'g++')),) + if c_compiler.type == "gcc": + return (os.path.join(dir, file.replace("gcc", "g++")),) - if c_compiler.type == 'clang': - return (os.path.join(dir, file.replace('clang', 'clang++')),) + if c_compiler.type == "clang": + return (os.path.join(dir, file.replace("clang", "clang++")),) return (c_compiler.compiler,) @@ -892,13 +999,13 @@ def default_cxx_compilers(c_compiler, other_c_compiler=None, other_cxx_compiler= @template def provided_program(env_var, when=None): - '''Template handling cases where a program can be specified either as a + """Template handling cases where a program can be specified either as a path or as a path with applicable arguments. - ''' + """ @depends_if(env_var, when=when) - @imports(_from='itertools', _import='takewhile') - @imports(_from='mozbuild.shellutil', _import='split', _as='shell_split') + @imports(_from="itertools", _import="takewhile") + @imports(_from="mozbuild.shellutil", _import="split", _as="shell_split") def provided(cmd): # Assume the first dash-prefixed item (and any subsequent items) are # command-line options, the item before the dash-prefixed item is @@ -906,31 +1013,36 @@ def provided_program(env_var, when=None): # of some kind (e.g. sccache). cmd = shell_split(cmd[0]) - without_flags = list(takewhile(lambda x: not x.startswith('-'), cmd)) + without_flags = list(takewhile(lambda x: not x.startswith("-"), cmd)) return namespace( wrapper=without_flags[:-1], program=without_flags[-1], - flags=cmd[len(without_flags):], + flags=cmd[len(without_flags) :], ) return provided def prepare_flags(host_or_target, macos_sdk): - if macos_sdk and host_or_target.os == 'OSX': - return ['-isysroot', macos_sdk] + if macos_sdk and host_or_target.os == "OSX": + return ["-isysroot", macos_sdk] return [] def minimum_gcc_version(): - return Version('7.1.0') + return Version("7.1.0") @template -def compiler(language, host_or_target, c_compiler=None, other_compiler=None, - other_c_compiler=None): - '''Template handling the generic base checks for the compiler for the +def compiler( + language, + host_or_target, + c_compiler=None, + other_compiler=None, + other_c_compiler=None, +): + """Template handling the generic base checks for the compiler for the given `language` on the given platform (`host_or_target`). `host_or_target` is either `host` or `target` (the @depends functions from init.configure. @@ -941,34 +1053,35 @@ def compiler(language, host_or_target, c_compiler=None, other_compiler=None, When `host_or_target` is `host` and the language is 'C++', `other_c_compiler` is the result of the `compiler` template for the language 'C' for `target`. 
- ''' + """ assert host_or_target in {host, target} - assert language in ('C', 'C++') - assert language == 'C' or c_compiler is not None + assert language in ("C", "C++") + assert language == "C" or c_compiler is not None assert host_or_target is target or other_compiler is not None - assert language == 'C' or host_or_target is target or \ - other_c_compiler is not None + assert language == "C" or host_or_target is target or other_c_compiler is not None host_or_target_str = { - host: 'host', - target: 'target', + host: "host", + target: "target", }[host_or_target] var = { - ('C', target): 'CC', - ('C++', target): 'CXX', - ('C', host): 'HOST_CC', - ('C++', host): 'HOST_CXX', + ("C", target): "CC", + ("C++", target): "CXX", + ("C", host): "HOST_CC", + ("C++", host): "HOST_CXX", }[language, host_or_target] default_compilers = { - 'C': lambda: default_c_compilers(host_or_target, other_compiler), - 'C++': lambda: default_cxx_compilers(c_compiler, other_c_compiler, other_compiler), + "C": lambda: default_c_compilers(host_or_target, other_compiler), + "C++": lambda: default_cxx_compilers( + c_compiler, other_c_compiler, other_compiler + ), }[language]() - what = 'the %s %s compiler' % (host_or_target_str, language) + what = "the %s %s compiler" % (host_or_target_str, language) - option(env=var, nargs=1, help='Path to %s' % what) + option(env=var, nargs=1, help="Path to %s" % what) # Handle the compiler given by the user through one of the CC/CXX/HOST_CC/ # HOST_CXX variables. @@ -983,15 +1096,20 @@ def compiler(language, host_or_target, c_compiler=None, other_compiler=None, # old-configure complicates things, and for now, we a) can't take the plain # result from check_prog as CC/CXX/HOST_CC/HOST_CXX and b) have to let # old-configure AC_SUBST it (because it's autoconf doing it, not us) - compiler = check_prog('_%s' % var, what=what, progs=default_compilers, - input=provided_compiler.program, - paths=search_path) + compiler = check_prog( + "_%s" % var, + what=what, + progs=default_compilers, + input=provided_compiler.program, + paths=search_path, + ) @depends(compiler, provided_compiler, compiler_wrapper, host_or_target, macos_sdk) - @checking('whether %s can be used' % what, lambda x: bool(x)) - @imports(_from='mozbuild.shellutil', _import='quote') - def valid_compiler(compiler, provided_compiler, compiler_wrapper, - host_or_target, macos_sdk): + @checking("whether %s can be used" % what, lambda x: bool(x)) + @imports(_from="mozbuild.shellutil", _import="quote") + def valid_compiler( + compiler, provided_compiler, compiler_wrapper, host_or_target, macos_sdk + ): wrapper = list(compiler_wrapper or ()) flags = prepare_flags(host_or_target, macos_sdk) if provided_compiler: @@ -999,13 +1117,12 @@ def compiler(language, host_or_target, c_compiler=None, other_compiler=None, # When doing a subconfigure, the compiler is set by old-configure # and it contains the wrappers from --with-compiler-wrapper and # --with-ccache. - if provided_wrapper[:len(wrapper)] == wrapper: - provided_wrapper = provided_wrapper[len(wrapper):] + if provided_wrapper[: len(wrapper)] == wrapper: + provided_wrapper = provided_wrapper[len(wrapper) :] wrapper.extend(provided_wrapper) flags.extend(provided_compiler.flags) - info = check_compiler(wrapper + [compiler] + flags, language, - host_or_target) + info = check_compiler(wrapper + [compiler] + flags, language, host_or_target) # Check that the additional flags we got are enough to not require any # more flags. 
If we get an exception, just ignore it; it's liable to be @@ -1015,66 +1132,86 @@ def compiler(language, host_or_target, c_compiler=None, other_compiler=None, try: if info.flags: flags += info.flags - info = check_compiler(wrapper + [compiler] + flags, language, - host_or_target) + info = check_compiler( + wrapper + [compiler] + flags, language, host_or_target + ) except FatalCheckError: pass if not info.target_cpu or info.target_cpu != host_or_target.cpu: raise FatalCheckError( - '%s %s compiler target CPU (%s) does not match --%s CPU (%s)' - % (host_or_target_str.capitalize(), language, - info.target_cpu or 'unknown', host_or_target_str, - host_or_target.raw_cpu)) + "%s %s compiler target CPU (%s) does not match --%s CPU (%s)" + % ( + host_or_target_str.capitalize(), + language, + info.target_cpu or "unknown", + host_or_target_str, + host_or_target.raw_cpu, + ) + ) - if not info.target_kernel or (info.target_kernel != - host_or_target.kernel): + if not info.target_kernel or (info.target_kernel != host_or_target.kernel): raise FatalCheckError( - '%s %s compiler target kernel (%s) does not match --%s kernel (%s)' - % (host_or_target_str.capitalize(), language, - info.target_kernel or 'unknown', host_or_target_str, - host_or_target.kernel)) + "%s %s compiler target kernel (%s) does not match --%s kernel (%s)" + % ( + host_or_target_str.capitalize(), + language, + info.target_kernel or "unknown", + host_or_target_str, + host_or_target.kernel, + ) + ) - if not info.target_endianness or (info.target_endianness != - host_or_target.endianness): + if not info.target_endianness or ( + info.target_endianness != host_or_target.endianness + ): raise FatalCheckError( - '%s %s compiler target endianness (%s) does not match --%s ' - 'endianness (%s)' - % (host_or_target_str.capitalize(), language, - info.target_endianness or 'unknown', host_or_target_str, - host_or_target.endianness)) + "%s %s compiler target endianness (%s) does not match --%s " + "endianness (%s)" + % ( + host_or_target_str.capitalize(), + language, + info.target_endianness or "unknown", + host_or_target_str, + host_or_target.endianness, + ) + ) # Compiler version checks # =================================================== # Check the compiler version here instead of in `compiler_version` so # that the `checking` message doesn't pretend the compiler can be used # to then bail out one line later. - if info.type == 'gcc': - if host_or_target.os == 'Android': - raise FatalCheckError('GCC is not supported on Android.\n' - 'Please use clang from the Android NDK instead.') + if info.type == "gcc": + if host_or_target.os == "Android": + raise FatalCheckError( + "GCC is not supported on Android.\n" + "Please use clang from the Android NDK instead." + ) gcc_version = minimum_gcc_version() if info.version < gcc_version: raise FatalCheckError( - 'Only GCC %d.%d or newer is supported (found version %s).' - % (gcc_version.major, gcc_version.minor, info.version)) + "Only GCC %d.%d or newer is supported (found version %s)." + % (gcc_version.major, gcc_version.minor, info.version) + ) - if info.type == 'clang-cl': - if info.version < '8.0.0': + if info.type == "clang-cl": + if info.version < "8.0.0": raise FatalCheckError( - 'Only clang-cl 8.0 or newer is supported (found version %s)' - % info.version) + "Only clang-cl 8.0 or newer is supported (found version %s)" + % info.version + ) # If you want to bump the version check here ensure the version # is known for Xcode in get_compiler_info. 
- if info.type == 'clang' and info.version < '5.0': + if info.type == "clang" and info.version < "5.0": raise FatalCheckError( - 'Only clang/llvm 5.0 or newer is supported (found version %s).' - % info.version) + "Only clang/llvm 5.0 or newer is supported (found version %s)." + % info.version + ) if info.flags: - raise FatalCheckError( - 'Unknown compiler or compiler not supported.') + raise FatalCheckError("Unknown compiler or compiler not supported.") return namespace( wrapper=wrapper, @@ -1086,69 +1223,87 @@ def compiler(language, host_or_target, c_compiler=None, other_compiler=None, ) @depends(valid_compiler) - @checking('%s version' % what) + @checking("%s version" % what) def compiler_version(compiler): return compiler.version - if language == 'C++': + if language == "C++": + @depends(valid_compiler, c_compiler) def valid_compiler(compiler, c_compiler): if compiler.type != c_compiler.type: - die('The %s C compiler is %s, while the %s C++ compiler is ' - '%s. Need to use the same compiler suite.', - host_or_target_str, c_compiler.type, - host_or_target_str, compiler.type) + die( + "The %s C compiler is %s, while the %s C++ compiler is " + "%s. Need to use the same compiler suite.", + host_or_target_str, + c_compiler.type, + host_or_target_str, + compiler.type, + ) if compiler.version != c_compiler.version: - die('The %s C compiler is version %s, while the %s C++ ' - 'compiler is version %s. Need to use the same compiler ' - 'version.', - host_or_target_str, c_compiler.version, - host_or_target_str, compiler.version) + die( + "The %s C compiler is version %s, while the %s C++ " + "compiler is version %s. Need to use the same compiler " + "version.", + host_or_target_str, + c_compiler.version, + host_or_target_str, + compiler.version, + ) return compiler # Set CC/CXX/HOST_CC/HOST_CXX for old-configure, which needs the wrapper # and the flags that were part of the user input for those variables to # be provided. - add_old_configure_assignment(var, depends_if(valid_compiler)( - lambda x: list(x.wrapper) + [x.compiler] + list(x.flags))) + add_old_configure_assignment( + var, + depends_if(valid_compiler)( + lambda x: list(x.wrapper) + [x.compiler] + list(x.flags) + ), + ) if host_or_target is target: - add_old_configure_assignment('ac_cv_prog_%s' % var, depends_if(valid_compiler)( - lambda x: list(x.wrapper) + [x.compiler] + list(x.flags))) + add_old_configure_assignment( + "ac_cv_prog_%s" % var, + depends_if(valid_compiler)( + lambda x: list(x.wrapper) + [x.compiler] + list(x.flags) + ), + ) # We check that it works in python configure already. 
- add_old_configure_assignment('ac_cv_prog_%s_works' % var.lower(), 'yes') + add_old_configure_assignment("ac_cv_prog_%s_works" % var.lower(), "yes") add_old_configure_assignment( - 'ac_cv_prog_%s_cross' % var.lower(), - depends(cross_compiling)(lambda x: 'yes' if x else 'no')) - gcc_like = depends(valid_compiler.type)(lambda x: 'yes' if x in ('gcc', 'clang') else 'no') - add_old_configure_assignment('ac_cv_prog_%s_g' % var.lower(), gcc_like) - if language == 'C': - add_old_configure_assignment('ac_cv_prog_gcc', gcc_like) - if language == 'C++': - add_old_configure_assignment('ac_cv_prog_gxx', gcc_like) - + "ac_cv_prog_%s_cross" % var.lower(), + depends(cross_compiling)(lambda x: "yes" if x else "no"), + ) + gcc_like = depends(valid_compiler.type)( + lambda x: "yes" if x in ("gcc", "clang") else "no" + ) + add_old_configure_assignment("ac_cv_prog_%s_g" % var.lower(), gcc_like) + if language == "C": + add_old_configure_assignment("ac_cv_prog_gcc", gcc_like) + if language == "C++": + add_old_configure_assignment("ac_cv_prog_gxx", gcc_like) # Set CC_TYPE/CC_VERSION/HOST_CC_TYPE/HOST_CC_VERSION to allow # old-configure to do some of its still existing checks. - if language == 'C': - set_config( - '%s_TYPE' % var, valid_compiler.type) - add_old_configure_assignment( - '%s_TYPE' % var, valid_compiler.type) + if language == "C": + set_config("%s_TYPE" % var, valid_compiler.type) + add_old_configure_assignment("%s_TYPE" % var, valid_compiler.type) set_config( - '%s_VERSION' % var, depends(valid_compiler.version)(lambda v: str(v))) + "%s_VERSION" % var, depends(valid_compiler.version)(lambda v: str(v)) + ) valid_compiler = compiler_class(valid_compiler, host_or_target) def compiler_error(): - raise FatalCheckError('Failed compiling a simple %s source with %s' - % (language, what)) + raise FatalCheckError( + "Failed compiling a simple %s source with %s" % (language, what) + ) - valid_compiler.try_compile(check_msg='%s works' % what, - onerror=compiler_error) + valid_compiler.try_compile(check_msg="%s works" % what, onerror=compiler_error) - set_config('%s_BASE_FLAGS' % var, valid_compiler.flags) + set_config("%s_BASE_FLAGS" % var, valid_compiler.flags) # Set CPP/CXXCPP for both the build system and old-configure. We don't # need to check this works for preprocessing, because we already relied @@ -1156,20 +1311,21 @@ def compiler(language, host_or_target, c_compiler=None, other_compiler=None, # in the first place. if host_or_target is target: pp_var = { - 'C': 'CPP', - 'C++': 'CXXCPP', + "C": "CPP", + "C++": "CXXCPP", }[language] preprocessor = depends_if(valid_compiler)( - lambda x: list(x.wrapper) + [x.compiler, '-E'] + list(x.flags)) + lambda x: list(x.wrapper) + [x.compiler, "-E"] + list(x.flags) + ) set_config(pp_var, preprocessor) add_old_configure_assignment(pp_var, preprocessor) - if language == 'C': + if language == "C": linker_var = { - target: 'LD', - host: 'HOST_LD', + target: "LD", + host: "HOST_LD", }[host_or_target] @deprecated_option(env=linker_var, nargs=1) @@ -1180,147 +1336,171 @@ def compiler(language, host_or_target, c_compiler=None, other_compiler=None, @depends(linker) def unused_linker(linker): if linker: - log.warning('The value of %s is not used by this build system.' - % linker_var) + log.warning( + "The value of %s is not used by this build system." 
% linker_var + ) return valid_compiler -c_compiler = compiler('C', target) -cxx_compiler = compiler('C++', target, c_compiler=c_compiler) -host_c_compiler = compiler('C', host, other_compiler=c_compiler) -host_cxx_compiler = compiler('C++', host, c_compiler=host_c_compiler, - other_compiler=cxx_compiler, - other_c_compiler=c_compiler) +c_compiler = compiler("C", target) +cxx_compiler = compiler("C++", target, c_compiler=c_compiler) +host_c_compiler = compiler("C", host, other_compiler=c_compiler) +host_cxx_compiler = compiler( + "C++", + host, + c_compiler=host_c_compiler, + other_compiler=cxx_compiler, + other_c_compiler=c_compiler, +) # Generic compiler-based conditions. -building_with_gcc = depends(c_compiler)(lambda info: info.type == 'gcc') +building_with_gcc = depends(c_compiler)(lambda info: info.type == "gcc") @depends(cxx_compiler, ccache_prefix) -@imports('os') +@imports("os") def cxx_is_icecream(info, ccache_prefix): - if (os.path.islink(info.compiler) and os.path.basename( - os.readlink(info.compiler)) == 'icecc'): + if ( + os.path.islink(info.compiler) + and os.path.basename(os.readlink(info.compiler)) == "icecc" + ): return True - if ccache_prefix and os.path.basename(ccache_prefix) == 'icecc': + if ccache_prefix and os.path.basename(ccache_prefix) == "icecc": return True -set_config('CXX_IS_ICECREAM', cxx_is_icecream) + +set_config("CXX_IS_ICECREAM", cxx_is_icecream) @depends(c_compiler) def msvs_version(info): # clang-cl emulates the same version scheme as cl. And MSVS_VERSION needs to # be set for GYP on Windows. - if info.type == 'clang-cl': - return '2017' + if info.type == "clang-cl": + return "2017" - return '' + return "" -set_config('MSVS_VERSION', msvs_version) +set_config("MSVS_VERSION", msvs_version) -include('compile-checks.configure') -include('arm.configure', when=depends(target.cpu)(lambda cpu: cpu == 'arm')) +include("compile-checks.configure") +include("arm.configure", when=depends(target.cpu)(lambda cpu: cpu == "arm")) @depends(host, host_os_kernel_major_version, target) def needs_macos_sdk_headers_check(host, version, target): # Only an issue on Mac OS X 10.14 (and probably above). - if host.kernel != 'Darwin' or target.kernel !='Darwin' or version < '18': + if host.kernel != "Darwin" or target.kernel != "Darwin" or version < "18": return return True -@depends(cxx_compiler.try_run(header='#include_next ', - check_msg='for macOS SDK headers', - when=needs_macos_sdk_headers_check), - when=needs_macos_sdk_headers_check) +@depends( + cxx_compiler.try_run( + header="#include_next ", + check_msg="for macOS SDK headers", + when=needs_macos_sdk_headers_check, + ), + when=needs_macos_sdk_headers_check, +) def check_have_mac_10_14_sdk(value): if value: return - die('System inttypes.h not found. Please try running ' - '`open /Library/Developer/CommandLineTools/Packages/macOS_SDK_headers_for_macOS_10.14.pkg` ' - 'and following the instructions to install the necessary headers') + die( + "System inttypes.h not found. 
Please try running " + "`open /Library/Developer/CommandLineTools/Packages/macOS_SDK_headers_for_macOS_10.14.pkg` " + "and following the instructions to install the necessary headers" + ) -@depends(have_64_bit, - try_compile(body='static_assert(sizeof(void *) == 8, "")', - check_msg='for 64-bit OS')) +@depends( + have_64_bit, + try_compile( + body='static_assert(sizeof(void *) == 8, "")', check_msg="for 64-bit OS" + ), +) def check_have_64_bit(have_64_bit, compiler_have_64_bit): if have_64_bit != compiler_have_64_bit: - configure_error('The target compiler does not agree with configure ' - 'about the target bitness.') + configure_error( + "The target compiler does not agree with configure " + "about the target bitness." + ) @depends(cxx_compiler, target) def needs_libstdcxx_newness_check(cxx_compiler, target): # We only have to care about this on Linux and MinGW. - if cxx_compiler.type == 'clang-cl': + if cxx_compiler.type == "clang-cl": return - if target.kernel not in ('Linux', 'WINNT'): + if target.kernel not in ("Linux", "WINNT"): return - if target.os == 'Android': + if target.os == "Android": return return True def die_on_old_libstdcxx(): - die('The libstdc++ in use is not new enough. Please run ' - './mach bootstrap to update your compiler, or update your system ' - 'libstdc++ installation.') - -try_compile(includes=['cstddef'], - body='\n'.join([ - # _GLIBCXX_RELEASE showed up in libstdc++ 7. - '#if defined(__GLIBCXX__) && !defined(_GLIBCXX_RELEASE)', - '# error libstdc++ not new enough', - '#endif', - '#if defined(_GLIBCXX_RELEASE)', - '# if _GLIBCXX_RELEASE < %d' % minimum_gcc_version().major, - '# error libstdc++ not new enough', - '# else', - ' (void) 0', - '# endif', - '#endif', - ]), - check_msg='for new enough STL headers from libstdc++', - when=needs_libstdcxx_newness_check, - onerror=die_on_old_libstdcxx) + die( + "The libstdc++ in use is not new enough. Please run " + "./mach bootstrap to update your compiler, or update your system " + "libstdc++ installation." + ) + + +try_compile( + includes=["cstddef"], + body="\n".join( + [ + # _GLIBCXX_RELEASE showed up in libstdc++ 7. + "#if defined(__GLIBCXX__) && !defined(_GLIBCXX_RELEASE)", + "# error libstdc++ not new enough", + "#endif", + "#if defined(_GLIBCXX_RELEASE)", + "# if _GLIBCXX_RELEASE < %d" % minimum_gcc_version().major, + "# error libstdc++ not new enough", + "# else", + " (void) 0", + "# endif", + "#endif", + ] + ), + check_msg="for new enough STL headers from libstdc++", + when=needs_libstdcxx_newness_check, + onerror=die_on_old_libstdcxx, +) @depends(c_compiler, target) def default_debug_flags(compiler_info, target): # Debug info is ON by default. 
- if compiler_info.type == 'clang-cl': - return '-Z7' - elif target.kernel == 'WINNT' and compiler_info.type == 'clang': - return '-g -gcodeview' - return '-g' + if compiler_info.type == "clang-cl": + return "-Z7" + elif target.kernel == "WINNT" and compiler_info.type == "clang": + return "-g -gcodeview" + return "-g" -option(env='MOZ_DEBUG_FLAGS', - nargs=1, - help='Debug compiler flags') +option(env="MOZ_DEBUG_FLAGS", nargs=1, help="Debug compiler flags") -imply_option('--enable-debug-symbols', - depends_if('--enable-debug')(lambda v: v)) +imply_option("--enable-debug-symbols", depends_if("--enable-debug")(lambda v: v)) -option('--disable-debug-symbols', - nargs='?', - help='Disable debug symbols using the given compiler flags') +option( + "--disable-debug-symbols", + nargs="?", + help="Disable debug symbols using the given compiler flags", +) -set_config('MOZ_DEBUG_SYMBOLS', - depends_if('--enable-debug-symbols')(lambda _: True)) +set_config("MOZ_DEBUG_SYMBOLS", depends_if("--enable-debug-symbols")(lambda _: True)) -@depends('MOZ_DEBUG_FLAGS', '--enable-debug-symbols', default_debug_flags) +@depends("MOZ_DEBUG_FLAGS", "--enable-debug-symbols", default_debug_flags) def debug_flags(env_debug_flags, enable_debug_flags, default_debug_flags): # If MOZ_DEBUG_FLAGS is set, and --enable-debug-symbols is set to a value, # --enable-debug-symbols takes precedence. Note, the value of @@ -1332,8 +1512,8 @@ def debug_flags(env_debug_flags, enable_debug_flags, default_debug_flags): return default_debug_flags -set_config('MOZ_DEBUG_FLAGS', debug_flags) -add_old_configure_assignment('MOZ_DEBUG_FLAGS', debug_flags) +set_config("MOZ_DEBUG_FLAGS", debug_flags) +add_old_configure_assignment("MOZ_DEBUG_FLAGS", debug_flags) @depends(c_compiler) @@ -1348,15 +1528,15 @@ def color_cflags(info): # value changes to e.g. "=always", exact string match may fail and # multiple color flags could be added. So examine downstream consumers # before adding flags to return values. - if info.type == 'gcc': - return '-fdiagnostics-color' - elif info.type == 'clang': - return '-fcolor-diagnostics' + if info.type == "gcc": + return "-fdiagnostics-color" + elif info.type == "clang": + return "-fcolor-diagnostics" else: - return '' + return "" -set_config('COLOR_CFLAGS', color_cflags) +set_config("COLOR_CFLAGS", color_cflags) # Some standard library headers (notably bionic on Android) declare standard # functions (e.g. 
getchar()) and also #define macros for those standard @@ -1408,48 +1588,54 @@ set_config('COLOR_CFLAGS', color_cflags) @depends(c_compiler, target) def libcxx_override_visibility(c_compiler, target): - if c_compiler.type == 'clang' and target.os == 'Android': + if c_compiler.type == "clang" and target.os == "Android": return namespace( - empty='', - hide_from_abi='__attribute__((__exclude_from_explicit_instantiation__))', + empty="", + hide_from_abi="__attribute__((__exclude_from_explicit_instantiation__))", ) -set_define('_LIBCPP_INLINE_VISIBILITY', libcxx_override_visibility.empty) -set_define('_LIBCPP_ALWAYS_INLINE', libcxx_override_visibility.empty) +set_define("_LIBCPP_INLINE_VISIBILITY", libcxx_override_visibility.empty) +set_define("_LIBCPP_ALWAYS_INLINE", libcxx_override_visibility.empty) + +set_define("_LIBCPP_HIDE_FROM_ABI", libcxx_override_visibility.hide_from_abi) -set_define('_LIBCPP_HIDE_FROM_ABI', libcxx_override_visibility.hide_from_abi) @depends(target, check_build_environment) def visibility_flags(target, env): - if target.os != 'WINNT': - if target.kernel == 'Darwin': - return ('-fvisibility=hidden', '-fvisibility-inlines-hidden') - return ('-I%s/system_wrappers' % os.path.join(env.dist), - '-include', - '%s/config/gcc_hidden.h' % env.topsrcdir) + if target.os != "WINNT": + if target.kernel == "Darwin": + return ("-fvisibility=hidden", "-fvisibility-inlines-hidden") + return ( + "-I%s/system_wrappers" % os.path.join(env.dist), + "-include", + "%s/config/gcc_hidden.h" % env.topsrcdir, + ) @depends(target, visibility_flags) def wrap_system_includes(target, visibility_flags): - if visibility_flags and target.kernel != 'Darwin': + if visibility_flags and target.kernel != "Darwin": return True -set_define('HAVE_VISIBILITY_HIDDEN_ATTRIBUTE', - depends(visibility_flags)(lambda v: bool(v) or None)) -set_define('HAVE_VISIBILITY_ATTRIBUTE', - depends(visibility_flags)(lambda v: bool(v) or None)) -set_config('WRAP_SYSTEM_INCLUDES', wrap_system_includes) -set_config('VISIBILITY_FLAGS', visibility_flags) +set_define( + "HAVE_VISIBILITY_HIDDEN_ATTRIBUTE", + depends(visibility_flags)(lambda v: bool(v) or None), +) +set_define( + "HAVE_VISIBILITY_ATTRIBUTE", depends(visibility_flags)(lambda v: bool(v) or None) +) +set_config("WRAP_SYSTEM_INCLUDES", wrap_system_includes) +set_config("VISIBILITY_FLAGS", visibility_flags) @template def depend_cflags(host_or_target_c_compiler): @depends(host_or_target_c_compiler) def depend_cflags(host_or_target_c_compiler): - if host_or_target_c_compiler.type != 'clang-cl': - return ['-MD', '-MP', '-MF $(MDDEPDIR)/$(@F).pp'] + if host_or_target_c_compiler.type != "clang-cl": + return ["-MD", "-MP", "-MF $(MDDEPDIR)/$(@F).pp"] else: # clang-cl doesn't accept the normal -MD -MP -MF options that clang # does, but the underlying cc1 binary understands how to generate @@ -1457,30 +1643,35 @@ def depend_cflags(host_or_target_c_compiler): # normal clang driver sends to cc1 when given the "correct" # dependency options. 
return [ - '-Xclang', '-MP', - '-Xclang', '-dependency-file', - '-Xclang', '$(MDDEPDIR)/$(@F).pp', - '-Xclang', '-MT', - '-Xclang', '$@' + "-Xclang", + "-MP", + "-Xclang", + "-dependency-file", + "-Xclang", + "$(MDDEPDIR)/$(@F).pp", + "-Xclang", + "-MT", + "-Xclang", + "$@", ] return depend_cflags -set_config('_DEPEND_CFLAGS', depend_cflags(c_compiler)) -set_config('_HOST_DEPEND_CFLAGS', depend_cflags(host_c_compiler)) +set_config("_DEPEND_CFLAGS", depend_cflags(c_compiler)) +set_config("_HOST_DEPEND_CFLAGS", depend_cflags(host_c_compiler)) @depends(c_compiler) def preprocess_option(compiler): # The uses of PREPROCESS_OPTION depend on the spacing for -o/-Fi. - if compiler.type in ('gcc', 'clang'): - return '-E -o ' + if compiler.type in ("gcc", "clang"): + return "-E -o " else: - return '-P -Fi' + return "-P -Fi" -set_config('PREPROCESS_OPTION', preprocess_option) +set_config("PREPROCESS_OPTION", preprocess_option) # We only want to include windows.configure when we are compiling on @@ -1489,120 +1680,135 @@ set_config('PREPROCESS_OPTION', preprocess_option) @depends(target, host) def is_windows(target, host): - return host.kernel == 'WINNT' or target.kernel == 'WINNT' + return host.kernel == "WINNT" or target.kernel == "WINNT" -include('windows.configure', when=is_windows) +include("windows.configure", when=is_windows) # On Power ISA, determine compiler flags for VMX, VSX and VSX-3. -set_config('PPC_VMX_FLAGS', - ['-maltivec'], - when=depends(target.cpu)(lambda cpu: cpu.startswith('ppc'))) +set_config( + "PPC_VMX_FLAGS", + ["-maltivec"], + when=depends(target.cpu)(lambda cpu: cpu.startswith("ppc")), +) -set_config('PPC_VSX_FLAGS', - ['-mvsx'], - when=depends(target.cpu)(lambda cpu: cpu.startswith('ppc'))) +set_config( + "PPC_VSX_FLAGS", + ["-mvsx"], + when=depends(target.cpu)(lambda cpu: cpu.startswith("ppc")), +) -set_config('PPC_VSX3_FLAGS', - ['-mvsx','-mcpu=power9'], - when=depends(target.cpu)(lambda cpu: cpu.startswith('ppc'))) +set_config( + "PPC_VSX3_FLAGS", + ["-mvsx", "-mcpu=power9"], + when=depends(target.cpu)(lambda cpu: cpu.startswith("ppc")), +) # ASAN # ============================================================== -option('--enable-address-sanitizer', help='Enable Address Sanitizer') +option("--enable-address-sanitizer", help="Enable Address Sanitizer") -@depends(when='--enable-address-sanitizer') +@depends(when="--enable-address-sanitizer") def asan(): return True -add_old_configure_assignment('MOZ_ASAN', asan) +add_old_configure_assignment("MOZ_ASAN", asan) # MSAN # ============================================================== -option('--enable-memory-sanitizer', help='Enable Memory Sanitizer') +option("--enable-memory-sanitizer", help="Enable Memory Sanitizer") -@depends(when='--enable-memory-sanitizer') +@depends(when="--enable-memory-sanitizer") def msan(): return True -add_old_configure_assignment('MOZ_MSAN', msan) +add_old_configure_assignment("MOZ_MSAN", msan) # TSAN # ============================================================== -option('--enable-thread-sanitizer', help='Enable Thread Sanitizer') +option("--enable-thread-sanitizer", help="Enable Thread Sanitizer") -@depends(when='--enable-thread-sanitizer') +@depends(when="--enable-thread-sanitizer") def tsan(): return True -add_old_configure_assignment('MOZ_TSAN', tsan) +add_old_configure_assignment("MOZ_TSAN", tsan) # UBSAN # ============================================================== -option('--enable-undefined-sanitizer', - nargs='*', - help='Enable UndefinedBehavior Sanitizer') +option( + 
"--enable-undefined-sanitizer", nargs="*", help="Enable UndefinedBehavior Sanitizer" +) + -@depends_if('--enable-undefined-sanitizer') +@depends_if("--enable-undefined-sanitizer") def ubsan(options): default_checks = [ - 'bool', - 'bounds', - 'enum', - 'integer-divide-by-zero', - 'object-size', - 'pointer-overflow', - 'return', - 'vla-bound', + "bool", + "bounds", + "enum", + "integer-divide-by-zero", + "object-size", + "pointer-overflow", + "return", + "vla-bound", ] checks = options if len(options) else default_checks - return ','.join(checks) + return ",".join(checks) + -add_old_configure_assignment('MOZ_UBSAN_CHECKS', ubsan) +add_old_configure_assignment("MOZ_UBSAN_CHECKS", ubsan) -option('--enable-signed-overflow-sanitizer', - help='Enable UndefinedBehavior Sanitizer (Signed Integer Overflow Parts)') +option( + "--enable-signed-overflow-sanitizer", + help="Enable UndefinedBehavior Sanitizer (Signed Integer Overflow Parts)", +) -@depends(when='--enable-signed-overflow-sanitizer') +@depends(when="--enable-signed-overflow-sanitizer") def ub_signed_overflow_san(): return True -add_old_configure_assignment('MOZ_SIGNED_OVERFLOW_SANITIZE', ub_signed_overflow_san) +add_old_configure_assignment("MOZ_SIGNED_OVERFLOW_SANITIZE", ub_signed_overflow_san) -option('--enable-unsigned-overflow-sanitizer', - help='Enable UndefinedBehavior Sanitizer (Unsigned Integer Overflow Parts)') +option( + "--enable-unsigned-overflow-sanitizer", + help="Enable UndefinedBehavior Sanitizer (Unsigned Integer Overflow Parts)", +) -@depends(when='--enable-unsigned-overflow-sanitizer') +@depends(when="--enable-unsigned-overflow-sanitizer") def ub_unsigned_overflow_san(): return True -add_old_configure_assignment('MOZ_UNSIGNED_OVERFLOW_SANITIZE', ub_unsigned_overflow_san) +add_old_configure_assignment("MOZ_UNSIGNED_OVERFLOW_SANITIZE", ub_unsigned_overflow_san) # Security Hardening # ============================================================== -option('--enable-hardening', env='MOZ_SECURITY_HARDENING', - help='Enables security hardening compiler options') +option( + "--enable-hardening", + env="MOZ_SECURITY_HARDENING", + help="Enables security hardening compiler options", +) # This function is a bit confusing. It adds or removes hardening flags in @@ -1614,12 +1820,19 @@ option('--enable-hardening', env='MOZ_SECURITY_HARDENING', # flag. --disable-hardening will omit the security flags. (However, not all # possible security flags will be omitted by --disable-hardening, as many are # compiler-default options we do not explicitly enable.) 
-@depends('--enable-hardening', '--enable-address-sanitizer', - '--enable-debug', '--enable-optimize', c_compiler, target) -def security_hardening_cflags(hardening_flag, asan, debug, optimize, c_compiler, - target): - compiler_is_gccish = c_compiler.type in ('gcc', 'clang') - mingw_clang = c_compiler.type == 'clang' and target.os == 'WINNT' +@depends( + "--enable-hardening", + "--enable-address-sanitizer", + "--enable-debug", + "--enable-optimize", + c_compiler, + target, +) +def security_hardening_cflags( + hardening_flag, asan, debug, optimize, c_compiler, target +): + compiler_is_gccish = c_compiler.type in ("gcc", "clang") + mingw_clang = c_compiler.type == "clang" and target.os == "WINNT" flags = [] ldflags = [] @@ -1634,15 +1847,15 @@ def security_hardening_cflags(hardening_flag, asan, debug, optimize, c_compiler, # Also, undefine it before defining it just in case a distro adds it, see Bug 1418398 if compiler_is_gccish and optimize and not asan: # Don't enable FORTIFY_SOURCE on Android on the top-level, but do enable in js/ - if target.os != 'Android': + if target.os != "Android": flags.append("-U_FORTIFY_SOURCE") flags.append("-D_FORTIFY_SOURCE=2") js_flags.append("-U_FORTIFY_SOURCE") js_flags.append("-D_FORTIFY_SOURCE=2") if mingw_clang: # mingw-clang needs to link in ssp which is not done by default - ldflags.append('-lssp') - js_ldflags.append('-lssp') + ldflags.append("-lssp") + js_ldflags.append("-lssp") # fstack-protector ------------------------------------ # Enable only if hardening is not disabled and ASAN is @@ -1656,14 +1869,18 @@ def security_hardening_cflags(hardening_flag, asan, debug, optimize, c_compiler, # ftrivial-auto-var-init ------------------------------ # Initialize local variables with a 0xAA pattern in clang debug builds. # Linux32 fails some xpcshell tests with -ftrivial-auto-var-init - linux32 = target.kernel == 'Linux' and target.cpu == 'x86' - if (c_compiler.type == 'clang' or c_compiler.type == 'clang-cl') and \ - c_compiler.version >= '8' and debug and not linux32: - if c_compiler.type == 'clang-cl': - flags.append('-Xclang') - js_flags.append('-Xclang') - flags.append('-ftrivial-auto-var-init=pattern') - js_flags.append('-ftrivial-auto-var-init=pattern') + linux32 = target.kernel == "Linux" and target.cpu == "x86" + if ( + (c_compiler.type == "clang" or c_compiler.type == "clang-cl") + and c_compiler.version >= "8" + and debug + and not linux32 + ): + if c_compiler.type == "clang-cl": + flags.append("-Xclang") + js_flags.append("-Xclang") + flags.append("-ftrivial-auto-var-init=pattern") + js_flags.append("-ftrivial-auto-var-init=pattern") # ASLR ------------------------------------------------ # ASLR (dynamicbase) is enabled by default in clang-cl; but the @@ -1673,9 +1890,12 @@ def security_hardening_cflags(hardening_flag, asan, debug, optimize, c_compiler, js_ldflags.append("-Wl,--dynamicbase") # Control Flow Guard (CFG) ---------------------------- - if c_compiler.type == 'clang-cl' and c_compiler.version >= '8' and \ - (target.cpu != 'aarch64' or c_compiler.version >= '8.0.1'): - if target.cpu == 'aarch64' and c_compiler.version >= '10.0.0': + if ( + c_compiler.type == "clang-cl" + and c_compiler.version >= "8" + and (target.cpu != "aarch64" or c_compiler.version >= "8.0.1") + ): + if target.cpu == "aarch64" and c_compiler.version >= "10.0.0": # The added checks in clang 10 make arm64 builds crash. 
(Bug 1639318) flags.append("-guard:cf,nochecks") js_flags.append("-guard:cf,nochecks") @@ -1697,7 +1917,7 @@ def security_hardening_cflags(hardening_flag, asan, debug, optimize, c_compiler, # Do not merge variables for ASAN; can detect some subtle bugs if asan: # clang-cl does not recognize the flag, it must be passed down to clang - if c_compiler.type == 'clang-cl': + if c_compiler.type == "clang-cl": flags.append("-Xclang") flags.append("-fno-common") @@ -1709,58 +1929,72 @@ def security_hardening_cflags(hardening_flag, asan, debug, optimize, c_compiler, ) -set_config('MOZ_HARDENING_CFLAGS', security_hardening_cflags.flags) -set_config('MOZ_HARDENING_LDFLAGS', security_hardening_cflags.ldflags) -set_config('MOZ_HARDENING_CFLAGS_JS', security_hardening_cflags.js_flags) -set_config('MOZ_HARDENING_LDFLAGS_JS', security_hardening_cflags.js_ldflags) +set_config("MOZ_HARDENING_CFLAGS", security_hardening_cflags.flags) +set_config("MOZ_HARDENING_LDFLAGS", security_hardening_cflags.ldflags) +set_config("MOZ_HARDENING_CFLAGS_JS", security_hardening_cflags.js_flags) +set_config("MOZ_HARDENING_LDFLAGS_JS", security_hardening_cflags.js_ldflags) # Frame pointers # ============================================================== @depends(c_compiler) def frame_pointer_flags(compiler): - if compiler.type == 'clang-cl': + if compiler.type == "clang-cl": return namespace( - enable=['-Oy-'], - disable=['-Oy'], + enable=["-Oy-"], + disable=["-Oy"], ) return namespace( - enable=['-fno-omit-frame-pointer', '-funwind-tables'], - disable=['-fomit-frame-pointer', '-funwind-tables'], + enable=["-fno-omit-frame-pointer", "-funwind-tables"], + disable=["-fomit-frame-pointer", "-funwind-tables"], ) -@depends(moz_optimize.optimize, moz_debug, target, - '--enable-memory-sanitizer', '--enable-address-sanitizer', - '--enable-undefined-sanitizer') +@depends( + moz_optimize.optimize, + moz_debug, + target, + "--enable-memory-sanitizer", + "--enable-address-sanitizer", + "--enable-undefined-sanitizer", +) def frame_pointer_default(optimize, debug, target, msan, asan, ubsan): - return bool(not optimize or debug or msan or asan or ubsan or \ - (target.os == 'WINNT' and target.cpu in ('x86', 'aarch64'))) + return bool( + not optimize + or debug + or msan + or asan + or ubsan + or (target.os == "WINNT" and target.cpu in ("x86", "aarch64")) + ) -option('--enable-frame-pointers', default=frame_pointer_default, - help='{Enable|Disable} frame pointers') +option( + "--enable-frame-pointers", + default=frame_pointer_default, + help="{Enable|Disable} frame pointers", +) -@depends('--enable-frame-pointers', frame_pointer_flags) +@depends("--enable-frame-pointers", frame_pointer_flags) def frame_pointer_flags(enable, flags): if enable: return flags.enable return flags.disable -set_config('MOZ_FRAMEPTR_FLAGS', frame_pointer_flags) +set_config("MOZ_FRAMEPTR_FLAGS", frame_pointer_flags) # nasm detection # ============================================================== -nasm = check_prog('NASM', ['nasm'], allow_missing=True, paths=toolchain_search_path) +nasm = check_prog("NASM", ["nasm"], allow_missing=True, paths=toolchain_search_path) @depends_if(nasm) -@checking('nasm version') +@checking("nasm version") def nasm_version(nasm): - (retcode, stdout, _) = get_cmd_output(nasm, '-v') + (retcode, stdout, _) = get_cmd_output(nasm, "-v") if retcode: # mac stub binary return None @@ -1779,30 +2013,31 @@ def nasm_minor_version(nasm_version): return str(nasm_version.minor) -set_config('NASM_MAJOR_VERSION', nasm_major_version) 
-set_config('NASM_MINOR_VERSION', nasm_minor_version) +set_config("NASM_MAJOR_VERSION", nasm_major_version) +set_config("NASM_MINOR_VERSION", nasm_minor_version) @depends(nasm, target) def nasm_asflags(nasm, target): if nasm: asflags = { - ('OSX', 'x86'): ['-f', 'macho32'], - ('OSX', 'x86_64'): ['-f', 'macho64'], - ('WINNT', 'x86'): ['-f', 'win32'], - ('WINNT', 'x86_64'): ['-f', 'win64'], + ("OSX", "x86"): ["-f", "macho32"], + ("OSX", "x86_64"): ["-f", "macho64"], + ("WINNT", "x86"): ["-f", "win32"], + ("WINNT", "x86_64"): ["-f", "win64"], }.get((target.os, target.cpu), None) if asflags is None: # We're assuming every x86 platform we support that's # not Windows or Mac is ELF. - if target.cpu == 'x86': - asflags = ['-f', 'elf32'] - elif target.cpu == 'x86_64': - asflags = ['-f', 'elf64'] + if target.cpu == "x86": + asflags = ["-f", "elf32"] + elif target.cpu == "x86_64": + asflags = ["-f", "elf64"] return asflags -set_config('NASM_ASFLAGS', nasm_asflags) +set_config("NASM_ASFLAGS", nasm_asflags) + @depends(nasm_asflags) def have_nasm(value): @@ -1815,43 +2050,45 @@ def have_yasm(yasm_asflags): if yasm_asflags: return True -set_config('HAVE_NASM', have_nasm) -set_config('HAVE_YASM', have_yasm) +set_config("HAVE_NASM", have_nasm) + +set_config("HAVE_YASM", have_yasm) # Until the YASM variable is not necessary in old-configure. -add_old_configure_assignment('YASM', have_yasm) +add_old_configure_assignment("YASM", have_yasm) # Code Coverage # ============================================================== -option('--enable-coverage', env='MOZ_CODE_COVERAGE', - help='Enable code coverage') +option("--enable-coverage", env="MOZ_CODE_COVERAGE", help="Enable code coverage") -@depends('--enable-coverage') +@depends("--enable-coverage") def code_coverage(value): if value: return True -set_config('MOZ_CODE_COVERAGE', code_coverage) -set_define('MOZ_CODE_COVERAGE', code_coverage) +set_config("MOZ_CODE_COVERAGE", code_coverage) +set_define("MOZ_CODE_COVERAGE", code_coverage) + @depends(target, c_compiler, vc_path, check_build_environment, when=code_coverage) -@imports('os') -@imports('re') -@imports(_from='__builtin__', _import='open') +@imports("os") +@imports("re") +@imports(_from="__builtin__", _import="open") def coverage_cflags(target, c_compiler, vc_path, build_env): - cflags = ['--coverage'] + cflags = ["--coverage"] # clang 11 no longer accepts this flag (its behavior became the default) - if c_compiler.type in ('clang', 'clang-cl') and c_compiler.version < '11.0.0': + if c_compiler.type in ("clang", "clang-cl") and c_compiler.version < "11.0.0": cflags += [ - '-Xclang', '-coverage-no-function-names-in-data', + "-Xclang", + "-coverage-no-function-names-in-data", ] - if target.os == 'WINNT' and c_compiler.type == 'clang-cl': + if target.os == "WINNT" and c_compiler.type == "clang-cl": # The Visual Studio directory is the parent of the Visual C++ directory. 
vs_path = os.path.dirname(vc_path) @@ -1860,46 +2097,49 @@ def coverage_cflags(target, c_compiler, vc_path, build_env): vs_path = os.path.realpath(vs_path) cflags += [ - '-fprofile-exclude-files=^{}.*$'.format(re.escape(vs_path)), + "-fprofile-exclude-files=^{}.*$".format(re.escape(vs_path)), ] - response_file_path = os.path.join(build_env.topobjdir, 'code_coverage_cflags') + response_file_path = os.path.join(build_env.topobjdir, "code_coverage_cflags") - with open(response_file_path, 'w') as f: - f.write(' '.join(cflags)) + with open(response_file_path, "w") as f: + f.write(" ".join(cflags)) - return ['@{}'.format(response_file_path)] + return ["@{}".format(response_file_path)] -set_config('COVERAGE_CFLAGS', coverage_cflags) + +set_config("COVERAGE_CFLAGS", coverage_cflags) # ============================================================== -option(env='RUSTFLAGS', - nargs=1, - help='Rust compiler flags') -set_config('RUSTFLAGS', depends('RUSTFLAGS')(lambda flags: flags)) +option(env="RUSTFLAGS", nargs=1, help="Rust compiler flags") +set_config("RUSTFLAGS", depends("RUSTFLAGS")(lambda flags: flags)) # Rust compiler flags # ============================================================== -option(env='RUSTC_OPT_LEVEL', - nargs=1, - help='Rust compiler optimization level (-C opt-level=%s)') +option( + env="RUSTC_OPT_LEVEL", + nargs=1, + help="Rust compiler optimization level (-C opt-level=%s)", +) # --enable-release kicks in full optimizations. -imply_option('RUSTC_OPT_LEVEL', '2', when='--enable-release') +imply_option("RUSTC_OPT_LEVEL", "2", when="--enable-release") -@depends('RUSTC_OPT_LEVEL', moz_optimize) +@depends("RUSTC_OPT_LEVEL", moz_optimize) def rustc_opt_level(opt_level_option, moz_optimize): if opt_level_option: return opt_level_option[0] else: - return '1' if moz_optimize.optimize else '0' + return "1" if moz_optimize.optimize else "0" -@depends(rustc_opt_level, debug_rust, '--enable-debug-symbols', '--enable-frame-pointers') +@depends( + rustc_opt_level, debug_rust, "--enable-debug-symbols", "--enable-frame-pointers" +) def rust_compile_flags(opt_level, debug_rust, debug_symbols, frame_pointers): # Cargo currently supports only two interesting profiles for building: # development and release. Those map (roughly) to --enable-debug and @@ -1918,27 +2158,26 @@ def rust_compile_flags(opt_level, debug_rust, debug_symbols, frame_pointers): # opt-level=0 implies -C debug-assertions, which may not be desired # unless Rust debugging is enabled. 
- if opt_level == '0' and not debug_rust: + if opt_level == "0" and not debug_rust: debug_assertions = False if debug_symbols: - debug_info = '2' + debug_info = "2" opts = [] if opt_level is not None: - opts.append('opt-level=%s' % opt_level) + opts.append("opt-level=%s" % opt_level) if debug_assertions is not None: - opts.append('debug-assertions=%s' % - ('yes' if debug_assertions else 'no')) + opts.append("debug-assertions=%s" % ("yes" if debug_assertions else "no")) if debug_info is not None: - opts.append('debuginfo=%s' % debug_info) + opts.append("debuginfo=%s" % debug_info) if frame_pointers: - opts.append('force-frame-pointers=yes') + opts.append("force-frame-pointers=yes") flags = [] for opt in opts: - flags.extend(['-C', opt]) + flags.extend(["-C", opt]) return flags @@ -1947,38 +2186,52 @@ def rust_compile_flags(opt_level, debug_rust, debug_symbols, frame_pointers): # ============================================================== -option('--disable-cargo-incremental', - help='Disable incremental rust compilation.') - -@depends(rustc_opt_level, debug_rust, 'MOZ_AUTOMATION', code_coverage, - '--disable-cargo-incremental', using_sccache, 'RUSTC_WRAPPER') -@imports('os') -def cargo_incremental(opt_level, debug_rust, automation, code_coverage, - enabled, using_sccache, rustc_wrapper): +option("--disable-cargo-incremental", help="Disable incremental rust compilation.") + + +@depends( + rustc_opt_level, + debug_rust, + "MOZ_AUTOMATION", + code_coverage, + "--disable-cargo-incremental", + using_sccache, + "RUSTC_WRAPPER", +) +@imports("os") +def cargo_incremental( + opt_level, + debug_rust, + automation, + code_coverage, + enabled, + using_sccache, + rustc_wrapper, +): """Return a value for the CARGO_INCREMENTAL environment variable.""" if not enabled: - return '0' + return "0" # We never want to use incremental compilation in automation. sccache # handles our automation use case much better than incremental compilation # would. if automation: - return '0' + return "0" # Coverage instrumentation doesn't play well with incremental compilation # https://github.com/rust-lang/rust/issues/50203. if code_coverage: - return '0' + return "0" # Incremental compilation doesn't work as well as it should, and if we're # using sccache, it's better to use sccache than incremental compilation. if not using_sccache and rustc_wrapper: rustc_wrapper = os.path.basename(rustc_wrapper[0]) - if os.path.splitext(rustc_wrapper)[0].lower() == 'sccache': + if os.path.splitext(rustc_wrapper)[0].lower() == "sccache": using_sccache = True if using_sccache: - return '0' + return "0" # Incremental compilation is automatically turned on for debug builds, so # we don't need to do anything special here. @@ -1990,14 +2243,14 @@ def cargo_incremental(opt_level, debug_rust, automation, code_coverage, # Let's assume that if Rust code is using -O2 or higher, we shouldn't # be using incremental compilation, because we'd be imposing a # significant runtime cost. - if opt_level not in ('0', '1'): + if opt_level not in ("0", "1"): return # We're clear to use incremental compilation! - return '1' + return "1" -set_config('CARGO_INCREMENTAL', cargo_incremental) +set_config("CARGO_INCREMENTAL", cargo_incremental) # Linker detection # ============================================================== @@ -2019,46 +2272,60 @@ set_config('CARGO_INCREMENTAL', cargo_incremental) # https://bugzilla.mozilla.org/show_bug.cgi?id=1563654#c2. 
@depends(target) def is_linker_option_enabled(target): - if target.kernel not in ('WINNT', 'SunOS'): + if target.kernel not in ("WINNT", "SunOS"): return True -option('--enable-gold', - env='MOZ_FORCE_GOLD', - help='Enable GNU Gold Linker when it is not already the default', - when=is_linker_option_enabled) +option( + "--enable-gold", + env="MOZ_FORCE_GOLD", + help="Enable GNU Gold Linker when it is not already the default", + when=is_linker_option_enabled, +) + +imply_option("--enable-linker", "gold", when="--enable-gold") -imply_option('--enable-linker', 'gold', when='--enable-gold') @depends(target, developer_options) def enable_linker_default(target, developer_options): # x86-64 gold has bugs in how it lays out .note.* sections. See bug 1573820. # x86-32 gold has a bug when assembly files are built. See bug 1651699. # lld is faster, so prefer that for developer builds. - if target.os == 'Android' and target.cpu in ('x86', 'x86_64'): - return 'lld' if developer_options else 'bfd' + if target.os == "Android" and target.cpu in ("x86", "x86_64"): + return "lld" if developer_options else "bfd" -option('--enable-linker', nargs=1, - help='Select the linker {bfd, gold, ld64, lld, lld-*}{|}', - default=enable_linker_default, - when=is_linker_option_enabled) +option( + "--enable-linker", + nargs=1, + help="Select the linker {bfd, gold, ld64, lld, lld-*}{|}", + default=enable_linker_default, + when=is_linker_option_enabled, +) # No-op to enable depending on --enable-linker from default_elfhack in # toolkit/moz.configure. -@depends('--enable-linker', when=is_linker_option_enabled) +@depends("--enable-linker", when=is_linker_option_enabled) def enable_linker(linker): return linker -@depends('--enable-linker', c_compiler, developer_options, '--enable-gold', - extra_toolchain_flags, target, when=is_linker_option_enabled) -@checking('for linker', lambda x: x.KIND) -@imports('os') -@imports('shutil') -def select_linker(linker, c_compiler, developer_options, enable_gold, - toolchain_flags, target): +@depends( + "--enable-linker", + c_compiler, + developer_options, + "--enable-gold", + extra_toolchain_flags, + target, + when=is_linker_option_enabled, +) +@checking("for linker", lambda x: x.KIND) +@imports("os") +@imports("shutil") +def select_linker( + linker, c_compiler, developer_options, enable_gold, toolchain_flags, target +): if linker: linker = linker[0] @@ -2066,28 +2333,28 @@ def select_linker(linker, c_compiler, developer_options, enable_gold, linker = None def is_valid_linker(linker): - if target.kernel == 'Darwin': - valid_linkers = ('ld64', 'lld') + if target.kernel == "Darwin": + valid_linkers = ("ld64", "lld") else: - valid_linkers = ('bfd', 'gold', 'lld') + valid_linkers = ("bfd", "gold", "lld") if linker in valid_linkers: return True - if 'lld' in valid_linkers and linker.startswith('lld-'): + if "lld" in valid_linkers and linker.startswith("lld-"): return True return False if linker and not is_valid_linker(linker): # Check that we are trying to use a supported linker - die('Unsupported linker ' + linker) + die("Unsupported linker " + linker) # Check the kind of linker - version_check = ['-Wl,--version'] + version_check = ["-Wl,--version"] cmd_base = c_compiler.wrapper + [c_compiler.compiler] + c_compiler.flags def try_linker(linker): # Generate the compiler flag - if linker == 'ld64': - linker_flag = ['-fuse-ld=ld'] + if linker == "ld64": + linker_flag = ["-fuse-ld=ld"] elif linker: linker_flag = ["-fuse-ld=" + linker] else: @@ -2103,29 +2370,29 @@ def select_linker(linker, c_compiler, 
developer_options, enable_gold, # Instead, abuse its LD_PRINT_OPTIONS feature to detect a message # specific to it on stderr when it fails to process --version. env = dict(os.environ) - env['LD_PRINT_OPTIONS'] = '1' + env["LD_PRINT_OPTIONS"] = "1" # Some locales might not print out the strings we are looking for, so # ensure consistent output. - env['LC_ALL'] = 'C' + env["LC_ALL"] = "C" retcode, stdout, stderr = get_cmd_output(*cmd, env=env) - if retcode == 1 and 'Logging ld64 options' in stderr: - kind = 'ld64' + if retcode == 1 and "Logging ld64 options" in stderr: + kind = "ld64" elif retcode != 0: return None - elif 'GNU ld' in stdout: + elif "GNU ld" in stdout: # We are using the normal linker - kind = 'bfd' + kind = "bfd" - elif 'GNU gold' in stdout: - kind = 'gold' + elif "GNU gold" in stdout: + kind = "gold" - elif 'LLD' in stdout: - kind = 'lld' + elif "LLD" in stdout: + kind = "lld" else: - kind = 'unknown' + kind = "unknown" return namespace( KIND=kind, @@ -2138,13 +2405,17 @@ def select_linker(linker, c_compiler, developer_options, enable_gold, die("Could not use {} as linker".format(linker)) die("Failed to find a linker") - if (linker is None and enable_gold.origin == 'default' and - developer_options and result.KIND in ('bfd', 'gold')): + if ( + linker is None + and enable_gold.origin == "default" + and developer_options + and result.KIND in ("bfd", "gold") + ): # try and use lld if available. - tried = try_linker('lld') - if result.KIND != 'gold' and (tried is None or tried.KIND != 'lld'): - tried = try_linker('gold') - if tried is None or tried.KIND != 'gold': + tried = try_linker("lld") + if result.KIND != "gold" and (tried is None or tried.KIND != "lld"): + tried = try_linker("gold") + if tried is None or tried.KIND != "gold": tried = None if tried: result = tried @@ -2156,22 +2427,22 @@ def select_linker(linker, c_compiler, developer_options, enable_gold, return result -set_config('LINKER_KIND', select_linker.KIND) +set_config("LINKER_KIND", select_linker.KIND) @depends_if(select_linker, macos_sdk) def linker_ldflags(linker, macos_sdk): flags = list((linker and linker.LINKER_FLAG) or []) if macos_sdk: - if linker and linker.KIND == 'ld64': - flags.append('-Wl,-syslibroot,%s' % macos_sdk) + if linker and linker.KIND == "ld64": + flags.append("-Wl,-syslibroot,%s" % macos_sdk) else: - flags.append('-Wl,--sysroot=%s' % macos_sdk) + flags.append("-Wl,--sysroot=%s" % macos_sdk) return flags -add_old_configure_assignment('LINKER_LDFLAGS', linker_ldflags) +add_old_configure_assignment("LINKER_LDFLAGS", linker_ldflags) # There's a wrinkle with MinGW: linker configuration is not enabled, so @@ -2179,38 +2450,34 @@ add_old_configure_assignment('LINKER_LDFLAGS', linker_ldflags) @depends(select_linker, target, c_compiler) def gcc_use_gnu_ld(select_linker, target, c_compiler): if select_linker is not None: - return select_linker.KIND in ('bfd', 'gold', 'lld') - if target.kernel == 'WINNT' and c_compiler.type == 'clang': + return select_linker.KIND in ("bfd", "gold", "lld") + if target.kernel == "WINNT" and c_compiler.type == "clang": return True return None # GCC_USE_GNU_LD=1 means the linker is command line compatible with GNU ld. 
-set_config('GCC_USE_GNU_LD', gcc_use_gnu_ld) -add_old_configure_assignment('GCC_USE_GNU_LD', gcc_use_gnu_ld) +set_config("GCC_USE_GNU_LD", gcc_use_gnu_ld) +add_old_configure_assignment("GCC_USE_GNU_LD", gcc_use_gnu_ld) # Assembler detection # ============================================================== -option(env='AS', nargs=1, help='Path to the assembler') +option(env="AS", nargs=1, help="Path to the assembler") + @depends(target, c_compiler) def as_info(target, c_compiler): - if c_compiler.type == 'clang-cl': + if c_compiler.type == "clang-cl": ml = { - 'x86': 'ml.exe', - 'x86_64': 'ml64.exe', - 'aarch64': 'armasm64.exe', + "x86": "ml.exe", + "x86_64": "ml64.exe", + "aarch64": "armasm64.exe", }.get(target.cpu) - return namespace( - type='masm', - names=(ml, ) - ) + return namespace(type="masm", names=(ml,)) # When building with anything but clang-cl, we just use the C compiler as the assembler. - return namespace( - type='gcc', - names=(c_compiler.compiler, ) - ) + return namespace(type="gcc", names=(c_compiler.compiler,)) + # One would expect the assembler to be specified merely as a program. But in # cases where the assembler is passed down into js/, it can be specified in @@ -2218,152 +2485,176 @@ def as_info(target, c_compiler): # permit the same behavior in general, even though it seems somewhat unusual. # So we have to do the same sort of dance as we did above with # `provided_compiler`. -provided_assembler = provided_program('AS') -assembler = check_prog('_AS', input=provided_assembler.program, - what='the assembler', progs=as_info.names, - paths=toolchain_search_path) +provided_assembler = provided_program("AS") +assembler = check_prog( + "_AS", + input=provided_assembler.program, + what="the assembler", + progs=as_info.names, + paths=toolchain_search_path, +) + @depends(as_info, assembler, provided_assembler, c_compiler) def as_with_flags(as_info, assembler, provided_assembler, c_compiler): if provided_assembler: - return provided_assembler.wrapper + \ - [assembler] + \ - provided_assembler.flags + return provided_assembler.wrapper + [assembler] + provided_assembler.flags - if as_info.type == 'masm': + if as_info.type == "masm": return assembler - assert as_info.type == 'gcc' + assert as_info.type == "gcc" # Need to add compiler wrappers and flags as appropriate. return c_compiler.wrapper + [assembler] + c_compiler.flags -add_old_configure_assignment('AS', as_with_flags) -add_old_configure_assignment('ac_cv_prog_AS', as_with_flags) +add_old_configure_assignment("AS", as_with_flags) +add_old_configure_assignment("ac_cv_prog_AS", as_with_flags) @depends(assembler, c_compiler, extra_toolchain_flags) -@imports('subprocess') -@imports(_from='os', _import='devnull') +@imports("subprocess") +@imports(_from="os", _import="devnull") def gnu_as(assembler, c_compiler, toolchain_flags): # clang uses a compatible GNU assembler. - if c_compiler.type == 'clang': + if c_compiler.type == "clang": return True - if c_compiler.type == 'gcc': + if c_compiler.type == "gcc": cmd = [assembler] + c_compiler.flags if toolchain_flags: cmd += toolchain_flags - cmd += ['-Wa,--version', '-c', '-o', devnull, '-x', 'assembler', '-'] + cmd += ["-Wa,--version", "-c", "-o", devnull, "-x", "assembler", "-"] # We don't actually have to provide any input on stdin, `Popen.communicate` will # close the stdin pipe. # clang will error if it uses its integrated assembler for this target, # so handle failures gracefully. 
- if 'GNU' in check_cmd_output(*cmd, stdin=subprocess.PIPE, onerror=lambda: ''): + if "GNU" in check_cmd_output(*cmd, stdin=subprocess.PIPE, onerror=lambda: ""): return True -set_config('GNU_AS', gnu_as) -add_old_configure_assignment('GNU_AS', gnu_as) +set_config("GNU_AS", gnu_as) +add_old_configure_assignment("GNU_AS", gnu_as) @depends(as_info, target) def as_dash_c_flag(as_info, target): # armasm64 doesn't understand -c. - if as_info.type == 'masm' and target.cpu == 'aarch64': - return '' + if as_info.type == "masm" and target.cpu == "aarch64": + return "" else: - return '-c' + return "-c" -set_config('AS_DASH_C_FLAG', as_dash_c_flag) +set_config("AS_DASH_C_FLAG", as_dash_c_flag) @depends(as_info, target) def as_outoption(as_info, target): # The uses of ASOUTOPTION depend on the spacing for -o/-Fo. - if as_info.type == 'masm' and target.cpu != 'aarch64': - return '-Fo' + if as_info.type == "masm" and target.cpu != "aarch64": + return "-Fo" - return '-o ' + return "-o " -set_config('ASOUTOPTION', as_outoption) +set_config("ASOUTOPTION", as_outoption) # clang plugin handling # ============================================================== -option('--enable-clang-plugin', env='ENABLE_CLANG_PLUGIN', - help="Enable building with the Clang plugin (gecko specific static analyzers)") +option( + "--enable-clang-plugin", + env="ENABLE_CLANG_PLUGIN", + help="Enable building with the Clang plugin (gecko specific static analyzers)", +) -add_old_configure_assignment('ENABLE_CLANG_PLUGIN', - depends_if('--enable-clang-plugin')(lambda _: True)) +add_old_configure_assignment( + "ENABLE_CLANG_PLUGIN", depends_if("--enable-clang-plugin")(lambda _: True) +) -@depends(host_c_compiler, c_compiler, when='--enable-clang-plugin') + +@depends(host_c_compiler, c_compiler, when="--enable-clang-plugin") def llvm_config(host_c_compiler, c_compiler): clang = None for compiler in (host_c_compiler, c_compiler): - if compiler and compiler.type == 'clang': + if compiler and compiler.type == "clang": clang = compiler.compiler break - elif compiler and compiler.type == 'clang-cl': - clang = os.path.join(os.path.dirname(compiler.compiler), 'clang') + elif compiler and compiler.type == "clang-cl": + clang = os.path.join(os.path.dirname(compiler.compiler), "clang") break if not clang: - die('Cannot --enable-clang-plugin when not building with clang') - llvm_config = 'llvm-config' - out = check_cmd_output(clang, '--print-prog-name=llvm-config', - onerror=lambda: None) + die("Cannot --enable-clang-plugin when not building with clang") + llvm_config = "llvm-config" + out = check_cmd_output(clang, "--print-prog-name=llvm-config", onerror=lambda: None) if out: llvm_config = out.rstrip() return (llvm_config,) -llvm_config = check_prog('LLVM_CONFIG', llvm_config, what='llvm-config', - when='--enable-clang-plugin', - paths=toolchain_search_path) +llvm_config = check_prog( + "LLVM_CONFIG", + llvm_config, + what="llvm-config", + when="--enable-clang-plugin", + paths=toolchain_search_path, +) + +add_old_configure_assignment("LLVM_CONFIG", llvm_config) -add_old_configure_assignment('LLVM_CONFIG', llvm_config) +option( + "--enable-clang-plugin-alpha", + env="ENABLE_CLANG_PLUGIN_ALPHA", + help="Enable static analysis with clang-plugin alpha checks.", +) -option('--enable-clang-plugin-alpha', env='ENABLE_CLANG_PLUGIN_ALPHA', - help='Enable static analysis with clang-plugin alpha checks.') -@depends('--enable-clang-plugin', '--enable-clang-plugin-alpha') +@depends("--enable-clang-plugin", "--enable-clang-plugin-alpha") def 
check_clang_plugin_alpha(enable_clang_plugin, enable_clang_plugin_alpha): if enable_clang_plugin_alpha: if enable_clang_plugin: return True die("Cannot enable clang-plugin alpha checkers without --enable-clang-plugin.") -add_old_configure_assignment('ENABLE_CLANG_PLUGIN_ALPHA', check_clang_plugin_alpha) -set_define('MOZ_CLANG_PLUGIN_ALPHA', check_clang_plugin_alpha) -option('--enable-mozsearch-plugin', env='ENABLE_MOZSEARCH_PLUGIN', - help="Enable building with the mozsearch indexer plugin") +add_old_configure_assignment("ENABLE_CLANG_PLUGIN_ALPHA", check_clang_plugin_alpha) +set_define("MOZ_CLANG_PLUGIN_ALPHA", check_clang_plugin_alpha) -add_old_configure_assignment('ENABLE_MOZSEARCH_PLUGIN', - depends_if('--enable-mozsearch-plugin')(lambda _: True)) +option( + "--enable-mozsearch-plugin", + env="ENABLE_MOZSEARCH_PLUGIN", + help="Enable building with the mozsearch indexer plugin", +) + +add_old_configure_assignment( + "ENABLE_MOZSEARCH_PLUGIN", depends_if("--enable-mozsearch-plugin")(lambda _: True) +) # Libstdc++ compatibility hacks # ============================================================== # -option('--enable-stdcxx-compat', env='MOZ_STDCXX_COMPAT', - help='Enable compatibility with older libstdc++') +option( + "--enable-stdcxx-compat", + env="MOZ_STDCXX_COMPAT", + help="Enable compatibility with older libstdc++", +) @template def libstdcxx_version(var, compiler): - @depends(compiler, when='--enable-stdcxx-compat') + @depends(compiler, when="--enable-stdcxx-compat") @checking(var, lambda v: v and "GLIBCXX_%s" % v.dotted) - @imports(_from='mozbuild.configure.libstdcxx', _import='find_version') - @imports(_from='__builtin__', _import='Exception') + @imports(_from="mozbuild.configure.libstdcxx", _import="find_version") + @imports(_from="__builtin__", _import="Exception") def version(compiler): try: result = find_version( - compiler.wrapper + [compiler.compiler] + compiler.flags) + compiler.wrapper + [compiler.compiler] + compiler.flags + ) except Exception: die("Couldn't determine libstdc++ version") if result: @@ -2377,61 +2668,70 @@ def libstdcxx_version(var, compiler): add_gcc_flag( - '-D_GLIBCXX_USE_CXX11_ABI=0', cxx_compiler, - when=libstdcxx_version( - 'MOZ_LIBSTDCXX_TARGET_VERSION', cxx_compiler)) + "-D_GLIBCXX_USE_CXX11_ABI=0", + cxx_compiler, + when=libstdcxx_version("MOZ_LIBSTDCXX_TARGET_VERSION", cxx_compiler), +) add_gcc_flag( - '-D_GLIBCXX_USE_CXX11_ABI=0', host_cxx_compiler, - when=libstdcxx_version( - 'MOZ_LIBSTDCXX_HOST_VERSION', host_cxx_compiler)) + "-D_GLIBCXX_USE_CXX11_ABI=0", + host_cxx_compiler, + when=libstdcxx_version("MOZ_LIBSTDCXX_HOST_VERSION", host_cxx_compiler), +) # Support various fuzzing options # ============================================================== -option('--enable-fuzzing', help='Enable fuzzing support') +option("--enable-fuzzing", help="Enable fuzzing support") + -@depends('--enable-fuzzing') +@depends("--enable-fuzzing") def enable_fuzzing(value): if value: return True -@depends(try_compile(body='__AFL_COMPILER;', - check_msg='for AFL compiler', - when='--enable-fuzzing')) + +@depends( + try_compile( + body="__AFL_COMPILER;", check_msg="for AFL compiler", when="--enable-fuzzing" + ) +) def enable_aflfuzzer(afl): if afl: return True -@depends(enable_fuzzing, - enable_aflfuzzer, - c_compiler, - target) + +@depends(enable_fuzzing, enable_aflfuzzer, c_compiler, target) def enable_libfuzzer(fuzzing, afl, c_compiler, target): - if fuzzing and not afl and c_compiler.type == 'clang' and target.os != 'Android': + if fuzzing and not afl and 
c_compiler.type == "clang" and target.os != "Android": return True -@depends(enable_fuzzing, - enable_aflfuzzer, - enable_libfuzzer) + +@depends(enable_fuzzing, enable_aflfuzzer, enable_libfuzzer) def enable_fuzzing_interfaces(fuzzing, afl, libfuzzer): if fuzzing and (afl or libfuzzer): return True -set_config('FUZZING', enable_fuzzing) -set_define('FUZZING', enable_fuzzing) -set_config('LIBFUZZER', enable_libfuzzer) -set_define('LIBFUZZER', enable_libfuzzer) -add_old_configure_assignment('LIBFUZZER', enable_libfuzzer) +set_config("FUZZING", enable_fuzzing) +set_define("FUZZING", enable_fuzzing) + +set_config("LIBFUZZER", enable_libfuzzer) +set_define("LIBFUZZER", enable_libfuzzer) +add_old_configure_assignment("LIBFUZZER", enable_libfuzzer) -set_config('FUZZING_INTERFACES', enable_fuzzing_interfaces) -set_define('FUZZING_INTERFACES', enable_fuzzing_interfaces) -add_old_configure_assignment('FUZZING_INTERFACES', enable_fuzzing_interfaces) +set_config("FUZZING_INTERFACES", enable_fuzzing_interfaces) +set_define("FUZZING_INTERFACES", enable_fuzzing_interfaces) +add_old_configure_assignment("FUZZING_INTERFACES", enable_fuzzing_interfaces) -@depends(c_compiler.try_compile(flags=['-fsanitize=fuzzer-no-link'], - when=enable_fuzzing, - check_msg='whether the C compiler supports -fsanitize=fuzzer-no-link'), tsan) +@depends( + c_compiler.try_compile( + flags=["-fsanitize=fuzzer-no-link"], + when=enable_fuzzing, + check_msg="whether the C compiler supports -fsanitize=fuzzer-no-link", + ), + tsan, +) def libfuzzer_flags(value, tsan): if tsan: # With ThreadSanitizer, we should not use any libFuzzer instrumentation because @@ -2445,19 +2745,20 @@ def libfuzzer_flags(value, tsan): if value: no_link_flag_supported = True # recommended for (and only supported by) clang >= 6 - use_flags = ['-fsanitize=fuzzer-no-link'] + use_flags = ["-fsanitize=fuzzer-no-link"] else: no_link_flag_supported = False - use_flags = ['-fsanitize-coverage=trace-pc-guard,trace-cmp'] + use_flags = ["-fsanitize-coverage=trace-pc-guard,trace-cmp"] return namespace( no_link_flag_supported=no_link_flag_supported, use_flags=use_flags, ) -set_config('HAVE_LIBFUZZER_FLAG_FUZZER_NO_LINK', libfuzzer_flags.no_link_flag_supported) -set_config('LIBFUZZER_FLAGS', libfuzzer_flags.use_flags) -add_old_configure_assignment('LIBFUZZER_FLAGS', libfuzzer_flags.use_flags) + +set_config("HAVE_LIBFUZZER_FLAG_FUZZER_NO_LINK", libfuzzer_flags.no_link_flag_supported) +set_config("LIBFUZZER_FLAGS", libfuzzer_flags.use_flags) +add_old_configure_assignment("LIBFUZZER_FLAGS", libfuzzer_flags.use_flags) # Shared library building # ============================================================== @@ -2465,43 +2766,56 @@ add_old_configure_assignment('LIBFUZZER_FLAGS', libfuzzer_flags.use_flags) # XXX: The use of makefile constructs in these variables is awful. 
@depends(target, c_compiler) def make_shared_library(target, compiler): - if target.os == 'WINNT': - if compiler.type == 'gcc': + if target.os == "WINNT": + if compiler.type == "gcc": return namespace( - mkshlib=['$(CXX)', '$(DSO_LDOPTS)', '-o', '$@'], - mkcshlib=['$(CC)', '$(DSO_LDOPTS)', '-o', '$@'], + mkshlib=["$(CXX)", "$(DSO_LDOPTS)", "-o", "$@"], + mkcshlib=["$(CC)", "$(DSO_LDOPTS)", "-o", "$@"], ) - elif compiler.type == 'clang': + elif compiler.type == "clang": return namespace( - mkshlib=['$(CXX)', '$(DSO_LDOPTS)', '-Wl,-pdb,$(LINK_PDBFILE)', '-o', '$@'], - mkcshlib=['$(CC)', '$(DSO_LDOPTS)', '-Wl,-pdb,$(LINK_PDBFILE)', '-o', '$@'], + mkshlib=[ + "$(CXX)", + "$(DSO_LDOPTS)", + "-Wl,-pdb,$(LINK_PDBFILE)", + "-o", + "$@", + ], + mkcshlib=[ + "$(CC)", + "$(DSO_LDOPTS)", + "-Wl,-pdb,$(LINK_PDBFILE)", + "-o", + "$@", + ], ) else: linker = [ - '$(LINKER)', - '-NOLOGO', '-DLL', - '-OUT:$@', - '-PDB:$(LINK_PDBFILE)', - '$(DSO_LDOPTS)' + "$(LINKER)", + "-NOLOGO", + "-DLL", + "-OUT:$@", + "-PDB:$(LINK_PDBFILE)", + "$(DSO_LDOPTS)", ] return namespace( mkshlib=linker, mkcshlib=linker, ) - cc = ['$(CC)', '$(COMPUTED_C_LDFLAGS)'] - cxx = ['$(CXX)', '$(COMPUTED_CXX_LDFLAGS)'] - flags = ['$(PGO_CFLAGS)', '$(DSO_PIC_CFLAGS)', '$(DSO_LDOPTS)'] - output = ['-o', '$@'] + cc = ["$(CC)", "$(COMPUTED_C_LDFLAGS)"] + cxx = ["$(CXX)", "$(COMPUTED_CXX_LDFLAGS)"] + flags = ["$(PGO_CFLAGS)", "$(DSO_PIC_CFLAGS)", "$(DSO_LDOPTS)"] + output = ["-o", "$@"] - if target.kernel == 'Darwin': + if target.kernel == "Darwin": soname = [] - elif target.os == 'NetBSD': - soname = ['-Wl,-soname,$(DSO_SONAME)'] + elif target.os == "NetBSD": + soname = ["-Wl,-soname,$(DSO_SONAME)"] else: - assert compiler.type in ('gcc', 'clang') + assert compiler.type in ("gcc", "clang") - soname = ['-Wl,-h,$(DSO_SONAME)'] + soname = ["-Wl,-h,$(DSO_SONAME)"] return namespace( mkshlib=cxx + flags + soname + output, @@ -2509,49 +2823,47 @@ def make_shared_library(target, compiler): ) -set_config('MKSHLIB', make_shared_library.mkshlib) -set_config('MKCSHLIB', make_shared_library.mkcshlib) +set_config("MKSHLIB", make_shared_library.mkshlib) +set_config("MKCSHLIB", make_shared_library.mkcshlib) @depends(c_compiler, toolchain_prefix, when=target_is_windows) def rc_names(c_compiler, toolchain_prefix): - if c_compiler.type in ('gcc', 'clang'): - return tuple('%s%s' % (p, 'windres') - for p in ('',) + (toolchain_prefix or ())) - return ('llvm-rc',) + if c_compiler.type in ("gcc", "clang"): + return tuple("%s%s" % (p, "windres") for p in ("",) + (toolchain_prefix or ())) + return ("llvm-rc",) -check_prog('RC', rc_names, paths=toolchain_search_path, when=target_is_windows) +check_prog("RC", rc_names, paths=toolchain_search_path, when=target_is_windows) @depends(link, toolchain_prefix, c_compiler) def ar_config(link, toolchain_prefix, c_compiler): - if c_compiler.type == 'clang-cl' and link: + if c_compiler.type == "clang-cl" and link: # if LINKER is set, it's either for lld-link or link - if 'lld-link' in link: + if "lld-link" in link: return namespace( - names=('llvm-lib',), - flags=('-llvmlibthin', '-out:$@'), + names=("llvm-lib",), + flags=("-llvmlibthin", "-out:$@"), ) else: return namespace( - names=('lib',), - flags=('-NOLOGO', '-OUT:$@'), + names=("lib",), + flags=("-NOLOGO", "-OUT:$@"), ) return namespace( - names=tuple('%s%s' % (p, 'ar') - for p in (toolchain_prefix or ()) + ('',)), - flags=('crs', '$@'), + names=tuple("%s%s" % (p, "ar") for p in (toolchain_prefix or ()) + ("",)), + flags=("crs", "$@"), ) -ar = check_prog('AR', 
ar_config.names, paths=toolchain_search_path) +ar = check_prog("AR", ar_config.names, paths=toolchain_search_path) -add_old_configure_assignment('AR', ar) +add_old_configure_assignment("AR", ar) -set_config('AR_FLAGS', ar_config.flags) +set_config("AR_FLAGS", ar_config.flags) -option('--enable-cpp-rtti', help='Enable C++ RTTI') +option("--enable-cpp-rtti", help="Enable C++ RTTI") -add_old_configure_assignment('_MOZ_USE_RTTI', '1', when='--enable-cpp-rtti') +add_old_configure_assignment("_MOZ_USE_RTTI", "1", when="--enable-cpp-rtti") diff --git a/build/moz.configure/update-programs.configure b/build/moz.configure/update-programs.configure index e3bb7b4642953c..d5a75b9ac8eb65 100644 --- a/build/moz.configure/update-programs.configure +++ b/build/moz.configure/update-programs.configure @@ -7,43 +7,55 @@ # Verify MAR signatures # ============================================================== -option('--disable-verify-mar', help='Disable verifying MAR signatures') +option("--disable-verify-mar", help="Disable verifying MAR signatures") -set_define('MOZ_VERIFY_MAR_SIGNATURE', - depends_if('--enable-verify-mar')(lambda _: True)) -set_config('MOZ_VERIFY_MAR_SIGNATURE', - depends_if('--enable-verify-mar')(lambda _: True)) +set_define( + "MOZ_VERIFY_MAR_SIGNATURE", depends_if("--enable-verify-mar")(lambda _: True) +) +set_config( + "MOZ_VERIFY_MAR_SIGNATURE", depends_if("--enable-verify-mar")(lambda _: True) +) # Maintenance service (Windows only) # ============================================================== -option('--enable-maintenance-service', - when=target_is_windows, default=target_is_windows, - help='{Enable|Disable} building of maintenance service') - -set_define('MOZ_MAINTENANCE_SERVICE', - depends_if('--enable-maintenance-service', - when=target_is_windows)(lambda _: True)) -set_config('MOZ_MAINTENANCE_SERVICE', - depends_if('--enable-maintenance-service', - when=target_is_windows)(lambda _: True)) +option( + "--enable-maintenance-service", + when=target_is_windows, + default=target_is_windows, + help="{Enable|Disable} building of maintenance service", +) + +set_define( + "MOZ_MAINTENANCE_SERVICE", + depends_if("--enable-maintenance-service", when=target_is_windows)(lambda _: True), +) +set_config( + "MOZ_MAINTENANCE_SERVICE", + depends_if("--enable-maintenance-service", when=target_is_windows)(lambda _: True), +) # Update agent (currently Windows only) # This is an independent task that runs on a schedule to # check for, download, and install updates. # ============================================================== -option('--enable-update-agent', - when=target_is_windows, default=False, - help='{Enable|Disable} building update agent') +option( + "--enable-update-agent", + when=target_is_windows, + default=False, + help="{Enable|Disable} building update agent", +) -set_define('MOZ_UPDATE_AGENT', - depends_if('--enable-update-agent', - when=target_is_windows)(lambda _: True)) +set_define( + "MOZ_UPDATE_AGENT", + depends_if("--enable-update-agent", when=target_is_windows)(lambda _: True), +) -set_config('MOZ_UPDATE_AGENT', - depends_if('--enable-update-agent', - when=target_is_windows)(lambda _: True)) +set_config( + "MOZ_UPDATE_AGENT", + depends_if("--enable-update-agent", when=target_is_windows)(lambda _: True), +) # Enable or disable the default browser agent, which monitors the user's default # browser setting on Windows. 
@@ -52,17 +64,20 @@ set_config('MOZ_UPDATE_AGENT', @depends(target) def default_browser_agent_default(target): - return target.os == 'WINNT' + return target.os == "WINNT" -option('--enable-default-browser-agent', default=default_browser_agent_default, - help='{Enable|Disable} building the default browser agent') +option( + "--enable-default-browser-agent", + default=default_browser_agent_default, + help="{Enable|Disable} building the default browser agent", +) -@depends('--enable-default-browser-agent', when=target_is_windows) +@depends("--enable-default-browser-agent", when=target_is_windows) def default_agent_flag(enabled): if enabled: return True -set_config('MOZ_DEFAULT_BROWSER_AGENT', default_agent_flag) +set_config("MOZ_DEFAULT_BROWSER_AGENT", default_agent_flag) diff --git a/build/moz.configure/util.configure b/build/moz.configure/util.configure index 7ee1a498ad8ccf..fe82698c620bca 100644 --- a/build/moz.configure/util.configure +++ b/build/moz.configure/util.configure @@ -5,45 +5,51 @@ # file, You can obtain one at http://mozilla.org/MPL/2.0/. -@imports('sys') +@imports("sys") def die(*args): - 'Print an error and terminate configure.' + "Print an error and terminate configure." log.error(*args) sys.exit(1) -@imports(_from='mozbuild.configure', _import='ConfigureError') +@imports(_from="mozbuild.configure", _import="ConfigureError") def configure_error(message): - '''Raise a programming error and terminate configure. + """Raise a programming error and terminate configure. Primarily for use in moz.configure templates to sanity check - their inputs from moz.configure usage.''' + their inputs from moz.configure usage.""" raise ConfigureError(message) # A wrapper to obtain a process' output and return code. # Returns a tuple (retcode, stdout, stderr). -@imports('os') -@imports('six') -@imports('subprocess') -@imports(_from='mozbuild.shellutil', _import='quote') -@imports(_from='mozbuild.util', _import='system_encoding') +@imports("os") +@imports("six") +@imports("subprocess") +@imports(_from="mozbuild.shellutil", _import="quote") +@imports(_from="mozbuild.util", _import="system_encoding") def get_cmd_output(*args, **kwargs): - log.debug('Executing: `%s`', quote(*args)) + log.debug("Executing: `%s`", quote(*args)) proc = subprocess.Popen( - args, stdout=subprocess.PIPE, stderr=subprocess.PIPE, + args, + stdout=subprocess.PIPE, + stderr=subprocess.PIPE, # On Python 2 on Windows, close_fds prevents the process from inheriting # stdout/stderr. Elsewhere, it simply prevents it from inheriting extra # file descriptors, which is what we want. - close_fds=os.name != 'nt', **kwargs) + close_fds=os.name != "nt", + **kwargs + ) stdout, stderr = proc.communicate() # Normally we would set the `encoding` and `errors` arguments in the # constructor to subprocess.Popen, but those arguments were added in 3.6 # and we need to support back to 3.5, so instead we need to do this # nonsense. 
- stdout = six.ensure_text(stdout, encoding=system_encoding, - errors='replace').replace('\r\n', '\n') - stderr = six.ensure_text(stderr, encoding=system_encoding, - errors='replace').replace('\r\n', '\n') + stdout = six.ensure_text( + stdout, encoding=system_encoding, errors="replace" + ).replace("\r\n", "\n") + stderr = six.ensure_text( + stderr, encoding=system_encoding, errors="replace" + ).replace("\r\n", "\n") return proc.wait(), stdout, stderr @@ -51,87 +57,83 @@ def get_cmd_output(*args, **kwargs): # by running the given command if it exits normally, and streams that # output to log.debug and calls die or the given error callback if it # does not. -@imports(_from='mozbuild.configure.util', _import='LineIO') -@imports(_from='mozbuild.shellutil', _import='quote') +@imports(_from="mozbuild.configure.util", _import="LineIO") +@imports(_from="mozbuild.shellutil", _import="quote") def check_cmd_output(*args, **kwargs): - onerror = kwargs.pop('onerror', None) + onerror = kwargs.pop("onerror", None) with log.queue_debug(): retcode, stdout, stderr = get_cmd_output(*args, **kwargs) if retcode == 0: return stdout - log.debug('The command returned non-zero exit status %d.', - retcode) - for out, desc in ((stdout, 'output'), (stderr, 'error output')): + log.debug("The command returned non-zero exit status %d.", retcode) + for out, desc in ((stdout, "output"), (stderr, "error output")): if out: - log.debug('Its %s was:', desc) - with LineIO(lambda l: log.debug('| %s', l)) as o: + log.debug("Its %s was:", desc) + with LineIO(lambda l: log.debug("| %s", l)) as o: o.write(out) if onerror: return onerror() - die('Command `%s` failed with exit status %d.' % - (quote(*args), retcode)) + die("Command `%s` failed with exit status %d." % (quote(*args), retcode)) -@imports('os') +@imports("os") def is_absolute_or_relative(path): if os.altsep and os.altsep in path: return True return os.sep in path -@imports(_import='mozpack.path', _as='mozpath') +@imports(_import="mozpack.path", _as="mozpath") def normsep(path): return mozpath.normsep(path) -@imports('ctypes') -@imports(_from='ctypes', _import='wintypes') -@imports(_from='mozbuild.configure.constants', _import='WindowsBinaryType') +@imports("ctypes") +@imports(_from="ctypes", _import="wintypes") +@imports(_from="mozbuild.configure.constants", _import="WindowsBinaryType") def windows_binary_type(path): """Obtain the type of a binary on Windows. Returns WindowsBinaryType constant. """ GetBinaryTypeW = ctypes.windll.kernel32.GetBinaryTypeW - GetBinaryTypeW.argtypes = [wintypes.LPWSTR, - ctypes.POINTER(wintypes.DWORD)] + GetBinaryTypeW.argtypes = [wintypes.LPWSTR, ctypes.POINTER(wintypes.DWORD)] GetBinaryTypeW.restype = wintypes.BOOL bin_type = wintypes.DWORD() res = GetBinaryTypeW(path, ctypes.byref(bin_type)) if not res: - die('could not obtain binary type of %s' % path) + die("could not obtain binary type of %s" % path) if bin_type.value == 0: - return WindowsBinaryType('win32') + return WindowsBinaryType("win32") elif bin_type.value == 6: - return WindowsBinaryType('win64') + return WindowsBinaryType("win64") # If we see another binary type, something is likely horribly wrong. 
else: - die('unsupported binary type on %s: %s' % (path, bin_type)) + die("unsupported binary type on %s: %s" % (path, bin_type)) -@imports('ctypes') -@imports(_from='ctypes', _import='wintypes') +@imports("ctypes") +@imports(_from="ctypes", _import="wintypes") def get_GetShortPathNameW(): GetShortPathNameW = ctypes.windll.kernel32.GetShortPathNameW - GetShortPathNameW.argtypes = [wintypes.LPCWSTR, wintypes.LPWSTR, - wintypes.DWORD] + GetShortPathNameW.argtypes = [wintypes.LPCWSTR, wintypes.LPWSTR, wintypes.DWORD] GetShortPathNameW.restype = wintypes.DWORD return GetShortPathNameW @template -@imports('ctypes') -@imports('platform') -@imports(_from='mozbuild.shellutil', _import='quote') +@imports("ctypes") +@imports("platform") +@imports(_from="mozbuild.shellutil", _import="quote") def normalize_path(): # Until the build system can properly handle programs that need quoting, # transform those paths into their short version on Windows (e.g. # c:\PROGRA~1...). - if platform.system() == 'Windows': + if platform.system() == "Windows": GetShortPathNameW = get_GetShortPathNameW() def normalize_path(path): @@ -143,17 +145,20 @@ def normalize_path(): out = ctypes.create_unicode_buffer(size) needed = GetShortPathNameW(path, out, size) if size >= needed: - if ' ' in out.value: - die("GetShortPathName returned a long path name: `%s`. " + if " " in out.value: + die( + "GetShortPathName returned a long path name: `%s`. " "Use `fsutil file setshortname' " "to create a short name " "for any components of this path " "that have spaces.", - out.value) + out.value, + ) return normsep(out.value) size = needed else: + def normalize_path(path): return normsep(path) @@ -167,10 +172,10 @@ normalize_path = normalize_path() # exists. # The `paths` parameter may be passed to search the given paths instead of # $PATH. -@imports('sys') -@imports(_from='os', _import='pathsep') -@imports(_from='os', _import='environ') -@imports(_from='mozfile', _import='which') +@imports("sys") +@imports(_from="os", _import="pathsep") +@imports(_from="os", _import="environ") +@imports(_from="mozfile", _import="which") def find_program(file, paths=None): # The following snippet comes from `which` itself, with a slight # modification to use lowercase extensions, because it's confusing rustup @@ -178,12 +183,11 @@ def find_program(file, paths=None): # Windows has the concept of a list of extensions (PATHEXT env var). if sys.platform.startswith("win"): - exts = [e.lower() - for e in environ.get("PATHEXT", "").split(pathsep)] + exts = [e.lower() for e in environ.get("PATHEXT", "").split(pathsep)] # If '.exe' is not in exts then obviously this is Win9x and # or a bogus PATHEXT, then use a reasonable default. 
- if '.exe' not in exts: - exts = ['.com', '.exe', '.bat'] + if ".exe" not in exts: + exts = [".com", ".exe", ".bat"] else: exts = None @@ -193,42 +197,43 @@ def find_program(file, paths=None): if paths: if not isinstance(paths, (list, tuple)): - die("Paths provided to find_program must be a list of strings, " - "not %r", paths) + die( + "Paths provided to find_program must be a list of strings, " "not %r", + paths, + ) paths = pathsep.join(paths) path = which(file, path=paths, exts=exts) return normalize_path(path) if path else None -@imports('os') -@imports(_from='mozbuild.configure.util', _import='LineIO') -@imports(_from='six', _import='ensure_binary') -@imports(_from='tempfile', _import='mkstemp') +@imports("os") +@imports(_from="mozbuild.configure.util", _import="LineIO") +@imports(_from="six", _import="ensure_binary") +@imports(_from="tempfile", _import="mkstemp") def try_invoke_compiler(compiler, language, source, flags=None, onerror=None): flags = flags or [] if not isinstance(flags, (list, tuple)): - die("Flags provided to try_compile must be a list of strings, " - "not %r", flags) + die("Flags provided to try_compile must be a list of strings, " "not %r", flags) suffix = { - 'C': '.c', - 'C++': '.cpp', + "C": ".c", + "C++": ".cpp", }[language] - fd, path = mkstemp(prefix='conftest.', suffix=suffix, text=True) + fd, path = mkstemp(prefix="conftest.", suffix=suffix, text=True) try: - source = source.encode('ascii', 'replace') + source = source.encode("ascii", "replace") - log.debug('Creating `%s` with content:', path) - with LineIO(lambda l: log.debug('| %s', l)) as out: + log.debug("Creating `%s` with content:", path) + with LineIO(lambda l: log.debug("| %s", l)) as out: out.write(source) os.write(fd, ensure_binary(source)) os.close(fd) cmd = compiler + [path] + list(flags) - kwargs = {'onerror': onerror} + kwargs = {"onerror": onerror} return check_cmd_output(*cmd, **kwargs) finally: os.remove(path) @@ -288,9 +293,9 @@ def unique_list(l): # ('19.0', 'arm', r'C:\...\amd64_arm\cl.exe') # ('19.0', 'x64', r'C:\...\amd64\cl.exe') # ('19.0', 'x86', r'C:\...\amd64_x86\cl.exe') -@imports(_import='winreg') -@imports(_from='__builtin__', _import='WindowsError') -@imports(_from='fnmatch', _import='fnmatch') +@imports(_import="winreg") +@imports(_from="__builtin__", _import="WindowsError") +@imports(_from="fnmatch", _import="fnmatch") def get_registry_values(pattern, get_32_and_64_bit=False): def enum_helper(func, key): i = 0 @@ -303,7 +308,7 @@ def get_registry_values(pattern, get_32_and_64_bit=False): def get_keys(key, pattern, access_mask): try: - s = winreg.OpenKey(key, '\\'.join(pattern[:-1]), 0, access_mask) + s = winreg.OpenKey(key, "\\".join(pattern[:-1]), 0, access_mask) except WindowsError: return for k in enum_helper(winreg.EnumKey, s): @@ -315,7 +320,7 @@ def get_registry_values(pattern, get_32_and_64_bit=False): def get_values(key, pattern, access_mask): try: - s = winreg.OpenKey(key, '\\'.join(pattern[:-1]), 0, access_mask) + s = winreg.OpenKey(key, "\\".join(pattern[:-1]), 0, access_mask) except WindowsError: return for k, v, t in enum_helper(winreg.EnumValue, s): @@ -326,7 +331,7 @@ def get_registry_values(pattern, get_32_and_64_bit=False): subpattern = [] for p in pattern: subpattern.append(p) - if '*' in p: + if "*" in p: yield subpattern subpattern = [] if subpattern: @@ -339,7 +344,7 @@ def get_registry_values(pattern, get_32_and_64_bit=False): matches = base_key[:-1] base_key = base_key[-1] if i == len(pattern) - 1: - want_name = '*' in p[-1] + want_name = "*" in p[-1] 
for name, value in get_values(base_key, p, access_mask): yield matches + ((name, value) if want_name else (value,)) else: @@ -347,25 +352,30 @@ def get_registry_values(pattern, get_32_and_64_bit=False): next_keys.append(matches + (name, k)) keys = next_keys - pattern = pattern.split('\\') - assert pattern[0].startswith('HKEY_') + pattern = pattern.split("\\") + assert pattern[0].startswith("HKEY_") keys = [(getattr(winreg, pattern[0]),)] pattern = list(split_pattern(pattern[1:])) if get_32_and_64_bit: - for match in get_all_values(keys, pattern, winreg.KEY_READ | winreg.KEY_WOW64_32KEY): + for match in get_all_values( + keys, pattern, winreg.KEY_READ | winreg.KEY_WOW64_32KEY + ): yield match - for match in get_all_values(keys, pattern, winreg.KEY_READ | winreg.KEY_WOW64_64KEY): + for match in get_all_values( + keys, pattern, winreg.KEY_READ | winreg.KEY_WOW64_64KEY + ): yield match else: for match in get_all_values(keys, pattern, winreg.KEY_READ): yield match -@imports(_from='mozbuild.configure.util', _import='Version', _as='_Version') +@imports(_from="mozbuild.configure.util", _import="Version", _as="_Version") def Version(v): - 'A version number that can be compared usefully.' + "A version number that can be compared usefully." return _Version(v) + # Denotes a deprecated option. Combines option() and @depends: # @deprecated_option('--option') # def option(value): @@ -378,22 +388,23 @@ def Version(v): @template def deprecated_option(*args, **kwargs): - assert 'help' not in kwargs - kwargs['help'] = 'Deprecated' + assert "help" not in kwargs + kwargs["help"] = "Deprecated" opt = option(*args, **kwargs) def decorator(func): @depends(opt.option) def deprecated(value): - if value.origin != 'default': + if value.origin != "default": return func(value) + return deprecated return decorator # from mozbuild.util import ReadOnlyNamespace as namespace -@imports(_from='mozbuild.util', _import='ReadOnlyNamespace') +@imports(_from="mozbuild.util", _import="ReadOnlyNamespace") def namespace(**kwargs): return ReadOnlyNamespace(**kwargs) @@ -402,8 +413,8 @@ def namespace(**kwargs): # The given object can be a literal value, a function that takes no argument, # or, for convenience, a @depends function. @template -@imports(_from='inspect', _import='isfunction') -@imports(_from='mozbuild.configure', _import='SandboxDependsFunction') +@imports(_from="inspect", _import="isfunction") +@imports(_from="mozbuild.configure", _import="SandboxDependsFunction") def dependable(obj): if isinstance(obj, SandboxDependsFunction): return obj @@ -411,7 +422,7 @@ def dependable(obj): return depends(when=True)(obj) # Depend on --help to make lint happy if the dependable is used as an input # to an option(). 
- return depends('--help', when=True)(lambda _: obj) + return depends("--help", when=True)(lambda _: obj) always = dependable(True) @@ -425,7 +436,7 @@ never = dependable(False) def depends_tmpl(eval_args_fn, *args, **kwargs): if kwargs: assert len(kwargs) == 1 - when = kwargs['when'] + when = kwargs["when"] else: when = None @@ -434,7 +445,9 @@ def depends_tmpl(eval_args_fn, *args, **kwargs): def wrapper(*args): if eval_args_fn(args): return func(*args) + return wrapper + return decorator @@ -455,6 +468,7 @@ def depends_all(*args, **kwargs): # Hacks related to old-configure # ============================== + @dependable def old_configure_assignments(): return [] @@ -466,14 +480,14 @@ def add_old_configure_assignment(var, value, when=None): value = dependable(value) @depends(old_configure_assignments, var, value, when=when) - @imports(_from='mozbuild.shellutil', _import='quote') + @imports(_from="mozbuild.shellutil", _import="quote") def add_assignment(assignments, var, value): if var is None or value is None: return if value is True: - assignments.append((var, '1')) + assignments.append((var, "1")) elif value is False: - assignments.append((var, '')) + assignments.append((var, "")) else: if isinstance(value, (list, tuple)): value = quote(*value) diff --git a/build/moz.configure/warnings.configure b/build/moz.configure/warnings.configure index d7a0ec890f82f9..1a8e7a8c1b9fc0 100755 --- a/build/moz.configure/warnings.configure +++ b/build/moz.configure/warnings.configure @@ -4,39 +4,44 @@ # License, v. 2.0. If a copy of the MPL was not distributed with this # file, You can obtain one at http://mozilla.org/MPL/2.0/. -option('--enable-warnings-as-errors', env='MOZ_ENABLE_WARNINGS_AS_ERRORS', - default=depends('MOZ_AUTOMATION')(lambda x: bool(x)), - help='{Enable|Disable} treating warnings as errors') +option( + "--enable-warnings-as-errors", + env="MOZ_ENABLE_WARNINGS_AS_ERRORS", + default=depends("MOZ_AUTOMATION")(lambda x: bool(x)), + help="{Enable|Disable} treating warnings as errors", +) -@depends('--enable-warnings-as-errors') +@depends("--enable-warnings-as-errors") def rust_warning_flags(warnings_as_errors): flags = [] # Note that cargo passes --cap-lints warn to rustc for third-party code, so # we don't need a very complicated setup. if warnings_as_errors: - flags.append('-Dwarnings') + flags.append("-Dwarnings") else: - flags.extend(('--cap-lints', 'warn')) + flags.extend(("--cap-lints", "warn")) return flags -c_warning_flag = dependable('-Werror') +c_warning_flag = dependable("-Werror") -@depends('--enable-warnings-as-errors', c_warning_flag) + +@depends("--enable-warnings-as-errors", c_warning_flag) def warnings_as_errors(warnings_as_errors, c_warning_flag): if not warnings_as_errors: - return '' + return "" return c_warning_flag -set_config('WARNINGS_AS_ERRORS', warnings_as_errors) + +set_config("WARNINGS_AS_ERRORS", warnings_as_errors) # We have a peculiar setup in old-configure.in where some compilation tests # depend on enabling warnings-as-errors even if it's disabled for Firefox # compilation. We therefore need this assignment. 
-add_old_configure_assignment('WARNINGS_AS_ERRORS', c_warning_flag) +add_old_configure_assignment("WARNINGS_AS_ERRORS", c_warning_flag) # GCC/Clang warnings: @@ -44,70 +49,70 @@ add_old_configure_assignment('WARNINGS_AS_ERRORS', c_warning_flag) # https://clang.llvm.org/docs/DiagnosticsReference.html # lots of useful warnings -add_gcc_warning('-Wall') +add_gcc_warning("-Wall") # catch implicit truncation of enum values assigned to smaller bit fields -check_and_add_gcc_warning('-Wbitfield-enum-conversion') +check_and_add_gcc_warning("-Wbitfield-enum-conversion") # catches bugs, e.g. "if (c); foo();", few false positives -add_gcc_warning('-Wempty-body') +add_gcc_warning("-Wempty-body") # catches return types with qualifiers like const -add_gcc_warning('-Wignored-qualifiers') +add_gcc_warning("-Wignored-qualifiers") # function declaration hides virtual function from base class -add_gcc_warning('-Woverloaded-virtual', cxx_compiler) +add_gcc_warning("-Woverloaded-virtual", cxx_compiler) # catches pointer arithmetic using NULL or sizeof(void) -add_gcc_warning('-Wpointer-arith') +add_gcc_warning("-Wpointer-arith") # catch modifying constructor parameter that shadows member variable -check_and_add_gcc_warning('-Wshadow-field-in-constructor-modified') +check_and_add_gcc_warning("-Wshadow-field-in-constructor-modified") # catches comparing signed/unsigned ints -add_gcc_warning('-Wsign-compare') +add_gcc_warning("-Wsign-compare") # catches overflow bugs, few false positives -add_gcc_warning('-Wtype-limits') +add_gcc_warning("-Wtype-limits") # catches some dead code -add_gcc_warning('-Wunreachable-code') -check_and_add_gcc_warning('-Wunreachable-code-return') +add_gcc_warning("-Wunreachable-code") +check_and_add_gcc_warning("-Wunreachable-code-return") # catches treating string literals as non-const -add_gcc_warning('-Wwrite-strings', cxx_compiler) +add_gcc_warning("-Wwrite-strings", cxx_compiler) # turned on by -Wall, but we use offsetof on non-POD types frequently -add_gcc_warning('-Wno-invalid-offsetof', cxx_compiler) +add_gcc_warning("-Wno-invalid-offsetof", cxx_compiler) # catches objects passed by value to variadic functions. 
-check_and_add_gcc_warning('-Wclass-varargs') +check_and_add_gcc_warning("-Wclass-varargs") # catches empty if/switch/for initialization statements that have no effect -check_and_add_gcc_warning('-Wempty-init-stmt', cxx_compiler) +check_and_add_gcc_warning("-Wempty-init-stmt", cxx_compiler) # catches some implicit conversion of floats to ints -check_and_add_gcc_warning('-Wfloat-overflow-conversion') -check_and_add_gcc_warning('-Wfloat-zero-conversion') +check_and_add_gcc_warning("-Wfloat-overflow-conversion") +check_and_add_gcc_warning("-Wfloat-zero-conversion") # catches issues around loops -check_and_add_gcc_warning('-Wloop-analysis') +check_and_add_gcc_warning("-Wloop-analysis") # catches C++ version forward-compat issues -check_and_add_gcc_warning('-Wc++2a-compat', cxx_compiler) +check_and_add_gcc_warning("-Wc++2a-compat", cxx_compiler) # catches possible misuse of the comma operator -check_and_add_gcc_warning('-Wcomma', cxx_compiler) +check_and_add_gcc_warning("-Wcomma", cxx_compiler) # catches duplicated conditions in if-else-if chains -check_and_add_gcc_warning('-Wduplicated-cond') +check_and_add_gcc_warning("-Wduplicated-cond") # catches unintentional switch case fallthroughs -check_and_add_gcc_warning('-Wimplicit-fallthrough', cxx_compiler) +check_and_add_gcc_warning("-Wimplicit-fallthrough", cxx_compiler) # catches unused variable/function declarations -check_and_add_gcc_warning('-Wunused-function', cxx_compiler) -check_and_add_gcc_warning('-Wunused-variable', cxx_compiler) +check_and_add_gcc_warning("-Wunused-function", cxx_compiler) +check_and_add_gcc_warning("-Wunused-variable", cxx_compiler) # catches expressions used as a null pointer constant # XXX: at the time of writing, the version of clang used on the OS X test @@ -117,120 +122,123 @@ check_and_add_gcc_warning('-Wunused-variable', cxx_compiler) # -Werror=non-literal-null-conversion, but we only do that when # --enable-warnings-as-errors is specified so that no unexpected fatal # warnings are produced. -check_and_add_gcc_warning('-Werror=non-literal-null-conversion', - when='--enable-warnings-as-errors') +check_and_add_gcc_warning( + "-Werror=non-literal-null-conversion", when="--enable-warnings-as-errors" +) # catches string literals used in boolean expressions -check_and_add_gcc_warning('-Wstring-conversion') +check_and_add_gcc_warning("-Wstring-conversion") # catches comparisons that are always true or false -check_and_add_gcc_warning('-Wtautological-overlap-compare') -check_and_add_gcc_warning('-Wtautological-unsigned-enum-zero-compare') -check_and_add_gcc_warning('-Wtautological-unsigned-zero-compare') +check_and_add_gcc_warning("-Wtautological-overlap-compare") +check_and_add_gcc_warning("-Wtautological-unsigned-enum-zero-compare") +check_and_add_gcc_warning("-Wtautological-unsigned-zero-compare") # This can be triggered by certain patterns used deliberately in portable code -check_and_add_gcc_warning('-Wno-error=tautological-type-limit-compare') +check_and_add_gcc_warning("-Wno-error=tautological-type-limit-compare") # we inline 'new' and 'delete' in mozalloc -check_and_add_gcc_warning('-Wno-inline-new-delete', cxx_compiler) +check_and_add_gcc_warning("-Wno-inline-new-delete", cxx_compiler) # Prevent the following GCC warnings from being treated as errors: # too many false positives -check_and_add_gcc_warning('-Wno-error=maybe-uninitialized') +check_and_add_gcc_warning("-Wno-error=maybe-uninitialized") # we don't want our builds held hostage when a platform-specific API # becomes deprecated. 
-check_and_add_gcc_warning('-Wno-error=deprecated-declarations') +check_and_add_gcc_warning("-Wno-error=deprecated-declarations") # false positives depending on optimization -check_and_add_gcc_warning('-Wno-error=array-bounds') +check_and_add_gcc_warning("-Wno-error=array-bounds") # can't get rid of those PGO warnings -check_and_add_gcc_warning('-Wno-error=coverage-mismatch') +check_and_add_gcc_warning("-Wno-error=coverage-mismatch") # -Wbackend-plugin warnings from Android PGO profile-use builds: # error: /builds/worker/workspace/build/src/mozglue/misc/AutoProfilerLabel.cpp: # Function control flow change detected (hash mismatch) # _ZN7mozilla17AutoProfilerLabelD2Ev [-Werror,-Wbackend-plugin] -check_and_add_gcc_warning('-Wno-error=backend-plugin') +check_and_add_gcc_warning("-Wno-error=backend-plugin") # false positives depending on optimizations -check_and_add_gcc_warning('-Wno-error=free-nonheap-object') +check_and_add_gcc_warning("-Wno-error=free-nonheap-object") # Would be a pain to fix all occurrences, for very little gain -check_and_add_gcc_warning('-Wno-multistatement-macros') +check_and_add_gcc_warning("-Wno-multistatement-macros") # Disable the -Werror for return-std-move because of a false positive # on nsTAutoStringN: https://bugs.llvm.org/show_bug.cgi?id=37249 -check_and_add_gcc_warning('-Wno-error=return-std-move') +check_and_add_gcc_warning("-Wno-error=return-std-move") # Disable the -Werror for -Wclass-memaccess as we have a long # tail of issues to fix -check_and_add_gcc_warning('-Wno-error=class-memaccess') +check_and_add_gcc_warning("-Wno-error=class-memaccess") # -Watomic-alignment is a new warning in clang 7 that seems way too broad. # https://bugs.llvm.org/show_bug.cgi?id=38593 -check_and_add_gcc_warning('-Wno-error=atomic-alignment') +check_and_add_gcc_warning("-Wno-error=atomic-alignment") # New warning with gcc 9. Not useful # https://bugzilla.mozilla.org/show_bug.cgi?id=1515356 -check_and_add_gcc_warning('-Wno-error=deprecated-copy') +check_and_add_gcc_warning("-Wno-error=deprecated-copy") # catches format/argument mismatches with printf c_format_warning, cxx_format_warning = check_and_add_gcc_warning( - '-Wformat', when=depends(target)(lambda t: t.kernel != 'WINNT')) + "-Wformat", when=depends(target)(lambda t: t.kernel != "WINNT") +) # Add compile-time warnings for unprotected functions and format functions # that represent possible security problems. Enable this only when -Wformat # is enabled, otherwise it is an error -check_and_add_gcc_warning('-Wformat-security', - when=c_format_warning & cxx_format_warning) -check_and_add_gcc_warning('-Wformat-overflow=2', - when=c_format_warning & cxx_format_warning) +check_and_add_gcc_warning( + "-Wformat-security", when=c_format_warning & cxx_format_warning +) +check_and_add_gcc_warning( + "-Wformat-overflow=2", when=c_format_warning & cxx_format_warning +) # Other MinGW specific things -with only_when(depends(target)(lambda t: t.kernel == 'WINNT')): +with only_when(depends(target)(lambda t: t.kernel == "WINNT")): # When compiling for Windows with gcc, we encounter lots of "#pragma warning"'s # which is an MSVC-only pragma that GCC does not recognize. 
- check_and_add_gcc_warning('-Wno-unknown-pragmas') + check_and_add_gcc_warning("-Wno-unknown-pragmas") # When compiling for Windows with gcc, gcc throws false positives and true # positives where the callsite is ifdef-ed out - check_and_add_gcc_warning('-Wno-unused-function') + check_and_add_gcc_warning("-Wno-unused-function") # When compiling for Windows with gcc, gcc cannot produce this warning # correctly: it mistakes DWORD_PTR and ULONG_PTR as types you cannot # give NULL to. (You can in fact do that.) - check_and_add_gcc_warning('-Wno-conversion-null') + check_and_add_gcc_warning("-Wno-conversion-null") # Throughout the codebase we regularly have switch statements off of enums # without covering every value in the enum. We don't care about these warnings. - check_and_add_gcc_warning('-Wno-switch') + check_and_add_gcc_warning("-Wno-switch") # Another code pattern we have is using start and end constants in enums of # different types. We do this for safety, but then when comparing it throws # an error, which we would like to ignore. This seems to only affect the MinGW # build, but we're not sure why. - check_and_add_gcc_warning('-Wno-enum-compare') + check_and_add_gcc_warning("-Wno-enum-compare") # We hit this all over the place with the gtest INSTANTIATE_TEST_CASE_P macro -check_and_add_gcc_warning('-Wno-gnu-zero-variadic-macro-arguments') +check_and_add_gcc_warning("-Wno-gnu-zero-variadic-macro-arguments") # Make it an error to be missing function declarations. -check_and_add_gcc_warning('-Werror=implicit-function-declaration') +check_and_add_gcc_warning("-Werror=implicit-function-declaration") # New in clang 11. We can't really do anything about this warning. -check_and_add_gcc_warning('-Wno-psabi') +check_and_add_gcc_warning("-Wno-psabi") # Disable broken missing-braces warning on old clang versions check_and_add_gcc_warning( - '-Wno-missing-braces', - when=depends(c_compiler)(lambda c: c.type == 'clang' and c.version < '6.0')) + "-Wno-missing-braces", + when=depends(c_compiler)(lambda c: c.type == "clang" and c.version < "6.0"), +) # Please keep these last in this file -add_old_configure_assignment('_WARNINGS_CFLAGS', warnings_flags.cflags) -add_old_configure_assignment('_WARNINGS_CXXFLAGS', warnings_flags.cxxflags) -add_old_configure_assignment( - '_WARNINGS_HOST_CFLAGS', warnings_flags.host_cflags) -add_old_configure_assignment( - '_WARNINGS_HOST_CXXFLAGS', warnings_flags.host_cxxflags) +add_old_configure_assignment("_WARNINGS_CFLAGS", warnings_flags.cflags) +add_old_configure_assignment("_WARNINGS_CXXFLAGS", warnings_flags.cxxflags) +add_old_configure_assignment("_WARNINGS_HOST_CFLAGS", warnings_flags.host_cflags) +add_old_configure_assignment("_WARNINGS_HOST_CXXFLAGS", warnings_flags.host_cxxflags) diff --git a/build/moz.configure/windows.configure b/build/moz.configure/windows.configure index 466d6fed746730..2cc6a60411b996 100644 --- a/build/moz.configure/windows.configure +++ b/build/moz.configure/windows.configure @@ -4,16 +4,20 @@ # License, v. 2.0. If a copy of the MPL was not distributed with this # file, You can obtain one at http://mozilla.org/MPL/2.0/. -option('--with-windows-version', nargs=1, default='603', - help='Windows SDK version to target. Win 8.1 (603) is currently' - 'the minimum supported version.') +option( + "--with-windows-version", + nargs=1, + default="603", + help="Windows SDK version to target. 
Win 8.1 (603) is currently" + "the minimum supported version.", +) -@depends('--with-windows-version') -@imports(_from='__builtin__', _import='ValueError') +@depends("--with-windows-version") +@imports(_from="__builtin__", _import="ValueError") def valid_windows_version(value): if not value: - die('Cannot build with --without-windows-version') + die("Cannot build with --without-windows-version") try: version = int(value[0], 16) if version in (0x603,): @@ -21,25 +25,30 @@ def valid_windows_version(value): except ValueError: pass - die('Invalid value for --with-windows-version (%s)', value[0]) + die("Invalid value for --with-windows-version (%s)", value[0]) -option(env='WINDOWSSDKDIR', nargs=1, - help='Directory containing the Windows SDK') +option(env="WINDOWSSDKDIR", nargs=1, help="Directory containing the Windows SDK") -@depends('WINDOWSSDKDIR', host, c_compiler) +@depends("WINDOWSSDKDIR", host, c_compiler) def windows_sdk_dir(value, host, compiler): if value: return value # Ideally, we'd actually check for host/target ABI being MSVC, but # that's waiting for bug 1617793. - if host.kernel != 'WINNT' or compiler.type != 'clang-cl': + if host.kernel != "WINNT" or compiler.type != "clang-cl": return () - return set(x[1] for x in get_registry_values( - r'HKEY_LOCAL_MACHINE\SOFTWARE\Microsoft\Windows Kits\Installed Roots' - r'\KitsRoot*', get_32_and_64_bit=True)) + return set( + x[1] + for x in get_registry_values( + r"HKEY_LOCAL_MACHINE\SOFTWARE\Microsoft\Windows Kits\Installed Roots" + r"\KitsRoot*", + get_32_and_64_bit=True, + ) + ) + # The Windows SDK 8.1 and 10 have different layouts. The former has # $SDK/include/$subdir, while the latter has $SDK/include/$version/$subdir. @@ -50,16 +59,17 @@ def windows_sdk_dir(value, host, compiler): # 8.1. -@imports('os') -@imports('re') -@imports(_from='__builtin__', _import='sorted') -@imports(_from='__builtin__', _import='Exception') +@imports("os") +@imports("re") +@imports(_from="__builtin__", _import="sorted") +@imports(_from="__builtin__", _import="Exception") def get_sdk_dirs(sdk, subdir): def get_dirs_containing(sdk, stem, subdir): base = os.path.join(sdk, stem) try: - subdirs = [d for d in os.listdir(base) - if os.path.isdir(os.path.join(base, d))] + subdirs = [ + d for d in os.listdir(base) if os.path.isdir(os.path.join(base, d)) + ] except Exception: subdirs = [] if not subdirs: @@ -68,18 +78,21 @@ def get_sdk_dirs(sdk, subdir): return (base,) # At this point, either we have an incomplete or invalid SDK directory, # or we exclusively have version numbers in subdirs. 
- return tuple(os.path.join(base, s) for s in subdirs - if os.path.isdir(os.path.join(base, s, subdir))) + return tuple( + os.path.join(base, s) + for s in subdirs + if os.path.isdir(os.path.join(base, s, subdir)) + ) def categorize(dirs): return {os.path.basename(d): d for d in dirs} - include_dirs = categorize(get_dirs_containing(sdk, 'include', subdir)) - lib_dirs = categorize(get_dirs_containing(sdk, 'lib', subdir)) + include_dirs = categorize(get_dirs_containing(sdk, "include", subdir)) + lib_dirs = categorize(get_dirs_containing(sdk, "lib", subdir)) - if 'include' in include_dirs: - include_dirs['winv6.3'] = include_dirs['include'] - del include_dirs['include'] + if "include" in include_dirs: + include_dirs["winv6.3"] = include_dirs["include"] + del include_dirs["include"] valid_versions = sorted(set(include_dirs) & set(lib_dirs), reverse=True) if valid_versions: @@ -90,39 +103,47 @@ def get_sdk_dirs(sdk, subdir): ) -@imports(_from='mozbuild.shellutil', _import='quote') +@imports(_from="mozbuild.shellutil", _import="quote") def valid_windows_sdk_dir_result(value): if value: - return '0x%04x in %s' % (value.version, quote(value.path)) + return "0x%04x in %s" % (value.version, quote(value.path)) -@depends(c_compiler, windows_sdk_dir, valid_windows_version, 'WINDOWSSDKDIR') -@checking('for Windows SDK', valid_windows_sdk_dir_result) -@imports(_from='__builtin__', _import='sorted') -@imports(_from='__builtin__', _import='Exception') -@imports(_from='textwrap', _import='dedent') -def valid_windows_sdk_dir(compiler, windows_sdk_dir, target_version, - windows_sdk_dir_env): +@depends(c_compiler, windows_sdk_dir, valid_windows_version, "WINDOWSSDKDIR") +@checking("for Windows SDK", valid_windows_sdk_dir_result) +@imports(_from="__builtin__", _import="sorted") +@imports(_from="__builtin__", _import="Exception") +@imports(_from="textwrap", _import="dedent") +def valid_windows_sdk_dir( + compiler, windows_sdk_dir, target_version, windows_sdk_dir_env +): # Ideally, we'd actually check for host/target ABI being MSVC, but # that's waiting for bug 1617793. - if compiler.type != 'clang-cl': + if compiler.type != "clang-cl": return None if windows_sdk_dir_env: windows_sdk_dir_env = windows_sdk_dir_env[0] sdks = {} for d in windows_sdk_dir: - sdk = get_sdk_dirs(d, 'um') + sdk = get_sdk_dirs(d, "um") if sdk: - check = dedent('''\ + check = dedent( + """\ #include WINVER_MAXVER - ''') - um_dir = os.path.join(sdk.include, 'um') - shared_dir = os.path.join(sdk.include, 'shared') - result = try_preprocess(compiler.wrapper + [compiler.compiler] + - compiler.flags + - ['-X', '-I', um_dir, '-I', shared_dir], 'C', - check, onerror=lambda: '') + """ + ) + um_dir = os.path.join(sdk.include, "um") + shared_dir = os.path.join(sdk.include, "shared") + result = try_preprocess( + compiler.wrapper + + [compiler.compiler] + + compiler.flags + + ["-X", "-I", um_dir, "-I", shared_dir], + "C", + check, + onerror=lambda: "", + ) if result: maxver = result.splitlines()[-1] try: @@ -134,9 +155,10 @@ def valid_windows_sdk_dir(compiler, windows_sdk_dir, target_version, continue if d == windows_sdk_dir_env: raise FatalCheckError( - 'Error while checking the version of the SDK in ' - 'WINDOWSSDKDIR (%s). Please verify it contains a valid and ' - 'complete SDK installation.' % windows_sdk_dir_env) + "Error while checking the version of the SDK in " + "WINDOWSSDKDIR (%s). Please verify it contains a valid and " + "complete SDK installation." 
% windows_sdk_dir_env + ) valid_sdks = sorted(sdks, key=lambda x: sdks[x][0], reverse=True) if valid_sdks: @@ -144,16 +166,18 @@ def valid_windows_sdk_dir(compiler, windows_sdk_dir, target_version, if not valid_sdks or biggest_version < target_version: if windows_sdk_dir_env: raise FatalCheckError( - 'You are targeting Windows version 0x%04x, but your SDK only ' - 'supports up to version 0x%04x. Install and use an updated SDK, ' - 'or target a lower version using --with-windows-version. ' - 'Alternatively, try running the Windows SDK Configuration Tool ' - 'and selecting a newer SDK. See ' - 'https://developer.mozilla.org/En/Windows_SDK_versions for ' - 'details on fixing this.' % (target_version, biggest_version)) + "You are targeting Windows version 0x%04x, but your SDK only " + "supports up to version 0x%04x. Install and use an updated SDK, " + "or target a lower version using --with-windows-version. " + "Alternatively, try running the Windows SDK Configuration Tool " + "and selecting a newer SDK. See " + "https://developer.mozilla.org/En/Windows_SDK_versions for " + "details on fixing this." % (target_version, biggest_version) + ) raise FatalCheckError( - 'Cannot find a Windows SDK for version >= 0x%04x.' % target_version) + "Cannot find a Windows SDK for version >= 0x%04x." % target_version + ) return namespace( path=sdk.path, @@ -163,27 +187,27 @@ def valid_windows_sdk_dir(compiler, windows_sdk_dir, target_version, ) -@imports(_from='mozbuild.shellutil', _import='quote') +@imports(_from="mozbuild.shellutil", _import="quote") def valid_ucrt_sdk_dir_result(value): if value: - return '%s in %s' % (value.version, quote(value.path)) + return "%s in %s" % (value.version, quote(value.path)) -@depends(windows_sdk_dir, 'WINDOWSSDKDIR', c_compiler) -@checking('for Universal CRT SDK', valid_ucrt_sdk_dir_result) -@imports('os') -@imports(_from='__builtin__', _import='sorted') -@imports(_import='mozpack.path', _as='mozpath') +@depends(windows_sdk_dir, "WINDOWSSDKDIR", c_compiler) +@checking("for Universal CRT SDK", valid_ucrt_sdk_dir_result) +@imports("os") +@imports(_from="__builtin__", _import="sorted") +@imports(_import="mozpack.path", _as="mozpath") def valid_ucrt_sdk_dir(windows_sdk_dir, windows_sdk_dir_env, compiler): # Ideally, we'd actually check for host/target ABI being MSVC, but # that's waiting for bug 1617793. - if compiler.type != 'clang-cl': + if compiler.type != "clang-cl": return None if windows_sdk_dir_env: windows_sdk_dir_env = windows_sdk_dir_env[0] sdks = {} for d in windows_sdk_dir: - sdk = get_sdk_dirs(d, 'ucrt') + sdk = get_sdk_dirs(d, "ucrt") if sdk: version = os.path.basename(sdk.include) # We're supposed to always find a version in the directory, because @@ -191,7 +215,7 @@ def valid_ucrt_sdk_dir(windows_sdk_dir, windows_sdk_dir_env, compiler): # contain the Universal CRT SDK. When the main SDK is 8.1, there # is, however, supposed to be a reduced install of the SDK 10 # with the UCRT. - if version != 'include': + if version != "include": sdks[d] = Version(version), sdk continue if d == windows_sdk_dir_env: @@ -199,12 +223,14 @@ def valid_ucrt_sdk_dir(windows_sdk_dir, windows_sdk_dir_env, compiler): # Universal CRT SDK, chances are this is a start-shell-msvc*.bat # setup, where INCLUDE and LIB already contain the UCRT paths. 
ucrt_includes = [ - p for p in os.environ.get('INCLUDE', '').split(';') - if os.path.basename(p).lower() == 'ucrt' + p + for p in os.environ.get("INCLUDE", "").split(";") + if os.path.basename(p).lower() == "ucrt" ] ucrt_libs = [ - p for p in os.environ.get('LIB', '').split(';') - if os.path.basename(os.path.dirname(p)).lower() == 'ucrt' + p + for p in os.environ.get("LIB", "").split(";") + if os.path.basename(os.path.dirname(p)).lower() == "ucrt" ] if ucrt_includes and ucrt_libs: # Pick the first of each, since they are the ones that the @@ -213,30 +239,36 @@ def valid_ucrt_sdk_dir(windows_sdk_dir, windows_sdk_dir_env, compiler): lib = os.path.dirname(os.path.dirname(ucrt_libs[0])) path = os.path.dirname(os.path.dirname(include)) version = os.path.basename(include) - if version != 'include' and mozpath.basedir(lib, [path]): - sdks[d] = Version(version), namespace( - path=path, - include=include, - lib=lib, + if version != "include" and mozpath.basedir(lib, [path]): + sdks[d] = ( + Version(version), + namespace( + path=path, + include=include, + lib=lib, + ), ) continue raise FatalCheckError( - 'The SDK in WINDOWSSDKDIR (%s) does not contain the Universal ' - 'CRT.' % windows_sdk_dir_env) + "The SDK in WINDOWSSDKDIR (%s) does not contain the Universal " + "CRT." % windows_sdk_dir_env + ) valid_sdks = sorted(sdks, key=lambda x: sdks[x][0], reverse=True) if not valid_sdks: - raise FatalCheckError('Cannot find the Universal CRT SDK. ' - 'Please install it.') + raise FatalCheckError( + "Cannot find the Universal CRT SDK. " "Please install it." + ) version, sdk = sdks[valid_sdks[0]] - minimum_ucrt_version = Version('10.0.17134.0') + minimum_ucrt_version = Version("10.0.17134.0") if version < minimum_ucrt_version: - raise FatalCheckError('Latest Universal CRT SDK version found %s' - ' and minimum required is %s. This or a later' - ' version can be installed using the Visual' - ' Studio installer.' - % (version, minimum_ucrt_version)) + raise FatalCheckError( + "Latest Universal CRT SDK version found %s" + " and minimum required is %s. This or a later" + " version can be installed using the Visual" + " Studio installer." % (version, minimum_ucrt_version) + ) return namespace( path=sdk.path, @@ -247,35 +279,36 @@ def valid_ucrt_sdk_dir(windows_sdk_dir, windows_sdk_dir_env, compiler): @depends(c_compiler, host_c_compiler, toolchain_search_path) -@imports('os') +@imports("os") def vc_path(c_compiler, host_c_compiler, toolchain_search_path): - if c_compiler.type != 'clang-cl' and host_c_compiler.type != 'clang-cl': + if c_compiler.type != "clang-cl" and host_c_compiler.type != "clang-cl": return # In clang-cl builds, we need the headers and libraries from an MSVC installation. - vc_program = find_program('cl.exe', paths=toolchain_search_path) + vc_program = find_program("cl.exe", paths=toolchain_search_path) if not vc_program: - die('Cannot find a Visual C++ install for e.g. ATL headers.') + die("Cannot find a Visual C++ install for e.g. 
ATL headers.") result = os.path.dirname(vc_program) while True: next, p = os.path.split(result) if next == result: - die('Cannot determine the Visual C++ directory the compiler (%s) ' - 'is in' % vc_program) + die( + "Cannot determine the Visual C++ directory the compiler (%s) " + "is in" % vc_program + ) result = next - if p.lower() == 'bin': + if p.lower() == "bin": break return os.path.normpath(result) -option(env='DIA_SDK_PATH', nargs=1, - help='Path to the Debug Interface Access SDK') +option(env="DIA_SDK_PATH", nargs=1, help="Path to the Debug Interface Access SDK") -@depends(vc_path, 'DIA_SDK_PATH') -@checking('for the Debug Interface Access SDK', lambda x: x or 'not found') -@imports('os') +@depends(vc_path, "DIA_SDK_PATH") +@checking("for the Debug Interface Access SDK", lambda x: x or "not found") +@imports("os") def dia_sdk_dir(vc_path, dia_sdk_path): if dia_sdk_path: path = os.path.normpath(dia_sdk_path[0]) @@ -283,51 +316,58 @@ def dia_sdk_dir(vc_path, dia_sdk_path): elif vc_path: # This would be easier if we had the installationPath that # get_vc_paths works with, since 'DIA SDK' is relative to that. - path = os.path.normpath(os.path.join( - vc_path, '..', '..', '..', '..', 'DIA SDK')) + path = os.path.normpath( + os.path.join(vc_path, "..", "..", "..", "..", "DIA SDK") + ) else: return - if os.path.exists(os.path.join(path, 'include', 'dia2.h')): + if os.path.exists(os.path.join(path, "include", "dia2.h")): return path @depends(vc_path, valid_windows_sdk_dir, valid_ucrt_sdk_dir, dia_sdk_dir) -@imports('os') +@imports("os") def include_path(vc_path, windows_sdk_dir, ucrt_sdk_dir, dia_sdk_dir): if not vc_path: return - atlmfc_dir = os.path.join(vc_path, 'atlmfc', 'include') + atlmfc_dir = os.path.join(vc_path, "atlmfc", "include") if not os.path.isdir(atlmfc_dir): - die('Cannot find the ATL/MFC headers in the Visual C++ directory (%s). ' - 'Please install them.' % vc_path) + die( + "Cannot find the ATL/MFC headers in the Visual C++ directory (%s). " + "Please install them." % vc_path + ) - winrt_dir = os.path.join(windows_sdk_dir.include, 'winrt') + winrt_dir = os.path.join(windows_sdk_dir.include, "winrt") if not os.path.isdir(winrt_dir): - die('Cannot find the WinRT headers in the Windows SDK directory (%s). ' - 'Please install them.' % windows_sdk_dir.path) + die( + "Cannot find the WinRT headers in the Windows SDK directory (%s). " + "Please install them." 
% windows_sdk_dir.path + ) includes = [] - include_env = os.environ.get('INCLUDE') + include_env = os.environ.get("INCLUDE") if include_env: includes.append(include_env) - includes.extend(( - os.path.join(vc_path, 'include'), - atlmfc_dir, - os.path.join(windows_sdk_dir.include, 'shared'), - os.path.join(windows_sdk_dir.include, 'um'), - winrt_dir, - os.path.join(ucrt_sdk_dir.include, 'ucrt'), - )) + includes.extend( + ( + os.path.join(vc_path, "include"), + atlmfc_dir, + os.path.join(windows_sdk_dir.include, "shared"), + os.path.join(windows_sdk_dir.include, "um"), + winrt_dir, + os.path.join(ucrt_sdk_dir.include, "ucrt"), + ) + ) if dia_sdk_dir: - includes.append(os.path.join(dia_sdk_dir, 'include')) + includes.append(os.path.join(dia_sdk_dir, "include")) # Set in the environment for old-configure - includes = ';'.join(includes) - os.environ['INCLUDE'] = includes + includes = ";".join(includes) + os.environ["INCLUDE"] = includes return includes -set_config('INCLUDE', include_path) +set_config("INCLUDE", include_path) @template @@ -339,10 +379,10 @@ def dia_sdk_subdir(host_or_target, subdir): # For some reason the DIA SDK still uses the old-style targets # even in a newer MSVC. old_target = { - 'x86': '', - 'x86_64': 'amd64', - 'arm': 'arm', - 'aarch64': 'arm64' + "x86": "", + "x86_64": "amd64", + "arm": "arm", + "aarch64": "arm64", }.get(target.cpu) if old_target is None: return @@ -356,40 +396,52 @@ def dia_sdk_subdir(host_or_target, subdir): return dia_sdk_subdir -set_config('WIN_DIA_SDK_BIN_DIR', dia_sdk_subdir(host, 'bin')) +set_config("WIN_DIA_SDK_BIN_DIR", dia_sdk_subdir(host, "bin")) @template def lib_path_for(host_or_target): - @depends(host_or_target, dependable(host_or_target is host), vc_path, - valid_windows_sdk_dir, valid_ucrt_sdk_dir, dia_sdk_subdir(host_or_target, 'lib')) - @imports('os') - def lib_path(target, is_host, vc_path, windows_sdk_dir, ucrt_sdk_dir, dia_sdk_lib_dir): + @depends( + host_or_target, + dependable(host_or_target is host), + vc_path, + valid_windows_sdk_dir, + valid_ucrt_sdk_dir, + dia_sdk_subdir(host_or_target, "lib"), + ) + @imports("os") + def lib_path( + target, is_host, vc_path, windows_sdk_dir, ucrt_sdk_dir, dia_sdk_lib_dir + ): if not vc_path: return sdk_target = { - 'x86': 'x86', - 'x86_64': 'x64', - 'arm': 'arm', - 'aarch64': 'arm64', + "x86": "x86", + "x86_64": "x64", + "arm": "arm", + "aarch64": "arm64", }.get(target.cpu) # MSVC2017 switched to use the same target naming as the sdk. - atlmfc_dir = os.path.join(vc_path, 'atlmfc', 'lib', sdk_target) + atlmfc_dir = os.path.join(vc_path, "atlmfc", "lib", sdk_target) if not os.path.isdir(atlmfc_dir): - die('Cannot find the ATL/MFC libraries in the Visual C++ directory ' - '(%s). Please install them.' % vc_path) + die( + "Cannot find the ATL/MFC libraries in the Visual C++ directory " + "(%s). Please install them." 
% vc_path + ) libs = [] - lib_env = os.environ.get('LIB') + lib_env = os.environ.get("LIB") if lib_env and not is_host: - libs.extend(lib_env.split(';')) - libs.extend(( - os.path.join(vc_path, 'lib', sdk_target), - atlmfc_dir, - os.path.join(windows_sdk_dir.lib, 'um', sdk_target), - os.path.join(ucrt_sdk_dir.lib, 'ucrt', sdk_target), - )) + libs.extend(lib_env.split(";")) + libs.extend( + ( + os.path.join(vc_path, "lib", sdk_target), + atlmfc_dir, + os.path.join(windows_sdk_dir.lib, "um", sdk_target), + os.path.join(ucrt_sdk_dir.lib, "ucrt", sdk_target), + ) + ) if dia_sdk_lib_dir: libs.append(dia_sdk_lib_dir) return libs @@ -398,27 +450,28 @@ def lib_path_for(host_or_target): @depends_if(lib_path_for(target)) -@imports('os') +@imports("os") def lib_path(libs): # Set in the environment for old-configure - libs = ';'.join(libs) - os.environ['LIB'] = libs + libs = ";".join(libs) + os.environ["LIB"] = libs return libs -set_config('LIB', lib_path) +set_config("LIB", lib_path) lib_path_for_host = lib_path_for(host) + @depends_if(lib_path_for_host) -@imports(_from='mozbuild.shellutil', _import='quote') +@imports(_from="mozbuild.shellutil", _import="quote") def host_linker_libpaths(libs): - return ['-LIBPATH:%s' % quote(l) for l in libs] + return ["-LIBPATH:%s" % quote(l) for l in libs] @depends_if(lib_path_for_host) -@imports(_from='mozbuild.shellutil', _import='quote') +@imports(_from="mozbuild.shellutil", _import="quote") def host_linker_libpaths_bat(libs): # .bat files need a different style of quoting. Batch quoting is actually # not defined, and up to applications to handle, so it's not really clear @@ -429,42 +482,54 @@ def host_linker_libpaths_bat(libs): return ['"-LIBPATH:%s"' % l for l in libs] -set_config('HOST_LINKER_LIBPATHS', host_linker_libpaths) -set_config('HOST_LINKER_LIBPATHS_BAT', host_linker_libpaths_bat) +set_config("HOST_LINKER_LIBPATHS", host_linker_libpaths) +set_config("HOST_LINKER_LIBPATHS_BAT", host_linker_libpaths_bat) @depends(valid_windows_sdk_dir, valid_ucrt_sdk_dir, host) -@imports(_from='os', _import='environ') +@imports(_from="os", _import="environ") def sdk_bin_path(valid_windows_sdk_dir, valid_ucrt_sdk_dir, host): if not valid_windows_sdk_dir: return vc_host = { - 'x86': 'x86', - 'x86_64': 'x64', + "x86": "x86", + "x86_64": "x64", }.get(host.cpu) # From version 10.0.15063.0 onwards the bin path contains the version number. 
- versioned_bin = ('bin' if valid_ucrt_sdk_dir.version < '10.0.15063.0' - else os.path.join('bin', str(valid_ucrt_sdk_dir.version))) + versioned_bin = ( + "bin" + if valid_ucrt_sdk_dir.version < "10.0.15063.0" + else os.path.join("bin", str(valid_ucrt_sdk_dir.version)) + ) result = [ - environ['PATH'], - os.path.join(valid_windows_sdk_dir.path, versioned_bin, vc_host) + environ["PATH"], + os.path.join(valid_windows_sdk_dir.path, versioned_bin, vc_host), ] - if vc_host == 'x64': - result.append( - os.path.join(valid_windows_sdk_dir.path, versioned_bin, 'x86')) + if vc_host == "x64": + result.append(os.path.join(valid_windows_sdk_dir.path, versioned_bin, "x86")) return result -option(env='LINKER', nargs=1, when=target_is_windows, help='Path to the linker') +option(env="LINKER", nargs=1, when=target_is_windows, help="Path to the linker") -link = check_prog('LINKER', ('lld-link',), input='LINKER', - when=target_is_windows, paths=toolchain_search_path) +link = check_prog( + "LINKER", + ("lld-link",), + input="LINKER", + when=target_is_windows, + paths=toolchain_search_path, +) -option(env='HOST_LINKER', nargs=1, when=host_is_windows, help='Path to the host linker') +option(env="HOST_LINKER", nargs=1, when=host_is_windows, help="Path to the host linker") -host_link = check_prog('HOST_LINKER', ('lld-link',), input='HOST_LINKER', - when=host_is_windows, paths=host_toolchain_search_path) +host_link = check_prog( + "HOST_LINKER", + ("lld-link",), + input="HOST_LINKER", + when=host_is_windows, + paths=host_toolchain_search_path, +) -add_old_configure_assignment('LINKER', link) +add_old_configure_assignment("LINKER", link) diff --git a/build/pgo/profileserver.py b/build/pgo/profileserver.py index cbb45e08a293ce..7f3de106ab423f 100755 --- a/build/pgo/profileserver.py +++ b/build/pgo/profileserver.py @@ -22,27 +22,31 @@ PORT = 8888 PATH_MAPPINGS = { - '/webkit/PerformanceTests': 'third_party/webkit/PerformanceTests', + "/webkit/PerformanceTests": "third_party/webkit/PerformanceTests", # It is tempting to map to `testing/talos/talos/tests` instead, to avoid # writing `tests/` in every path, but we can't do that because some files # refer to scripts located in `../..`. - '/talos': 'testing/talos/talos', + "/talos": "testing/talos/talos", } def get_crashreports(directory, name=None): rc = 0 - upload_path = os.environ.get('UPLOAD_PATH') + upload_path = os.environ.get("UPLOAD_PATH") if upload_path: # For automation, log the minidumps with stackwalk and get them moved to # the artifacts directory. 
- fetches_dir = os.environ.get('MOZ_FETCHES_DIR') + fetches_dir = os.environ.get("MOZ_FETCHES_DIR") if not fetches_dir: - raise Exception("Unable to process minidump in automation because " - "$MOZ_FETCHES_DIR is not set in the environment") - stackwalk_binary = os.path.join(fetches_dir, 'minidump_stackwalk', 'minidump_stackwalk') - if sys.platform == 'win32': - stackwalk_binary += '.exe' + raise Exception( + "Unable to process minidump in automation because " + "$MOZ_FETCHES_DIR is not set in the environment" + ) + stackwalk_binary = os.path.join( + fetches_dir, "minidump_stackwalk", "minidump_stackwalk" + ) + if sys.platform == "win32": + stackwalk_binary += ".exe" minidump_path = os.path.join(directory, "minidumps") rc = mozcrash.check_for_crashes( minidump_path, @@ -54,48 +58,49 @@ def get_crashreports(directory, name=None): return rc -if __name__ == '__main__': +if __name__ == "__main__": cli = CLI() debug_args, interactive = cli.debugger_arguments() runner_args = cli.runner_args() build = MozbuildObject.from_environment() - binary = runner_args.get('binary') + binary = runner_args.get("binary") if not binary: try: binary = build.get_binary_path(where="staged-package") except BinaryNotFoundException as e: - print('{}\n\n{}\n'.format(e, e.help())) + print("{}\n\n{}\n".format(e, e.help())) sys.exit(1) binary = os.path.normpath(os.path.abspath(binary)) path_mappings = { - k: os.path.join(build.topsrcdir, v) - for k, v in PATH_MAPPINGS.items() + k: os.path.join(build.topsrcdir, v) for k, v in PATH_MAPPINGS.items() } - httpd = MozHttpd(port=PORT, - docroot=os.path.join(build.topsrcdir, "build", "pgo"), - path_mappings=path_mappings) + httpd = MozHttpd( + port=PORT, + docroot=os.path.join(build.topsrcdir, "build", "pgo"), + path_mappings=path_mappings, + ) httpd.start(block=False) locations = ServerLocations() - locations.add_host(host='127.0.0.1', - port=PORT, - options='primary,privileged') + locations.add_host(host="127.0.0.1", port=PORT, options="primary,privileged") - old_profraw_files = glob.glob('*.profraw') + old_profraw_files = glob.glob("*.profraw") for f in old_profraw_files: os.remove(f) with TemporaryDirectory() as profilePath: # TODO: refactor this into mozprofile - profile_data_dir = os.path.join(build.topsrcdir, 'testing', 'profiles') - with open(os.path.join(profile_data_dir, 'profiles.json'), 'r') as fh: - base_profiles = json.load(fh)['profileserver'] + profile_data_dir = os.path.join(build.topsrcdir, "testing", "profiles") + with open(os.path.join(profile_data_dir, "profiles.json"), "r") as fh: + base_profiles = json.load(fh)["profileserver"] - prefpaths = [os.path.join(profile_data_dir, profile, 'user.js') - for profile in base_profiles] + prefpaths = [ + os.path.join(profile_data_dir, profile, "user.js") + for profile in base_profiles + ] prefs = {} for path in prefpaths: @@ -113,12 +118,16 @@ def get_crashreports(directory, name=None): # and this is a bool pref. 
prefs["browser.tabs.remote.autostart"] = True - profile = FirefoxProfile(profile=profilePath, - preferences=prefs, - addons=[os.path.join( - build.topsrcdir, 'tools', 'quitter', - 'quitter@mozilla.org.xpi')], - locations=locations) + profile = FirefoxProfile( + profile=profilePath, + preferences=prefs, + addons=[ + os.path.join( + build.topsrcdir, "tools", "quitter", "quitter@mozilla.org.xpi" + ) + ], + locations=locations, + ) env = os.environ.copy() env["MOZ_CRASHREPORTER_NO_REPORT"] = "1" @@ -137,28 +146,31 @@ def get_crashreports(directory, name=None): env["LLVM_PROFILE_FILE"] = "default_%p_random_%m.profraw" # Write to an output file if we're running in automation - process_args = {'universal_newlines': True} - if 'UPLOAD_PATH' in env: - process_args['logfile'] = os.path.join(env['UPLOAD_PATH'], 'profile-run-1.log') + process_args = {"universal_newlines": True} + if "UPLOAD_PATH" in env: + process_args["logfile"] = os.path.join( + env["UPLOAD_PATH"], "profile-run-1.log" + ) # Run Firefox a first time to initialize its profile - runner = FirefoxRunner(profile=profile, - binary=binary, - cmdargs=['data:text/html,'], - env=env, - process_args=process_args) + runner = FirefoxRunner( + profile=profile, + binary=binary, + cmdargs=["data:text/html,"], + env=env, + process_args=process_args, + ) runner.start() ret = runner.wait() if ret: - print("Firefox exited with code %d during profile initialization" - % ret) - logfile = process_args.get('logfile') + print("Firefox exited with code %d during profile initialization" % ret) + logfile = process_args.get("logfile") if logfile: print("Firefox output (%s):" % logfile) with open(logfile) as f: print(f.read()) httpd.stop() - get_crashreports(profilePath, name='Profile initialization') + get_crashreports(profilePath, name="Profile initialization") sys.exit(ret) jarlog = os.getenv("JARLOG_FILE") @@ -168,48 +180,55 @@ def get_crashreports(directory, name=None): if os.path.exists(jarlog): os.remove(jarlog) - if 'UPLOAD_PATH' in env: - process_args['logfile'] = os.path.join(env['UPLOAD_PATH'], 'profile-run-2.log') + if "UPLOAD_PATH" in env: + process_args["logfile"] = os.path.join( + env["UPLOAD_PATH"], "profile-run-2.log" + ) cmdargs = ["http://localhost:%d/index.html" % PORT] - runner = FirefoxRunner(profile=profile, - binary=binary, - cmdargs=cmdargs, - env=env, - process_args=process_args) + runner = FirefoxRunner( + profile=profile, + binary=binary, + cmdargs=cmdargs, + env=env, + process_args=process_args, + ) runner.start(debug_args=debug_args, interactive=interactive) ret = runner.wait() httpd.stop() if ret: print("Firefox exited with code %d during profiling" % ret) - logfile = process_args.get('logfile') + logfile = process_args.get("logfile") if logfile: print("Firefox output (%s):" % logfile) with open(logfile) as f: print(f.read()) - get_crashreports(profilePath, name='Profiling run') + get_crashreports(profilePath, name="Profiling run") sys.exit(ret) # Try to move the crash reports to the artifacts even if Firefox appears # to exit successfully, in case there's a crash that doesn't set the # return code to non-zero for some reason. 
- if get_crashreports(profilePath, name='Firefox exited successfully?') != 0: + if get_crashreports(profilePath, name="Firefox exited successfully?") != 0: print("Firefox exited successfully, but produced a crashreport") sys.exit(1) - llvm_profdata = env.get('LLVM_PROFDATA') + llvm_profdata = env.get("LLVM_PROFDATA") if llvm_profdata: - profraw_files = glob.glob('*.profraw') + profraw_files = glob.glob("*.profraw") if not profraw_files: - print('Could not find profraw files in the current directory: %s' % os.getcwd()) + print( + "Could not find profraw files in the current directory: %s" + % os.getcwd() + ) sys.exit(1) merge_cmd = [ llvm_profdata, - 'merge', - '-o', - 'merged.profdata', + "merge", + "-o", + "merged.profdata", ] + profraw_files rc = subprocess.call(merge_cmd) if rc != 0: - print('INFRA-ERROR: Failed to merge profile data. Corrupt profile?') + print("INFRA-ERROR: Failed to merge profile data. Corrupt profile?") # exit with TBPL_RETRY sys.exit(4) diff --git a/build/submit_telemetry_data.py b/build/submit_telemetry_data.py index 700f7550f16415..0748fbfeadd247 100644 --- a/build/submit_telemetry_data.py +++ b/build/submit_telemetry_data.py @@ -19,15 +19,15 @@ verify_statedir, ) -BUILD_TELEMETRY_URL = 'https://incoming.telemetry.mozilla.org/{endpoint}' -SUBMIT_ENDPOINT = 'submit/eng-workflow/build/1/{ping_uuid}' -STATUS_ENDPOINT = 'status' +BUILD_TELEMETRY_URL = "https://incoming.telemetry.mozilla.org/{endpoint}" +SUBMIT_ENDPOINT = "submit/eng-workflow/build/1/{ping_uuid}" +STATUS_ENDPOINT = "status" def delete_expired_files(directory, days=30): - '''Discards files in a directory older than a specified number + """Discards files in a directory older than a specified number of days - ''' + """ now = datetime.datetime.now() for filename in os.listdir(directory): filepath = os.path.join(directory, filename) @@ -42,9 +42,9 @@ def delete_expired_files(directory, days=30): def check_edge_server_status(session): - '''Returns True if the Telemetry Edge Server + """Returns True if the Telemetry Edge Server is ready to accept data - ''' + """ status_url = BUILD_TELEMETRY_URL.format(endpoint=STATUS_ENDPOINT) response = session.get(status_url) if response.status_code != 200: @@ -53,9 +53,9 @@ def check_edge_server_status(session): def send_telemetry_ping(session, data, ping_uuid): - '''Sends a single build telemetry ping to the + """Sends a single build telemetry ping to the edge server, returning the response object - ''' + """ resource_url = SUBMIT_ENDPOINT.format(ping_uuid=str(ping_uuid)) url = BUILD_TELEMETRY_URL.format(endpoint=resource_url) response = session.post(url, json=data) @@ -64,9 +64,9 @@ def send_telemetry_ping(session, data, ping_uuid): def submit_telemetry_data(outgoing, submitted): - '''Sends information about `./mach build` invocations to + """Sends information about `./mach build` invocations to the Telemetry pipeline - ''' + """ with requests.Session() as session: # Confirm the server is OK if not check_edge_server_status(session): @@ -76,14 +76,14 @@ def submit_telemetry_data(outgoing, submitted): for filename in os.listdir(outgoing): path = os.path.join(outgoing, filename) - if os.path.isdir(path) or not path.endswith('.json'): - logging.info('skipping item {}'.format(path)) + if os.path.isdir(path) or not path.endswith(".json"): + logging.info("skipping item {}".format(path)) continue ping_uuid = os.path.splitext(filename)[0] # strip ".json" to get ping UUID try: - with open(path, 'r') as f: + with open(path, "r") as f: data = json.load(f) # Verify the data matches 
the schema @@ -93,7 +93,7 @@ def submit_telemetry_data(outgoing, submitted): response = send_telemetry_ping(session, data, ping_uuid) if response.status_code != 200: - msg = 'response code {code} sending {uuid} to telemetry: {body}'.format( + msg = "response code {code} sending {uuid} to telemetry: {body}".format( body=response.content, code=response.status_code, uuid=ping_uuid, @@ -102,27 +102,25 @@ def submit_telemetry_data(outgoing, submitted): continue # Move from "outgoing" to "submitted" - os.rename(os.path.join(outgoing, filename), - os.path.join(submitted, filename)) + os.rename( + os.path.join(outgoing, filename), os.path.join(submitted, filename) + ) - logging.info('successfully posted {} to telemetry'.format(ping_uuid)) + logging.info("successfully posted {} to telemetry".format(ping_uuid)) except ValueError as ve: # ValueError is thrown if JSON cannot be decoded - logging.exception('exception parsing JSON at %s: %s' - % (path, str(ve))) + logging.exception("exception parsing JSON at %s: %s" % (path, str(ve))) os.remove(path) except voluptuous.Error as e: # Invalid is thrown if some data does not fit # the correct Schema - logging.exception('invalid data found at %s: %s' - % (path, e.message)) + logging.exception("invalid data found at %s: %s" % (path, e.message)) os.remove(path) except Exception as e: - logging.error('exception posting to telemetry ' - 'server: %s' % str(e)) + logging.error("exception posting to telemetry " "server: %s" % str(e)) break delete_expired_files(submitted) @@ -130,9 +128,9 @@ def submit_telemetry_data(outgoing, submitted): return 0 -if __name__ == '__main__': +if __name__ == "__main__": if len(sys.argv) != 2: - print('usage: python submit_telemetry_data.py ') + print("usage: python submit_telemetry_data.py ") sys.exit(1) statedir = sys.argv[1] @@ -141,9 +139,11 @@ def submit_telemetry_data(outgoing, submitted): outgoing, submitted, telemetry_log = verify_statedir(statedir) # Configure logging - logging.basicConfig(filename=telemetry_log, - format='%(asctime)s %(message)s', - level=logging.DEBUG) + logging.basicConfig( + filename=telemetry_log, + format="%(asctime)s %(message)s", + level=logging.DEBUG, + ) sys.exit(submit_telemetry_data(outgoing, submitted)) diff --git a/build/unix/elfhack/inject/copy_source.py b/build/unix/elfhack/inject/copy_source.py index 757253e3975eba..02b4f6237e2772 100644 --- a/build/unix/elfhack/inject/copy_source.py +++ b/build/unix/elfhack/inject/copy_source.py @@ -6,5 +6,5 @@ def copy(out_file, in_path): - with open(in_path, 'r') as fh: + with open(in_path, "r") as fh: out_file.write(fh.read()) diff --git a/build/unix/elfhack/inject/moz.build b/build/unix/elfhack/inject/moz.build index 516104c06aaa61..5b717a49cfeb9d 100644 --- a/build/unix/elfhack/inject/moz.build +++ b/build/unix/elfhack/inject/moz.build @@ -6,35 +6,35 @@ # dummy library name to avoid skipping building the source here, which # we only need the object for. 
-Library('elfhack_inject') +Library("elfhack_inject") DIST_INSTALL = False -cpu = CONFIG['CPU_ARCH'] +cpu = CONFIG["CPU_ARCH"] -gen_src = '%s.c' % cpu -GeneratedFile(gen_src, script='copy_source.py', entry_point='copy', - inputs = ['../inject.c']) +gen_src = "%s.c" % cpu +GeneratedFile( + gen_src, script="copy_source.py", entry_point="copy", inputs=["../inject.c"] +) SOURCES += [ - '!%s' % gen_src, + "!%s" % gen_src, ] NO_PGO = True -for v in ('OS_CPPFLAGS', 'OS_CFLAGS', 'DEBUG', 'CLANG_PLUGIN', 'OPTIMIZE', - 'FRAMEPTR'): +for v in ("OS_CPPFLAGS", "OS_CFLAGS", "DEBUG", "CLANG_PLUGIN", "OPTIMIZE", "FRAMEPTR"): flags = [] idx = 0 for flag in COMPILE_FLAGS[v]: - if flag == '-isystem': - flags.append(''.join(COMPILE_FLAGS[v][idx:idx + 2])) - elif flag.startswith(('-m', '-I', '-isystem')): + if flag == "-isystem": + flags.append("".join(COMPILE_FLAGS[v][idx : idx + 2])) + elif flag.startswith(("-m", "-I", "-isystem")): flags.append(flag) idx += 1 COMPILE_FLAGS[v] = flags -COMPILE_FLAGS['OS_CFLAGS'] += ['-O2', '-fno-stack-protector', '-fno-lto'] +COMPILE_FLAGS["OS_CFLAGS"] += ["-O2", "-fno-stack-protector", "-fno-lto"] AllowCompilerWarnings() NoVisibilityFlags() diff --git a/build/unix/elfhack/moz.build b/build/unix/elfhack/moz.build index 4c12e7e62ee40f..bb68b3aa4e009f 100644 --- a/build/unix/elfhack/moz.build +++ b/build/unix/elfhack/moz.build @@ -5,29 +5,28 @@ # file, You can obtain one at http://mozilla.org/MPL/2.0/. DIST_INSTALL = False -DIRS += ['inject'] +DIRS += ["inject"] -if not CONFIG['CROSS_COMPILE']: +if not CONFIG["CROSS_COMPILE"]: SOURCES += [ - 'dummy.c', - 'test-array.c', - 'test-ctors.c', + "dummy.c", + "test-array.c", + "test-ctors.c", ] - SOURCES['dummy.c'].flags += ['-fno-lto'] - SOURCES['test-array.c'].flags += ['-fno-lto'] - SOURCES['test-ctors.c'].flags += ['-fno-lto'] + SOURCES["dummy.c"].flags += ["-fno-lto"] + SOURCES["test-array.c"].flags += ["-fno-lto"] + SOURCES["test-ctors.c"].flags += ["-fno-lto"] HOST_SOURCES += [ - 'elf.cpp', - 'elfhack.cpp', + "elf.cpp", + "elfhack.cpp", ] -HostProgram('elfhack') +HostProgram("elfhack") NO_PGO = True -COMPILE_FLAGS['OS_CXXFLAGS'] = [ - f for f in COMPILE_FLAGS['OS_CXXFLAGS'] if f != '-fno-exceptions' -] + ['-fexceptions'] - +COMPILE_FLAGS["OS_CXXFLAGS"] = [ + f for f in COMPILE_FLAGS["OS_CXXFLAGS"] if f != "-fno-exceptions" +] + ["-fexceptions"] diff --git a/build/unix/moz.build b/build/unix/moz.build index 95e5b92ac6c3f9..3e2a9047a269aa 100644 --- a/build/unix/moz.build +++ b/build/unix/moz.build @@ -4,12 +4,12 @@ # License, v. 2.0. If a copy of the MPL was not distributed with this # file, You can obtain one at http://mozilla.org/MPL/2.0/. -if CONFIG['MOZ_LIBSTDCXX_TARGET_VERSION'] or CONFIG['MOZ_LIBSTDCXX_HOST_VERSION']: - DIRS += ['stdc++compat'] +if CONFIG["MOZ_LIBSTDCXX_TARGET_VERSION"] or CONFIG["MOZ_LIBSTDCXX_HOST_VERSION"]: + DIRS += ["stdc++compat"] -if CONFIG['USE_ELF_HACK']: - DIRS += ['elfhack'] +if CONFIG["USE_ELF_HACK"]: + DIRS += ["elfhack"] FINAL_TARGET_FILES += [ - 'run-mozilla.sh', + "run-mozilla.sh", ] diff --git a/build/unix/rewrite_asan_dylib.py b/build/unix/rewrite_asan_dylib.py index ada15fa690b2db..0bcf31b559ad48 100644 --- a/build/unix/rewrite_asan_dylib.py +++ b/build/unix/rewrite_asan_dylib.py @@ -11,19 +11,20 @@ import shutil from buildconfig import substs -''' +""" Scans the given directories for binaries referencing the AddressSanitizer runtime library, copies it to the main directory and rewrites binaries to not reference it with absolute paths but with @executable_path instead. 
-''' +""" # This is the dylib we're looking for -DYLIB_NAME = 'libclang_rt.asan_osx_dynamic.dylib' +DYLIB_NAME = "libclang_rt.asan_osx_dynamic.dylib" def resolve_rpath(filename): - otoolOut = subprocess.check_output([substs['OTOOL'], '-l', filename], - universal_newlines=True) + otoolOut = subprocess.check_output( + [substs["OTOOL"], "-l", filename], universal_newlines=True + ) currentCmd = None # The lines we need to find look like this: @@ -36,19 +37,19 @@ def resolve_rpath(filename): # ... # Other load command types have a varying number of fields. for line in otoolOut.splitlines(): - cmdMatch = re.match(r'^\s+cmd ([A-Z_]+)', line) + cmdMatch = re.match(r"^\s+cmd ([A-Z_]+)", line) if cmdMatch is not None: currentCmd = cmdMatch.group(1) continue - if currentCmd == 'LC_RPATH': - pathMatch = re.match(r'^\s+path (.*) \(offset \d+\)', line) + if currentCmd == "LC_RPATH": + pathMatch = re.match(r"^\s+path (.*) \(offset \d+\)", line) if pathMatch is not None: path = pathMatch.group(1) if os.path.isdir(path): return path - sys.stderr.write('@rpath could not be resolved from %s\n' % filename) + sys.stderr.write("@rpath could not be resolved from %s\n" % filename) sys.exit(1) @@ -60,12 +61,13 @@ def scan_directory(path): filename = os.path.join(root, filename) # Skip all files that aren't either dylibs or executable - if not (filename.endswith('.dylib') or os.access(filename, os.X_OK)): + if not (filename.endswith(".dylib") or os.access(filename, os.X_OK)): continue try: otoolOut = subprocess.check_output( - [substs['OTOOL'], '-L', filename], universal_newlines=True) + [substs["OTOOL"], "-L", filename], universal_newlines=True + ) except Exception: # Errors are expected on non-mach executables, ignore them and continue continue @@ -75,14 +77,13 @@ def scan_directory(path): absDylibPath = line.split()[0] # Don't try to rewrite binaries twice - if absDylibPath.startswith('@executable_path/'): + if absDylibPath.startswith("@executable_path/"): continue if not dylibCopied: - if absDylibPath.startswith('@rpath/'): + if absDylibPath.startswith("@rpath/"): rpath = resolve_rpath(filename) - copyDylibPath = absDylibPath.replace( - '@rpath', rpath) + copyDylibPath = absDylibPath.replace("@rpath", rpath) else: copyDylibPath = absDylibPath @@ -93,28 +94,38 @@ def scan_directory(path): # Now rewrite the library itself subprocess.check_call( - [substs['INSTALL_NAME_TOOL'], '-id', - '@executable_path/' + DYLIB_NAME, - os.path.join(path, DYLIB_NAME)]) + [ + substs["INSTALL_NAME_TOOL"], + "-id", + "@executable_path/" + DYLIB_NAME, + os.path.join(path, DYLIB_NAME), + ] + ) dylibCopied = True else: - sys.stderr.write('dylib path in %s was not found at: %s\n' % ( - filename, copyDylibPath)) + sys.stderr.write( + "dylib path in %s was not found at: %s\n" + % (filename, copyDylibPath) + ) # Now use install_name_tool to rewrite the path in our binary - relpath = '' if path == root else os.path.relpath( - path, root) + '/' - subprocess.check_call([substs['INSTALL_NAME_TOOL'], '-change', - absDylibPath, - '@executable_path/' + relpath + DYLIB_NAME, - filename]) + relpath = "" if path == root else os.path.relpath(path, root) + "/" + subprocess.check_call( + [ + substs["INSTALL_NAME_TOOL"], + "-change", + absDylibPath, + "@executable_path/" + relpath + DYLIB_NAME, + filename, + ] + ) break if not dylibCopied: - sys.stderr.write('%s could not be found\n' % DYLIB_NAME) + sys.stderr.write("%s could not be found\n" % DYLIB_NAME) sys.exit(1) -if __name__ == '__main__': +if __name__ == "__main__": for d in sys.argv[1:]: 
scan_directory(d) diff --git a/build/unix/stdc++compat/moz.build b/build/unix/stdc++compat/moz.build index 4444d0c4f29724..aac8f2b0f591b1 100644 --- a/build/unix/stdc++compat/moz.build +++ b/build/unix/stdc++compat/moz.build @@ -4,14 +4,14 @@ # License, v. 2.0. If a copy of the MPL was not distributed with this # file, You can obtain one at http://mozilla.org/MPL/2.0/. -if CONFIG['MOZ_LIBSTDCXX_TARGET_VERSION']: - Library('stdc++compat') - SOURCES += ['stdc++compat.cpp'] +if CONFIG["MOZ_LIBSTDCXX_TARGET_VERSION"]: + Library("stdc++compat") + SOURCES += ["stdc++compat.cpp"] -if CONFIG['MOZ_LIBSTDCXX_HOST_VERSION']: - HostLibrary('host_stdc++compat') +if CONFIG["MOZ_LIBSTDCXX_HOST_VERSION"]: + HostLibrary("host_stdc++compat") HOST_SOURCES += [ - 'stdc++compat.cpp', + "stdc++compat.cpp", ] FORCE_STATIC_LIB = True @@ -19,9 +19,9 @@ FORCE_STATIC_LIB = True NO_PGO = True DisableStlWrapping() -COMPILE_FLAGS['CLANG_PLUGIN'] = [] +COMPILE_FLAGS["CLANG_PLUGIN"] = [] -DEFINES['MOZ_LIBSTDCXX_VERSION'] = CONFIG['MOZ_LIBSTDCXX_TARGET_VERSION'] -HOST_DEFINES['MOZ_LIBSTDCXX_VERSION'] = CONFIG['MOZ_LIBSTDCXX_HOST_VERSION'] +DEFINES["MOZ_LIBSTDCXX_VERSION"] = CONFIG["MOZ_LIBSTDCXX_TARGET_VERSION"] +HOST_DEFINES["MOZ_LIBSTDCXX_VERSION"] = CONFIG["MOZ_LIBSTDCXX_HOST_VERSION"] -OS_LIBS += ['-Wl,--version-script,%s/hide_std.ld' % SRCDIR] +OS_LIBS += ["-Wl,--version-script,%s/hide_std.ld" % SRCDIR] diff --git a/build/upload.py b/build/upload.py index 89de5e6a7aec5c..4742b284583a4e 100644 --- a/build/upload.py +++ b/build/upload.py @@ -34,14 +34,14 @@ def FixupMsysPath(path): and commandline arguments into Windows native paths. This sucks if you're trying to pass an absolute path on a remote server. This function attempts to un-mangle such paths.""" - if 'OSTYPE' in os.environ and os.environ['OSTYPE'] == 'msys': + if "OSTYPE" in os.environ and os.environ["OSTYPE"] == "msys": # sort of awful, find out where our shell is (should be in msys/bin) # and strip the first part of that path out of the other path - if 'SHELL' in os.environ: - sh = os.environ['SHELL'] - msys = sh[:sh.find('/bin')] + if "SHELL" in os.environ: + sh = os.environ["SHELL"] + msys = sh[: sh.find("/bin")] if path.startswith(msys): - path = path[len(msys):] + path = path[len(msys) :] return path @@ -55,7 +55,7 @@ def GetBaseRelativePath(path, local_file, base_path): dir = os.path.dirname(local_file) # strip base_path + extra slash and make it unixy - dir = dir[len(base_path) + 1:].replace('\\', '/') + dir = dir[len(base_path) + 1 :].replace("\\", "/") return path + dir @@ -79,26 +79,28 @@ def CopyFilesLocally(path, files, verbose=False, base_path=None): shutil.copy(file, target_path) -if __name__ == '__main__': - path = OptionalEnvironmentVariable('UPLOAD_PATH') +if __name__ == "__main__": + path = OptionalEnvironmentVariable("UPLOAD_PATH") - if sys.platform == 'win32': + if sys.platform == "win32": if path is not None: path = FixupMsysPath(path) parser = OptionParser(usage="usage: %prog [options] ") - parser.add_option("-b", "--base-path", - action="store", - help="Preserve file paths relative to this path when uploading. " - "If unset, all files will be uploaded directly to UPLOAD_PATH.") + parser.add_option( + "-b", + "--base-path", + action="store", + help="Preserve file paths relative to this path when uploading. 
" + "If unset, all files will be uploaded directly to UPLOAD_PATH.", + ) (options, args) = parser.parse_args() if len(args) < 1: print("You must specify at least one file to upload") sys.exit(1) try: - CopyFilesLocally(path, args, base_path=options.base_path, - verbose=True) + CopyFilesLocally(path, args, base_path=options.base_path, verbose=True) except IOError as strerror: print(strerror) sys.exit(1) diff --git a/build/upload_generated_sources.py b/build/upload_generated_sources.py index 0b5768cdf8d9c5..4a528b106eb5f0 100644 --- a/build/upload_generated_sources.py +++ b/build/upload_generated_sources.py @@ -26,29 +26,30 @@ # Arbitrary, should probably measure this. NUM_WORKER_THREADS = 10 -log = logging.getLogger('upload-generated-sources') +log = logging.getLogger("upload-generated-sources") log.setLevel(logging.INFO) @contextmanager def timed(): - ''' + """ Yield a function that provides the elapsed time in seconds since this function was called. - ''' + """ start = time.time() def elapsed(): return time.time() - start + yield elapsed def gzip_compress(data): - ''' + """ Apply gzip compression to `data` and return the result as a `BytesIO`. - ''' + """ b = io.BytesIO() - with gzip.GzipFile(fileobj=b, mode='w') as f: + with gzip.GzipFile(fileobj=b, mode="w") as f: f.write(data) b.flush() b.seek(0) @@ -56,16 +57,17 @@ def gzip_compress(data): def upload_worker(queue, event, bucket, session_args): - ''' + """ Get `(name, contents)` entries from `queue` and upload `contents` to S3 with gzip compression using `name` as the key, prefixed with the SHA-512 digest of `contents` as a hex string. If an exception occurs, set `event`. - ''' + """ try: import boto3 + session = boto3.session.Session(**session_args) - s3 = session.client('s3') + s3 = session.client("s3") while True: if event.is_set(): # Some other thread hit an exception. 
@@ -74,63 +76,68 @@ def upload_worker(queue, event, bucket, session_args): pathname = get_filename_with_digest(name, contents) compressed = gzip_compress(contents) extra_args = { - 'ContentEncoding': 'gzip', - 'ContentType': 'text/plain', + "ContentEncoding": "gzip", + "ContentType": "text/plain", } - log.info('Uploading "{}" ({} bytes)'.format( - pathname, len(compressed.getvalue()))) + log.info( + 'Uploading "{}" ({} bytes)'.format(pathname, len(compressed.getvalue())) + ) with timed() as elapsed: - s3.upload_fileobj(compressed, bucket, - pathname, ExtraArgs=extra_args) - log.info('Finished uploading "{}" in {:0.3f}s'.format( - pathname, elapsed())) + s3.upload_fileobj(compressed, bucket, pathname, ExtraArgs=extra_args) + log.info( + 'Finished uploading "{}" in {:0.3f}s'.format(pathname, elapsed()) + ) queue.task_done() except Exception: - log.exception('Thread encountered exception:') + log.exception("Thread encountered exception:") event.set() def do_work(artifact, region, bucket): - session_args = {'region_name': region} + session_args = {"region_name": region} session = requests.Session() - retry = Retry(total=5, backoff_factor=0.1, - status_forcelist=[500, 502, 503, 504]) + retry = Retry(total=5, backoff_factor=0.1, status_forcelist=[500, 502, 503, 504]) http_adapter = requests.adapters.HTTPAdapter(max_retries=retry) - session.mount('https://', http_adapter) - session.mount('http://', http_adapter) + session.mount("https://", http_adapter) + session.mount("http://", http_adapter) - if 'TASK_ID' in os.environ: - level = os.environ.get('MOZ_SCM_LEVEL', '1') - secrets_url = 'http://taskcluster/secrets/v1/secret/project/releng/gecko/build/level-{}/gecko-generated-sources-upload'.format( # noqa - level) + if "TASK_ID" in os.environ: + level = os.environ.get("MOZ_SCM_LEVEL", "1") + secrets_url = "http://taskcluster/secrets/v1/secret/project/releng/gecko/build/level-{}/gecko-generated-sources-upload".format( # noqa + level + ) log.info( - 'Using AWS credentials from the secrets service: "{}"'.format(secrets_url)) + 'Using AWS credentials from the secrets service: "{}"'.format(secrets_url) + ) res = session.get(secrets_url) res.raise_for_status() secret = res.json() session_args.update( - aws_access_key_id=secret['secret']['AWS_ACCESS_KEY_ID'], - aws_secret_access_key=secret['secret']['AWS_SECRET_ACCESS_KEY'], + aws_access_key_id=secret["secret"]["AWS_ACCESS_KEY_ID"], + aws_secret_access_key=secret["secret"]["AWS_SECRET_ACCESS_KEY"], ) else: - log.info('Trying to use your AWS credentials..') + log.info("Trying to use your AWS credentials..") # First, fetch the artifact containing the sources. log.info('Fetching generated sources artifact: "{}"'.format(artifact)) with timed() as elapsed: res = session.get(artifact) - log.info('Fetch HTTP status: {}, {} bytes downloaded in {:0.3f}s'.format( - res.status_code, len(res.content), elapsed())) + log.info( + "Fetch HTTP status: {}, {} bytes downloaded in {:0.3f}s".format( + res.status_code, len(res.content), elapsed() + ) + ) res.raise_for_status() # Create a queue and worker threads for uploading. 
q = Queue() event = Event() - log.info('Creating {} worker threads'.format(NUM_WORKER_THREADS)) + log.info("Creating {} worker threads".format(NUM_WORKER_THREADS)) for i in range(NUM_WORKER_THREADS): t = Thread(target=upload_worker, args=(q, event, bucket, session_args)) t.daemon = True t.start() - with tarfile.open(fileobj=io.BytesIO(res.content), mode='r|gz') as tar: + with tarfile.open(fileobj=io.BytesIO(res.content), mode="r|gz") as tar: # Next, process each file. for entry in tar: if event.is_set(): @@ -141,28 +148,28 @@ def do_work(artifact, region, bucket): # We don't use q.join() here because we want to also monitor event. while q.unfinished_tasks: if event.wait(0.1): - log.error('Worker thread encountered exception, exiting...') + log.error("Worker thread encountered exception, exiting...") break def main(argv): - logging.basicConfig(format='%(levelname)s - %(threadName)s - %(message)s') + logging.basicConfig(format="%(levelname)s - %(threadName)s - %(message)s") parser = argparse.ArgumentParser( - description='Upload generated source files in ARTIFACT to BUCKET in S3.') - parser.add_argument('artifact', - help='generated-sources artifact from build task') + description="Upload generated source files in ARTIFACT to BUCKET in S3." + ) + parser.add_argument("artifact", help="generated-sources artifact from build task") args = parser.parse_args(argv) region, bucket = get_s3_region_and_bucket() config = MozbuildObject.from_environment() config.activate_virtualenv() - config.virtualenv_manager.install_pip_package('boto3==1.4.4') + config.virtualenv_manager.install_pip_package("boto3==1.4.4") with timed() as elapsed: do_work(region=region, bucket=bucket, artifact=args.artifact) - log.info('Finished in {:.03f}s'.format(elapsed())) + log.info("Finished in {:.03f}s".format(elapsed())) return 0 -if __name__ == '__main__': +if __name__ == "__main__": sys.exit(main(sys.argv[1:])) diff --git a/build/valgrind/mach_commands.py b/build/valgrind/mach_commands.py index 26801bc41c53c2..408c0be28c14fa 100644 --- a/build/valgrind/mach_commands.py +++ b/build/valgrind/mach_commands.py @@ -22,24 +22,32 @@ def is_valgrind_build(cls): - '''Must be a build with --enable-valgrind and --disable-jemalloc.''' + """Must be a build with --enable-valgrind and --disable-jemalloc.""" defines = cls.config_environment.defines - return 'MOZ_VALGRIND' in defines and 'MOZ_MEMORY' not in defines + return "MOZ_VALGRIND" in defines and "MOZ_MEMORY" not in defines @CommandProvider class MachCommands(MachCommandBase): - ''' + """ Run Valgrind tests. - ''' - @Command('valgrind-test', category='testing', - conditions=[conditions.is_firefox_or_thunderbird, is_valgrind_build], - description='Run the Valgrind test job (memory-related errors).') - @CommandArgument('--suppressions', default=[], action='append', - metavar='FILENAME', - help='Specify a suppression file for Valgrind to use. Use ' - '--suppression multiple times to specify multiple suppression ' - 'files.') + """ + + @Command( + "valgrind-test", + category="testing", + conditions=[conditions.is_firefox_or_thunderbird, is_valgrind_build], + description="Run the Valgrind test job (memory-related errors).", + ) + @CommandArgument( + "--suppressions", + default=[], + action="append", + metavar="FILENAME", + help="Specify a suppression file for Valgrind to use. 
Use " + "--suppression multiple times to specify multiple suppression " + "files.", + ) def valgrind_test(self, suppressions): from mozfile import TemporaryDirectory @@ -51,28 +59,29 @@ def valgrind_test(self, suppressions): from six import string_types from valgrind.output_handler import OutputHandler - build_dir = os.path.join(self.topsrcdir, 'build') + build_dir = os.path.join(self.topsrcdir, "build") # XXX: currently we just use the PGO inputs for Valgrind runs. This may # change in the future. - httpd = MozHttpd(docroot=os.path.join(build_dir, 'pgo')) + httpd = MozHttpd(docroot=os.path.join(build_dir, "pgo")) httpd.start(block=False) with TemporaryDirectory() as profilePath: # TODO: refactor this into mozprofile - profile_data_dir = os.path.join( - self.topsrcdir, 'testing', 'profiles') - with open(os.path.join(profile_data_dir, 'profiles.json'), 'r') as fh: - base_profiles = json.load(fh)['valgrind'] + profile_data_dir = os.path.join(self.topsrcdir, "testing", "profiles") + with open(os.path.join(profile_data_dir, "profiles.json"), "r") as fh: + base_profiles = json.load(fh)["valgrind"] - prefpaths = [os.path.join(profile_data_dir, profile, 'user.js') - for profile in base_profiles] + prefpaths = [ + os.path.join(profile_data_dir, profile, "user.js") + for profile in base_profiles + ] prefs = {} for path in prefpaths: prefs.update(Preferences.read_prefs(path)) interpolation = { - 'server': '%s:%d' % httpd.httpd.server_address, + "server": "%s:%d" % httpd.httpd.server_address, } for k, v in prefs.items(): if isinstance(v, string_types): @@ -80,88 +89,93 @@ def valgrind_test(self, suppressions): prefs[k] = Preferences.cast(v) quitter = os.path.join( - self.topsrcdir, 'tools', 'quitter', 'quitter@mozilla.org.xpi') + self.topsrcdir, "tools", "quitter", "quitter@mozilla.org.xpi" + ) locations = ServerLocations() - locations.add_host(host='127.0.0.1', - port=httpd.httpd.server_port, - options='primary') + locations.add_host( + host="127.0.0.1", port=httpd.httpd.server_port, options="primary" + ) - profile = FirefoxProfile(profile=profilePath, - preferences=prefs, - addons=[quitter], - locations=locations) + profile = FirefoxProfile( + profile=profilePath, + preferences=prefs, + addons=[quitter], + locations=locations, + ) firefox_args = [httpd.get_url()] env = os.environ.copy() - env['G_SLICE'] = 'always-malloc' - env['MOZ_CC_RUN_DURING_SHUTDOWN'] = '1' - env['MOZ_CRASHREPORTER_NO_REPORT'] = '1' - env['MOZ_DISABLE_NONLOCAL_CONNECTIONS'] = '1' - env['XPCOM_DEBUG_BREAK'] = 'warn' + env["G_SLICE"] = "always-malloc" + env["MOZ_CC_RUN_DURING_SHUTDOWN"] = "1" + env["MOZ_CRASHREPORTER_NO_REPORT"] = "1" + env["MOZ_DISABLE_NONLOCAL_CONNECTIONS"] = "1" + env["XPCOM_DEBUG_BREAK"] = "warn" outputHandler = OutputHandler(self.log) kp_kwargs = { - 'processOutputLine': [outputHandler], - 'universal_newlines': True, + "processOutputLine": [outputHandler], + "universal_newlines": True, } - valgrind = 'valgrind' + valgrind = "valgrind" if not os.path.exists(valgrind): valgrind = findInPath(valgrind) valgrind_args = [ valgrind, - '--sym-offsets=yes', - '--smc-check=all-non-file', - '--vex-iropt-register-updates=allregs-at-mem-access', - '--gen-suppressions=all', - '--num-callers=36', - '--leak-check=full', - '--show-possibly-lost=no', - '--track-origins=yes', - '--trace-children=yes', - '-v', # Enable verbosity to get the list of used suppressions + "--sym-offsets=yes", + "--smc-check=all-non-file", + "--vex-iropt-register-updates=allregs-at-mem-access", + "--gen-suppressions=all", + "--num-callers=36", + 
"--leak-check=full", + "--show-possibly-lost=no", + "--track-origins=yes", + "--trace-children=yes", + "-v", # Enable verbosity to get the list of used suppressions # Avoid excessive delays in the presence of spinlocks. # See bug 1309851. - '--fair-sched=yes', + "--fair-sched=yes", # Keep debuginfo after library unmap. See bug 1382280. - '--keep-debuginfo=yes', + "--keep-debuginfo=yes", # Reduce noise level on rustc and/or LLVM compiled code. # See bug 1365915 - '--expensive-definedness-checks=yes', + "--expensive-definedness-checks=yes", # Compensate for the compiler inlining `new` but not `delete` # or vice versa. - '--show-mismatched-frees=no', + "--show-mismatched-frees=no", ] for s in suppressions: - valgrind_args.append('--suppressions=' + s) + valgrind_args.append("--suppressions=" + s) - supps_dir = os.path.join(build_dir, 'valgrind') - supps_file1 = os.path.join(supps_dir, 'cross-architecture.sup') - valgrind_args.append('--suppressions=' + supps_file1) + supps_dir = os.path.join(build_dir, "valgrind") + supps_file1 = os.path.join(supps_dir, "cross-architecture.sup") + valgrind_args.append("--suppressions=" + supps_file1) - if mozinfo.os == 'linux': + if mozinfo.os == "linux": machtype = { - 'x86_64': 'x86_64-pc-linux-gnu', - 'x86': 'i386-pc-linux-gnu', + "x86_64": "x86_64-pc-linux-gnu", + "x86": "i386-pc-linux-gnu", }.get(mozinfo.processor) if machtype: - supps_file2 = os.path.join(supps_dir, machtype + '.sup') + supps_file2 = os.path.join(supps_dir, machtype + ".sup") if os.path.isfile(supps_file2): - valgrind_args.append('--suppressions=' + supps_file2) + valgrind_args.append("--suppressions=" + supps_file2) exitcode = None timeout = 1800 binary_not_found_exception = None try: - runner = FirefoxRunner(profile=profile, - binary=self.get_binary_path(), - cmdargs=firefox_args, - env=env, - process_args=kp_kwargs) + runner = FirefoxRunner( + profile=profile, + binary=self.get_binary_path(), + cmdargs=firefox_args, + env=env, + process_args=kp_kwargs, + ) runner.start(debug_args=valgrind_args) exitcode = runner.wait(timeout=timeout) except BinaryNotFoundException as e: @@ -171,39 +185,58 @@ def valgrind_test(self, suppressions): supps = outputHandler.suppression_count if errs != supps: status = 1 # turns the TBPL job orange - self.log(logging.ERROR, 'valgrind-fail-parsing', - {'errs': errs, 'supps': supps}, - 'TEST-UNEXPECTED-FAIL | valgrind-test | error parsing: {errs} errors ' - 'seen, but {supps} generated suppressions seen') + self.log( + logging.ERROR, + "valgrind-fail-parsing", + {"errs": errs, "supps": supps}, + "TEST-UNEXPECTED-FAIL | valgrind-test | error parsing: {errs} errors " + "seen, but {supps} generated suppressions seen", + ) elif errs == 0: status = 0 - self.log(logging.INFO, 'valgrind-pass', {}, - 'TEST-PASS | valgrind-test | valgrind found no errors') + self.log( + logging.INFO, + "valgrind-pass", + {}, + "TEST-PASS | valgrind-test | valgrind found no errors", + ) else: status = 1 # turns the TBPL job orange # We've already printed details of the errors. 
if binary_not_found_exception: status = 2 # turns the TBPL job red - self.log(logging.ERROR, 'valgrind-fail-errors', - {'error': str(binary_not_found_exception)}, - 'TEST-UNEXPECTED-FAIL | valgrind-test | {error}') - self.log(logging.INFO, 'valgrind-fail-errors', - {'help': binary_not_found_exception.help()}, - '{help}') + self.log( + logging.ERROR, + "valgrind-fail-errors", + {"error": str(binary_not_found_exception)}, + "TEST-UNEXPECTED-FAIL | valgrind-test | {error}", + ) + self.log( + logging.INFO, + "valgrind-fail-errors", + {"help": binary_not_found_exception.help()}, + "{help}", + ) elif exitcode is None: status = 2 # turns the TBPL job red - self.log(logging.ERROR, 'valgrind-fail-timeout', - {'timeout': timeout}, - 'TEST-UNEXPECTED-FAIL | valgrind-test | Valgrind timed out ' - '(reached {timeout} second limit)') + self.log( + logging.ERROR, + "valgrind-fail-timeout", + {"timeout": timeout}, + "TEST-UNEXPECTED-FAIL | valgrind-test | Valgrind timed out " + "(reached {timeout} second limit)", + ) elif exitcode != 0: status = 2 # turns the TBPL job red - self.log(logging.ERROR, 'valgrind-fail-errors', - {'exitcode': exitcode}, - 'TEST-UNEXPECTED-FAIL | valgrind-test | non-zero exit code ' - 'from Valgrind: {exitcode}') + self.log( + logging.ERROR, + "valgrind-fail-errors", + {"exitcode": exitcode}, + "TEST-UNEXPECTED-FAIL | valgrind-test | non-zero exit code " + "from Valgrind: {exitcode}", + ) httpd.stop() diff --git a/build/valgrind/output_handler.py b/build/valgrind/output_handler.py index 9557bb534391c1..d3a84d34443418 100644 --- a/build/valgrind/output_handler.py +++ b/build/valgrind/output_handler.py @@ -9,7 +9,7 @@ class OutputHandler(object): - ''' + """ A class for handling Valgrind output. Valgrind errors look like this: @@ -38,31 +38,32 @@ class OutputHandler(object): the count of these lines doesn't match the error count found during parsing, then the parsing has missed one or more errors and we can fail appropriately. - ''' # NOQA: E501 + """ # NOQA: E501 def __init__(self, logger): # The regexps in this list match all of Valgrind's errors. Note that # Valgrind is English-only, so we don't have to worry about # localization. 
self.logger = logger - self.re_error = \ - r'==\d+== (' + \ - r'(Use of uninitialised value of size \d+)|' + \ - r'(Conditional jump or move depends on uninitialised value\(s\))|' + \ - r'(Syscall param .* contains uninitialised byte\(s\))|' + \ - r'(Syscall param .* points to (unaddressable|uninitialised) byte\(s\))|' + \ - r'((Unaddressable|Uninitialised) byte\(s\) found during client check request)|' + \ - r'(Invalid free\(\) / delete / delete\[\] / realloc\(\))|' + \ - r'(Mismatched free\(\) / delete / delete \[\])|' + \ - r'(Invalid (read|write) of size \d+)|' + \ - r'(Jump to the invalid address stated on the next line)|' + \ - r'(Source and destination overlap in .*)|' + \ - r'(.* bytes in .* blocks are .* lost)' + \ - r')' + self.re_error = ( + r"==\d+== (" + + r"(Use of uninitialised value of size \d+)|" + + r"(Conditional jump or move depends on uninitialised value\(s\))|" + + r"(Syscall param .* contains uninitialised byte\(s\))|" + + r"(Syscall param .* points to (unaddressable|uninitialised) byte\(s\))|" + + r"((Unaddressable|Uninitialised) byte\(s\) found during client check request)|" + + r"(Invalid free\(\) / delete / delete\[\] / realloc\(\))|" + + r"(Mismatched free\(\) / delete / delete \[\])|" + + r"(Invalid (read|write) of size \d+)|" + + r"(Jump to the invalid address stated on the next line)|" + + r"(Source and destination overlap in .*)|" + + r"(.* bytes in .* blocks are .* lost)" + + r")" + ) # Match identifer chars, plus ':' for namespaces, and '\?' in order to # match "???" which Valgrind sometimes produces. - self.re_stack_entry = r'^==\d+==.*0x[A-Z0-9]+: ([A-Za-z0-9_:\?]+)' - self.re_suppression = r' *' + self.re_stack_entry = r"^==\d+==.*0x[A-Z0-9]+: ([A-Za-z0-9_:\?]+)" + self.re_suppression = r" *" self.error_count = 0 self.suppression_count = 0 self.number_of_stack_entries_to_get = 0 @@ -71,7 +72,7 @@ def __init__(self, logger): self.buffered_lines = None def log(self, line): - self.logger(logging.INFO, 'valgrind-output', {'line': line}, '{line}') + self.logger(logging.INFO, "valgrind-output", {"line": line}, "{line}") def __call__(self, line): if self.number_of_stack_entries_to_get == 0: @@ -94,18 +95,20 @@ def __call__(self, line): if m: self.curr_location += m.group(1) else: - self.curr_location += '?!?' + self.curr_location += "?!?" self.number_of_stack_entries_to_get -= 1 if self.number_of_stack_entries_to_get != 0: - self.curr_location += ' / ' + self.curr_location += " / " else: # We've finished getting the first few stack entries. Print the # failure message and the buffered lines, and then reset state. 
- self.logger(logging.ERROR, 'valgrind-error-msg', - {'error': self.curr_error, - 'location': self.curr_location}, - 'TEST-UNEXPECTED-FAIL | valgrind-test | {error} at {location}') + self.logger( + logging.ERROR, + "valgrind-error-msg", + {"error": self.curr_error, "location": self.curr_location}, + "TEST-UNEXPECTED-FAIL | valgrind-test | {error} at {location}", + ) for b in self.buffered_lines: self.log(b) self.curr_error = None diff --git a/build/variables.py b/build/variables.py index 01ae2f0d8bc286..ef2d6e6800f3c5 100644 --- a/build/variables.py +++ b/build/variables.py @@ -9,35 +9,36 @@ import sys from datetime import datetime -SOURCESTAMP_FILENAME = 'sourcestamp.txt' +SOURCESTAMP_FILENAME = "sourcestamp.txt" def buildid_header(output): - buildid = os.environ.get('MOZ_BUILD_DATE') + buildid = os.environ.get("MOZ_BUILD_DATE") if buildid and len(buildid) != 14: - print('Ignoring invalid MOZ_BUILD_DATE: %s' % buildid, file=sys.stderr) + print("Ignoring invalid MOZ_BUILD_DATE: %s" % buildid, file=sys.stderr) buildid = None if not buildid: - buildid = datetime.now().strftime('%Y%m%d%H%M%S') + buildid = datetime.now().strftime("%Y%m%d%H%M%S") output.write("#define MOZ_BUILDID %s\n" % buildid) def get_program_output(*command): try: with open(os.devnull) as stderr: - return subprocess.check_output(command, stderr=stderr, - universal_newlines=True) + return subprocess.check_output( + command, stderr=stderr, universal_newlines=True + ) except Exception: - return '' + return "" def get_hg_info(workdir): - repo = get_program_output('hg', '-R', workdir, 'path', 'default') + repo = get_program_output("hg", "-R", workdir, "path", "default") if repo: repo = repo.strip() - if repo.startswith('ssh://'): - repo = 'https://' + repo[6:] - repo = repo.rstrip('/') + if repo.startswith("ssh://"): + repo = "https://" + repo[6:] + repo = repo.rstrip("/") changeset = get_hg_changeset(workdir) @@ -45,7 +46,7 @@ def get_hg_info(workdir): def get_hg_changeset(path): - return get_program_output('hg', '-R', path, 'parent', '--template={node}') + return get_program_output("hg", "-R", path, "parent", "--template={node}") def get_info_from_sourcestamp(sourcestamp_path): @@ -62,50 +63,51 @@ def get_info_from_sourcestamp(sourcestamp_path): # Parse the repo and the changeset. The sourcestamp file is supposed to # contain two lines: the first is the build id and the second is the source # URL. - if len(lines) != 2 or not lines[1].startswith('http'): + if len(lines) != 2 or not lines[1].startswith("http"): # Just return if the file doesn't contain what we expect. return None, None # Return the repo and the changeset. 
- return lines[1].split('/rev/') + return lines[1].split("/rev/") def source_repo_header(output): # We allow the source repo and changeset to be specified via the # environment (see configure) import buildconfig - repo = buildconfig.substs.get('MOZ_SOURCE_REPO') - changeset = buildconfig.substs.get('MOZ_SOURCE_CHANGESET') - source = '' + + repo = buildconfig.substs.get("MOZ_SOURCE_REPO") + changeset = buildconfig.substs.get("MOZ_SOURCE_CHANGESET") + source = "" if not repo: - sourcestamp_path = os.path.join( - buildconfig.topsrcdir, SOURCESTAMP_FILENAME) - if os.path.exists(os.path.join(buildconfig.topsrcdir, '.hg')): + sourcestamp_path = os.path.join(buildconfig.topsrcdir, SOURCESTAMP_FILENAME) + if os.path.exists(os.path.join(buildconfig.topsrcdir, ".hg")): repo, changeset = get_hg_info(buildconfig.topsrcdir) elif os.path.exists(sourcestamp_path): repo, changeset = get_info_from_sourcestamp(sourcestamp_path) elif not changeset: changeset = get_hg_changeset(buildconfig.topsrcdir) if not changeset: - raise Exception('could not resolve changeset; ' - 'try setting MOZ_SOURCE_CHANGESET') + raise Exception( + "could not resolve changeset; " "try setting MOZ_SOURCE_CHANGESET" + ) if changeset: - output.write('#define MOZ_SOURCE_STAMP %s\n' % changeset) + output.write("#define MOZ_SOURCE_STAMP %s\n" % changeset) - if repo and buildconfig.substs.get('MOZ_INCLUDE_SOURCE_INFO'): - source = '%s/rev/%s' % (repo, changeset) - output.write('#define MOZ_SOURCE_REPO %s\n' % repo) - output.write('#define MOZ_SOURCE_URL %s\n' % source) + if repo and buildconfig.substs.get("MOZ_INCLUDE_SOURCE_INFO"): + source = "%s/rev/%s" % (repo, changeset) + output.write("#define MOZ_SOURCE_REPO %s\n" % repo) + output.write("#define MOZ_SOURCE_URL %s\n" % source) def main(args): - if (len(args)): + if len(args): func = globals().get(args[0]) if func: return func(sys.stdout, *args[1:]) -if __name__ == '__main__': +if __name__ == "__main__": sys.exit(main(sys.argv[1:])) diff --git a/build/win32/autowinchecksec.py b/build/win32/autowinchecksec.py index 80ed2e218d9b14..1a394977dc140a 100644 --- a/build/win32/autowinchecksec.py +++ b/build/win32/autowinchecksec.py @@ -22,56 +22,63 @@ # execute winchecksec against the binary, using the WINCHECKSEC environment # variable as the path to winchecksec.exe try: - winchecksec_path = buildconfig.substs['WINCHECKSEC'] + winchecksec_path = buildconfig.substs["WINCHECKSEC"] except KeyError: - print("TEST-UNEXPECTED-FAIL | autowinchecksec.py | WINCHECKSEC environment variable is " - "not set, can't check DEP/ASLR etc. status.") + print( + "TEST-UNEXPECTED-FAIL | autowinchecksec.py | WINCHECKSEC environment variable is " + "not set, can't check DEP/ASLR etc. status." 
+ ) sys.exit(1) -wine = buildconfig.substs.get('WINE') -if wine and winchecksec_path.lower().endswith('.exe'): +wine = buildconfig.substs.get("WINE") +if wine and winchecksec_path.lower().endswith(".exe"): cmd = [wine, winchecksec_path] else: cmd = [winchecksec_path] try: - result = subprocess.check_output(cmd + ['-j', binary_path], - universal_newlines=True) + result = subprocess.check_output(cmd + ["-j", binary_path], universal_newlines=True) except subprocess.CalledProcessError as e: - print("TEST-UNEXPECTED-FAIL | autowinchecksec.py | Winchecksec returned error code %d:\n%s" % ( - e.returncode, e.output)) + print( + "TEST-UNEXPECTED-FAIL | autowinchecksec.py | Winchecksec returned error code %d:\n%s" + % (e.returncode, e.output) + ) sys.exit(1) result = json.loads(result) checks = [ - 'aslr', - 'cfg', - 'dynamicBase', - 'gs', - 'isolation', - 'nx', - 'seh', + "aslr", + "cfg", + "dynamicBase", + "gs", + "isolation", + "nx", + "seh", ] -if buildconfig.substs['CPU_ARCH'] == 'x86': +if buildconfig.substs["CPU_ARCH"] == "x86": checks += [ - 'safeSEH', + "safeSEH", ] else: checks += [ - 'highEntropyVA', + "highEntropyVA", ] failed = [c for c in checks if result.get(c) is False] if failed: - print("TEST-UNEXPECTED-FAIL | autowinchecksec.py | Winchecksec reported %d error(s) for %s" % - (len(failed), binary_path)) - print("TEST-UNEXPECTED-FAIL | autowinchecksec.py | The following check(s) failed: %s" % - (', '.join(failed))) + print( + "TEST-UNEXPECTED-FAIL | autowinchecksec.py | Winchecksec reported %d error(s) for %s" + % (len(failed), binary_path) + ) + print( + "TEST-UNEXPECTED-FAIL | autowinchecksec.py | The following check(s) failed: %s" + % (", ".join(failed)) + ) sys.exit(1) else: print("TEST-PASS | autowinchecksec.py | %s succeeded" % binary_path) diff --git a/build/win32/crashinjectdll/moz.build b/build/win32/crashinjectdll/moz.build index 270b88ec07f602..ff113f61e6c104 100644 --- a/build/win32/crashinjectdll/moz.build +++ b/build/win32/crashinjectdll/moz.build @@ -5,12 +5,12 @@ # file, You can obtain one at http://mozilla.org/MPL/2.0/. SOURCES += [ - 'crashinjectdll.cpp', + "crashinjectdll.cpp", ] -SharedLibrary('crashinjectdll') +SharedLibrary("crashinjectdll") -DEFFILE = 'crashinjectdll.def' +DEFFILE = "crashinjectdll.def" USE_STATIC_LIBS = True NO_PGO = True diff --git a/build/win32/moz.build b/build/win32/moz.build index 853d6f03c08223..7f4e378573a5a4 100644 --- a/build/win32/moz.build +++ b/build/win32/moz.build @@ -4,41 +4,35 @@ # License, v. 2.0. If a copy of the MPL was not distributed with this # file, You can obtain one at http://mozilla.org/MPL/2.0/. 
-TEST_DIRS += ['crashinjectdll'] +TEST_DIRS += ["crashinjectdll"] -if CONFIG['ENABLE_TESTS']: - Program('crashinject') +if CONFIG["ENABLE_TESTS"]: + Program("crashinject") SOURCES += [ - 'crashinject.cpp', + "crashinject.cpp", ] USE_STATIC_LIBS = True NO_PGO = True -if CONFIG['WIN32_REDIST_DIR'] and CONFIG['COMPILE_ENVIRONMENT']: - for f in ['MSVC_C_RUNTIME_DLL', 'MSVC_CXX_RUNTIME_DLL']: - FINAL_TARGET_FILES += [ - '%%%s/%s' % (CONFIG['WIN32_REDIST_DIR'], CONFIG[f]) - ] +if CONFIG["WIN32_REDIST_DIR"] and CONFIG["COMPILE_ENVIRONMENT"]: + for f in ["MSVC_C_RUNTIME_DLL", "MSVC_CXX_RUNTIME_DLL"]: + FINAL_TARGET_FILES += ["%%%s/%s" % (CONFIG["WIN32_REDIST_DIR"], CONFIG[f])] -if CONFIG['WIN_UCRT_REDIST_DIR'] and CONFIG['COMPILE_ENVIRONMENT']: +if CONFIG["WIN_UCRT_REDIST_DIR"] and CONFIG["COMPILE_ENVIRONMENT"]: win7_ucrt_redists = [ - 'api-ms-win-core-file-l1-2-0.dll', - 'api-ms-win-core-file-l2-1-0.dll', - 'api-ms-win-core-localization-l1-2-0.dll', - 'api-ms-win-core-processthreads-l1-1-1.dll', - 'api-ms-win-core-synch-l1-2-0.dll', - 'api-ms-win-core-timezone-l1-1-0.dll', - 'api-ms-win-crt-*.dll', - 'ucrtbase.dll', + "api-ms-win-core-file-l1-2-0.dll", + "api-ms-win-core-file-l2-1-0.dll", + "api-ms-win-core-localization-l1-2-0.dll", + "api-ms-win-core-processthreads-l1-1-1.dll", + "api-ms-win-core-synch-l1-2-0.dll", + "api-ms-win-core-timezone-l1-1-0.dll", + "api-ms-win-crt-*.dll", + "ucrtbase.dll", ] for f in win7_ucrt_redists: - FINAL_TARGET_FILES += [ - '%%%s/%s' % (CONFIG['WIN_UCRT_REDIST_DIR'], f) - ] + FINAL_TARGET_FILES += ["%%%s/%s" % (CONFIG["WIN_UCRT_REDIST_DIR"], f)] -if CONFIG['LLVM_SYMBOLIZER'] and CONFIG['WIN_DIA_SDK_BIN_DIR']: +if CONFIG["LLVM_SYMBOLIZER"] and CONFIG["WIN_DIA_SDK_BIN_DIR"]: # On Windows, llvm-symbolizer depends on the MS DIA library. - FINAL_TARGET_FILES += [ - '%%%s/msdia140.dll' % CONFIG['WIN_DIA_SDK_BIN_DIR'] - ] + FINAL_TARGET_FILES += ["%%%s/msdia140.dll" % CONFIG["WIN_DIA_SDK_BIN_DIR"]] diff --git a/build/windows_toolchain.py b/build/windows_toolchain.py index 98a2b9fc2543b8..dab661042d2ba4 100644 --- a/build/windows_toolchain.py +++ b/build/windows_toolchain.py @@ -16,134 +16,126 @@ import os import sys -from mozpack.files import ( - FileFinder, -) -from mozpack.mozjar import ( - JarWriter, -) +from mozpack.files import FileFinder +from mozpack.mozjar import JarWriter import mozpack.path as mozpath -SDK_RELEASE = '10.0.17134.0' +SDK_RELEASE = "10.0.17134.0" PATTERNS = [ { - 'srcdir': '%(vs_path)s/DIA SDK', - 'dstdir': 'DIA SDK', - 'files': [ + "srcdir": "%(vs_path)s/DIA SDK", + "dstdir": "DIA SDK", + "files": [ { - 'pattern': 'bin/**', - 'ignore': ( - 'bin/arm/**', - ), + "pattern": "bin/**", + "ignore": ("bin/arm/**",), }, { - 'pattern': 'idl/**', + "pattern": "idl/**", }, { - 'pattern': 'include/**', + "pattern": "include/**", }, { - 'pattern': 'lib/**', - 'ignore': ( - 'lib/arm/**', - ), + "pattern": "lib/**", + "ignore": ("lib/arm/**",), }, ], }, { - 'srcdir': '%(vs_path)s/VC/Tools/MSVC/14.16.27023', - 'dstdir': 'VC', - 'files': [ + "srcdir": "%(vs_path)s/VC/Tools/MSVC/14.16.27023", + "dstdir": "VC", + "files": [ # ATL is needed by Breakpad. { - 'pattern': 'atlmfc/include/**', + "pattern": "atlmfc/include/**", }, { - 'pattern': 'atlmfc/lib/arm64/atls.*', + "pattern": "atlmfc/lib/arm64/atls.*", }, { - 'pattern': 'atlmfc/lib/x64/atls.*', + "pattern": "atlmfc/lib/x64/atls.*", }, { - 'pattern': 'atlmfc/lib/x86/atls.*', + "pattern": "atlmfc/lib/x86/atls.*", }, # ARM64 PGO-instrumented builds require ARM64 pgort140.dll. 
{ - 'pattern': 'bin/arm64/pgort140.dll', + "pattern": "bin/arm64/pgort140.dll", }, { - 'pattern': 'bin/Hostx64/**', + "pattern": "bin/Hostx64/**", }, # 32-bit PGO-instrumented builds require 32-bit pgort140.dll. { - 'pattern': 'bin/Hostx86/x86/pgort140.dll', + "pattern": "bin/Hostx86/x86/pgort140.dll", }, { - 'pattern': 'include/**', + "pattern": "include/**", }, { - 'pattern': 'lib/**', - 'ignore': ( - 'lib/arm64/store/**', - 'lib/onecore/**', - 'lib/x64/store/**', - 'lib/x86/store/**', + "pattern": "lib/**", + "ignore": ( + "lib/arm64/store/**", + "lib/onecore/**", + "lib/x64/store/**", + "lib/x86/store/**", ), }, ], }, { - 'srcdir': '%(vs_path)s/VC/Redist/MSVC/14.16.27012', - 'dstdir': 'VC/redist', - 'files': [ + "srcdir": "%(vs_path)s/VC/Redist/MSVC/14.16.27012", + "dstdir": "VC/redist", + "files": [ { - 'pattern': 'arm64/Microsoft.VC141.CRT/**', + "pattern": "arm64/Microsoft.VC141.CRT/**", }, { - 'pattern': 'x64/Microsoft.VC141.CRT/**', + "pattern": "x64/Microsoft.VC141.CRT/**", }, { - 'pattern': 'x86/Microsoft.VC141.CRT/**', + "pattern": "x86/Microsoft.VC141.CRT/**", }, ], }, { - 'srcdir': '%(sdk_path)s', - 'dstdir': 'SDK', - 'files': [ + "srcdir": "%(sdk_path)s", + "dstdir": "SDK", + "files": [ { - 'pattern': 'bin/%s/x64/**' % SDK_RELEASE, + "pattern": "bin/%s/x64/**" % SDK_RELEASE, }, { - 'pattern': 'Include/%s/**' % SDK_RELEASE, + "pattern": "Include/%s/**" % SDK_RELEASE, }, { - 'pattern': 'Lib/%s/ucrt/arm64/**' % SDK_RELEASE, + "pattern": "Lib/%s/ucrt/arm64/**" % SDK_RELEASE, }, { - 'pattern': 'Lib/%s/ucrt/x64/**' % SDK_RELEASE, + "pattern": "Lib/%s/ucrt/x64/**" % SDK_RELEASE, }, { - 'pattern': 'Lib/%s/ucrt/x86/**' % SDK_RELEASE, + "pattern": "Lib/%s/ucrt/x86/**" % SDK_RELEASE, }, { - 'pattern': 'Lib/%s/um/arm64/**' % SDK_RELEASE, + "pattern": "Lib/%s/um/arm64/**" % SDK_RELEASE, }, { - 'pattern': 'Lib/%s/um/x64/**' % SDK_RELEASE, + "pattern": "Lib/%s/um/x64/**" % SDK_RELEASE, }, { - 'pattern': 'Lib/%s/um/x86/**' % SDK_RELEASE, + "pattern": "Lib/%s/um/x86/**" % SDK_RELEASE, }, { - 'pattern': 'Redist/D3D/**', + "pattern": "Redist/D3D/**", }, { - 'pattern': 'Redist/ucrt/DLLs/x64/**', + "pattern": "Redist/ucrt/DLLs/x64/**", }, { - 'pattern': 'Redist/ucrt/DLLs/x86/**', + "pattern": "Redist/ucrt/DLLs/x86/**", }, ], }, @@ -155,25 +147,28 @@ def find_vs_paths(): Returns a 2-tuple of (Visual Studio Path, SDK Path). """ - pf = os.environ.get('ProgramFiles(x86)') + pf = os.environ.get("ProgramFiles(x86)") if not pf: - raise Exception('No "ProgramFiles(x86)" environment variable. ' - 'Not running on 64-bit Windows?') + raise Exception( + 'No "ProgramFiles(x86)" environment variable. ' + "Not running on 64-bit Windows?" + ) - vs_path = os.path.join(pf, 'Microsoft Visual Studio', '2017', 'Community') + vs_path = os.path.join(pf, "Microsoft Visual Studio", "2017", "Community") if not os.path.exists(vs_path): - raise Exception('%s does not exist; Visual Studio 2017 not installed?' % - vs_path) + raise Exception( + "%s does not exist; Visual Studio 2017 not installed?" % vs_path + ) - sdk_path = os.path.join(pf, 'Windows Kits', '10') + sdk_path = os.path.join(pf, "Windows Kits", "10") if not os.path.exists(sdk_path): - raise Exception('%s does not exist; Windows 10 SDK not installed?' % - sdk_path) + raise Exception("%s does not exist; Windows 10 SDK not installed?" 
% sdk_path) - sdk_fullver_path = os.path.join(sdk_path, 'Include', SDK_RELEASE) + sdk_fullver_path = os.path.join(sdk_path, "Include", SDK_RELEASE) if not os.path.exists(sdk_fullver_path): - raise Exception('%s does not exist; Wrong SDK version installed?' % - sdk_fullver_path) + raise Exception( + "%s does not exist; Wrong SDK version installed?" % sdk_fullver_path + ) return vs_path, sdk_path @@ -187,15 +182,15 @@ def resolve_files(): vs_path, sdk_path = find_vs_paths() for entry in PATTERNS: - fullpath = entry['srcdir'] % { - 'vs_path': vs_path, - 'sdk_path': sdk_path, + fullpath = entry["srcdir"] % { + "vs_path": vs_path, + "sdk_path": sdk_path, } - for pattern in entry['files']: - finder = FileFinder(fullpath, ignore=pattern.get('ignore', [])) - for p, f in finder.find(pattern['pattern']): - dstpath = '%s/%s' % (entry['dstdir'], p) - yield dstpath.encode('utf-8'), f + for pattern in entry["files"]: + finder = FileFinder(fullpath, ignore=pattern.get("ignore", [])) + for p, f in finder.find(pattern["pattern"]): + dstpath = "%s/%s" % (entry["dstdir"], p) + yield dstpath.encode("utf-8"), f def resolve_files_and_hash(manifest): @@ -222,18 +217,18 @@ def format_manifest(manifest): """Return formatted SHA-256 manifests as a byte strings.""" sha256_lines = [] for path, (length, sha256) in sorted(manifest.items()): - sha256_lines.append(b'%s\t%d\t%s' % (sha256, length, path)) + sha256_lines.append(b"%s\t%d\t%s" % (sha256, length, path)) # Trailing newline. - sha256_lines.append(b'') + sha256_lines.append(b"") - return b'\n'.join(sha256_lines) + return b"\n".join(sha256_lines) def write_zip(zip_path, prefix=None): """Write toolchain data to a zip file.""" - if isinstance(prefix, unicode): # noqa Special case for Python 2 - prefix = prefix.encode('utf-8') + if isinstance(prefix, unicode): # noqa Special case for Python 2 + prefix = prefix.encode("utf-8") with JarWriter(file=zip_path, compress_level=5) as zip: manifest = {} @@ -246,37 +241,37 @@ def write_zip(zip_path, prefix=None): sha256_manifest = format_manifest(manifest) - sdk_path = b'SDK_VERSION' - sha256_path = b'MANIFEST.SHA256' + sdk_path = b"SDK_VERSION" + sha256_path = b"MANIFEST.SHA256" if prefix: sdk_path = mozpath.join(prefix, sdk_path) sha256_path = mozpath.join(prefix, sha256_path) - zip.add(sdk_path, SDK_RELEASE.encode('utf-8')) + zip.add(sdk_path, SDK_RELEASE.encode("utf-8")) zip.add(sha256_path, sha256_manifest) -if __name__ == '__main__': +if __name__ == "__main__": if len(sys.argv) != 3: - print('usage: %s create-zip ' % sys.argv[0]) + print("usage: %s create-zip " % sys.argv[0]) sys.exit(1) - assert sys.argv[1] == 'create-zip' + assert sys.argv[1] == "create-zip" prefix = os.path.basename(sys.argv[2]) - destzip = '%s.zip' % sys.argv[2] + destzip = "%s.zip" % sys.argv[2] write_zip(destzip, prefix=prefix) sha1 = hashlib.sha1() sha256 = hashlib.sha256() sha512 = hashlib.sha512() - with open(destzip, 'rb') as fh: + with open(destzip, "rb") as fh: data = fh.read() sha1.update(data) sha256.update(data) sha512.update(data) - print('Hashes of %s (size=%d)' % (destzip, len(data))) - print('SHA-1: %s' % sha1.hexdigest()) - print('SHA-256: %s' % sha256.hexdigest()) - print('SHA-512: %s' % sha512.hexdigest()) + print("Hashes of %s (size=%d)" % (destzip, len(data))) + print("SHA-1: %s" % sha1.hexdigest()) + print("SHA-256: %s" % sha256.hexdigest()) + print("SHA-512: %s" % sha512.hexdigest()) diff --git a/caps/moz.build b/caps/moz.build index a971c5252e0005..e35debe6dac6d2 100644 --- a/caps/moz.build +++ b/caps/moz.build @@ -4,79 +4,78 
@@ # License, v. 2.0. If a copy of the MPL was not distributed with this # file, You can obtain one at http://mozilla.org/MPL/2.0/. -MOCHITEST_MANIFESTS += ['tests/mochitest/mochitest.ini'] -MOCHITEST_CHROME_MANIFESTS += ['tests/mochitest/chrome.ini'] -BROWSER_CHROME_MANIFESTS += ['tests/mochitest/browser.ini'] -XPCSHELL_TESTS_MANIFESTS += ['tests/unit/xpcshell.ini'] +MOCHITEST_MANIFESTS += ["tests/mochitest/mochitest.ini"] +MOCHITEST_CHROME_MANIFESTS += ["tests/mochitest/chrome.ini"] +BROWSER_CHROME_MANIFESTS += ["tests/mochitest/browser.ini"] +XPCSHELL_TESTS_MANIFESTS += ["tests/unit/xpcshell.ini"] # Hack to make this file available as a resource:// URI. TESTING_JS_MODULES += [ - 'tests/mochitest/resource_test_file.html', + "tests/mochitest/resource_test_file.html", ] XPIDL_SOURCES += [ - 'nsIAddonPolicyService.idl', - 'nsIDomainPolicy.idl', - 'nsIPrincipal.idl', - 'nsIScriptSecurityManager.idl', + "nsIAddonPolicyService.idl", + "nsIDomainPolicy.idl", + "nsIPrincipal.idl", + "nsIScriptSecurityManager.idl", ] -XPIDL_MODULE = 'caps' +XPIDL_MODULE = "caps" EXPORTS += [ - 'nsJSPrincipals.h', - 'nsScriptSecurityManager.h', + "nsJSPrincipals.h", + "nsScriptSecurityManager.h", ] EXPORTS.mozilla = [ - 'BasePrincipal.h', - 'ContentPrincipal.h', - 'ExpandedPrincipal.h', - 'NullPrincipal.h', - 'NullPrincipalURI.h', - 'OriginAttributes.h', - 'PrincipalHashKey.h', - 'SystemPrincipal.h', + "BasePrincipal.h", + "ContentPrincipal.h", + "ExpandedPrincipal.h", + "NullPrincipal.h", + "NullPrincipalURI.h", + "OriginAttributes.h", + "PrincipalHashKey.h", + "SystemPrincipal.h", ] SOURCES += [ # Compile this separately since nsExceptionHandler.h conflicts # with something from NullPrincipal.cpp. - 'BasePrincipal.cpp', + "BasePrincipal.cpp", ] UNIFIED_SOURCES += [ - 'ContentPrincipal.cpp', - 'DomainPolicy.cpp', - 'ExpandedPrincipal.cpp', - 'nsJSPrincipals.cpp', - 'nsScriptSecurityManager.cpp', - 'NullPrincipal.cpp', - 'NullPrincipalURI.cpp', - 'OriginAttributes.cpp', - 'SystemPrincipal.cpp', + "ContentPrincipal.cpp", + "DomainPolicy.cpp", + "ExpandedPrincipal.cpp", + "nsJSPrincipals.cpp", + "nsScriptSecurityManager.cpp", + "NullPrincipal.cpp", + "NullPrincipalURI.cpp", + "OriginAttributes.cpp", + "SystemPrincipal.cpp", ] USE_LIBS += [ - 'jsoncpp', + "jsoncpp", ] LOCAL_INCLUDES += [ - '/docshell/base', - '/dom/base', - '/js/xpconnect/src', - '/netwerk/base', - '/netwerk/cookie', - '/toolkit/components/jsoncpp/include', + "/docshell/base", + "/dom/base", + "/js/xpconnect/src", + "/netwerk/base", + "/netwerk/cookie", + "/toolkit/components/jsoncpp/include", ] -if CONFIG['ENABLE_TESTS']: - DIRS += ['tests/gtest'] +if CONFIG["ENABLE_TESTS"]: + DIRS += ["tests/gtest"] -include('/ipc/chromium/chromium-config.mozbuild') +include("/ipc/chromium/chromium-config.mozbuild") -FINAL_LIBRARY = 'xul' +FINAL_LIBRARY = "xul" with Files("**"): BUG_COMPONENT = ("Core", "Security: CAPS") - diff --git a/caps/tests/gtest/moz.build b/caps/tests/gtest/moz.build index bcc102c939c128..1ce86631197a90 100644 --- a/caps/tests/gtest/moz.build +++ b/caps/tests/gtest/moz.build @@ -5,11 +5,11 @@ # file, You can obtain one at http://mozilla.org/MPL/2.0/. 
UNIFIED_SOURCES += [ - 'TestOriginAttributes.cpp', - 'TestPrincipalAttributes.cpp', - 'TestPrincipalSerialization.cpp' + "TestOriginAttributes.cpp", + "TestPrincipalAttributes.cpp", + "TestPrincipalSerialization.cpp", ] -include('/ipc/chromium/chromium-config.mozbuild') +include("/ipc/chromium/chromium-config.mozbuild") -FINAL_LIBRARY = 'xul-gtest' +FINAL_LIBRARY = "xul-gtest" diff --git a/chrome/moz.build b/chrome/moz.build index 55e720f3349f2c..b5d767b65ceeb7 100644 --- a/chrome/moz.build +++ b/chrome/moz.build @@ -4,44 +4,44 @@ # License, v. 2.0. If a copy of the MPL was not distributed with this # file, You can obtain one at http://mozilla.org/MPL/2.0/. -TEST_DIRS += ['test'] +TEST_DIRS += ["test"] XPIDL_SOURCES += [ - 'nsIChromeRegistry.idl', - 'nsIToolkitChromeRegistry.idl', + "nsIChromeRegistry.idl", + "nsIToolkitChromeRegistry.idl", ] -XPIDL_MODULE = 'chrome' +XPIDL_MODULE = "chrome" EXPORTS += [ - 'nsChromeProtocolHandler.h', + "nsChromeProtocolHandler.h", ] EXPORTS.mozilla.chrome += [ - 'RegistryMessageUtils.h', + "RegistryMessageUtils.h", ] UNIFIED_SOURCES += [ - 'nsChromeProtocolHandler.cpp', - 'nsChromeRegistry.cpp', - 'nsChromeRegistryChrome.cpp', - 'nsChromeRegistryContent.cpp', + "nsChromeProtocolHandler.cpp", + "nsChromeRegistry.cpp", + "nsChromeRegistryChrome.cpp", + "nsChromeRegistryContent.cpp", ] -include('/ipc/chromium/chromium-config.mozbuild') +include("/ipc/chromium/chromium-config.mozbuild") -FINAL_LIBRARY = 'xul' +FINAL_LIBRARY = "xul" LOCAL_INCLUDES += [ - '!/xpcom', - '/dom/base', - '/netwerk/base', - '/netwerk/protocol/res', - '/xpcom/components' + "!/xpcom", + "/dom/base", + "/netwerk/base", + "/netwerk/protocol/res", + "/xpcom/components", ] -if CONFIG['MOZ_WIDGET_TOOLKIT'] == 'gtk': - CXXFLAGS += CONFIG['TK_CFLAGS'] +if CONFIG["MOZ_WIDGET_TOOLKIT"] == "gtk": + CXXFLAGS += CONFIG["TK_CFLAGS"] with Files("**"): BUG_COMPONENT = ("Toolkit", "Startup and Profile System") diff --git a/chrome/test/moz.build b/chrome/test/moz.build index ed3bd5bbe74120..148e5d5ad97042 100644 --- a/chrome/test/moz.build +++ b/chrome/test/moz.build @@ -5,6 +5,6 @@ # file, You can obtain one at http://mozilla.org/MPL/2.0/. XPCSHELL_TESTS_MANIFESTS += [ - 'unit/xpcshell.ini', - 'unit_ipc/xpcshell.ini', + "unit/xpcshell.ini", + "unit_ipc/xpcshell.ini", ] diff --git a/client.py b/client.py index ad6c1ab7fa7f23..76b325b7f503d6 100755 --- a/client.py +++ b/client.py @@ -5,10 +5,10 @@ from __future__ import absolute_import, print_function -LIBFFI_DIRS = (('js/ctypes/libffi', 'libffi'),) -HG_EXCLUSIONS = ['.hg', '.hgignore', '.hgtags'] +LIBFFI_DIRS = (("js/ctypes/libffi", "libffi"),) +HG_EXCLUSIONS = [".hg", ".hgignore", ".hgtags"] -CVSROOT_LIBFFI = ':pserver:anoncvs@sources.redhat.com:/cvs/libffi' +CVSROOT_LIBFFI = ":pserver:anoncvs@sources.redhat.com:/cvs/libffi" import os import sys @@ -19,8 +19,8 @@ from subprocess import check_call topsrcdir = os.path.dirname(__file__) -if topsrcdir == '': - topsrcdir = '.' +if topsrcdir == "": + topsrcdir = "." 
def check_call_noisy(cmd, *args, **kwargs): @@ -32,27 +32,28 @@ def do_hg_pull(dir, repository, hg): fulldir = os.path.join(topsrcdir, dir) # clone if the dir doesn't exist, pull if it does if not os.path.exists(fulldir): - check_call_noisy([hg, 'clone', repository, fulldir]) + check_call_noisy([hg, "clone", repository, fulldir]) else: - cmd = [hg, 'pull', '-u', '-R', fulldir] + cmd = [hg, "pull", "-u", "-R", fulldir] if repository is not None: cmd.append(repository) check_call_noisy(cmd) - check_call([hg, 'parent', '-R', fulldir, - '--template=Updated to revision {node}.\n']) + check_call( + [hg, "parent", "-R", fulldir, "--template=Updated to revision {node}.\n"] + ) def do_hg_replace(dir, repository, tag, exclusions, hg): """ - Replace the contents of dir with the contents of repository, except for - files matching exclusions. + Replace the contents of dir with the contents of repository, except for + files matching exclusions. """ fulldir = os.path.join(topsrcdir, dir) if os.path.exists(fulldir): shutil.rmtree(fulldir) assert not os.path.exists(fulldir) - check_call_noisy([hg, 'clone', '-u', tag, repository, fulldir]) + check_call_noisy([hg, "clone", "-u", tag, repository, fulldir]) for thing in exclusions: for excluded in glob.iglob(os.path.join(fulldir, thing)): @@ -76,11 +77,18 @@ def do_cvs_export(modules, tag, cvsroot, cvs): shutil.rmtree(fullpath) (parent, leaf) = os.path.split(module) - print("CVS export begin: " + datetime.datetime.utcnow().strftime("%Y-%m-%d %H:%M:%S UTC")) - check_call_noisy([cvs, '-d', cvsroot, - 'export', '-r', tag, '-d', leaf, cvs_module], - cwd=os.path.join(topsrcdir, parent)) - print("CVS export end: " + datetime.datetime.utcnow().strftime("%Y-%m-%d %H:%M:%S UTC")) + print( + "CVS export begin: " + + datetime.datetime.utcnow().strftime("%Y-%m-%d %H:%M:%S UTC") + ) + check_call_noisy( + [cvs, "-d", cvsroot, "export", "-r", tag, "-d", leaf, cvs_module], + cwd=os.path.join(topsrcdir, parent), + ) + print( + "CVS export end: " + + datetime.datetime.utcnow().strftime("%Y-%m-%d %H:%M:%S UTC") + ) def toggle_trailing_blank_line(depname): @@ -96,7 +104,7 @@ def toggle_trailing_blank_line(depname): open(depname, "wb").writelines(lines[:-1]) else: # adding blank line - open(depname, "ab").write(b'\n') + open(depname, "ab").write(b"\n") def get_trailing_blank_line_state(depname): @@ -111,9 +119,9 @@ def get_trailing_blank_line_state(depname): def update_nspr_or_nss(tag, depfile, destination, hgpath): - destination = destination.rstrip('/') - permanent_patch_dir = destination + '/patches' - temporary_patch_dir = destination + '.patches' + destination = destination.rstrip("/") + permanent_patch_dir = destination + "/patches" + temporary_patch_dir = destination + ".patches" if os.path.exists(temporary_patch_dir): print("please clean up leftover directory " + temporary_patch_dir) sys.exit(2) @@ -123,7 +131,7 @@ def update_nspr_or_nss(tag, depfile, destination, hgpath): shutil.move(permanent_patch_dir, temporary_patch_dir) # now update the destination print("reverting to HG version of %s to get its blank line state" % depfile) - check_call_noisy([options.hg, 'revert', depfile]) + check_call_noisy([options.hg, "revert", depfile]) old_state = get_trailing_blank_line_state(depfile) print("old state of %s is: %s" % (depfile, old_state)) do_hg_replace(destination, hgpath, tag, HG_EXCLUSIONS, options.hg) @@ -133,7 +141,7 @@ def update_nspr_or_nss(tag, depfile, destination, hgpath): print("toggling blank line in: ", depfile) toggle_trailing_blank_line(depfile) tag_file = 
destination + "/TAG-INFO" - with open(tag_file, 'w') as f: + with open(tag_file, "w") as f: f.write(tag) # move patch directory back to a subdirectory if os.path.exists(temporary_patch_dir): @@ -155,19 +163,36 @@ def warn_if_patch_exists(path): o = OptionParser( - usage="client.py [options] update_nspr tagname | update_nss tagname | update_libffi tagname") -o.add_option("--skip-mozilla", dest="skip_mozilla", - action="store_true", default=False, - help="Obsolete") - -o.add_option("--cvs", dest="cvs", default=os.environ.get('CVS', 'cvs'), - help="The location of the cvs binary") -o.add_option("--cvsroot", dest="cvsroot", - help="The CVSROOT for libffi (default : %s)" % CVSROOT_LIBFFI) -o.add_option("--hg", dest="hg", default=os.environ.get('HG', 'hg'), - help="The location of the hg binary") -o.add_option("--repo", dest="repo", - help="the repo to update from (default: upstream repo)") + usage="client.py [options] update_nspr tagname | update_nss tagname | update_libffi tagname" +) +o.add_option( + "--skip-mozilla", + dest="skip_mozilla", + action="store_true", + default=False, + help="Obsolete", +) + +o.add_option( + "--cvs", + dest="cvs", + default=os.environ.get("CVS", "cvs"), + help="The location of the cvs binary", +) +o.add_option( + "--cvsroot", + dest="cvsroot", + help="The CVSROOT for libffi (default : %s)" % CVSROOT_LIBFFI, +) +o.add_option( + "--hg", + dest="hg", + default=os.environ.get("HG", "hg"), + help="The location of the hg binary", +) +o.add_option( + "--repo", dest="repo", help="the repo to update from (default: upstream repo)" +) try: options, args = o.parse_args() @@ -176,23 +201,23 @@ def warn_if_patch_exists(path): o.print_help() sys.exit(2) -if action in ('checkout', 'co'): +if action in ("checkout", "co"): print("Warning: client.py checkout is obsolete.", file=sys.stderr) pass -elif action in ('update_nspr'): - tag, = args[1:] +elif action in ("update_nspr"): + (tag,) = args[1:] depfile = "nsprpub/config/prdepend.h" if not options.repo: - options.repo = 'https://hg.mozilla.org/projects/nspr' - update_nspr_or_nss(tag, depfile, 'nsprpub', options.repo) -elif action in ('update_nss'): - tag, = args[1:] + options.repo = "https://hg.mozilla.org/projects/nspr" + update_nspr_or_nss(tag, depfile, "nsprpub", options.repo) +elif action in ("update_nss"): + (tag,) = args[1:] depfile = "security/nss/coreconf/coreconf.dep" if not options.repo: - options.repo = 'https://hg.mozilla.org/projects/nss' - update_nspr_or_nss(tag, depfile, 'security/nss', options.repo) -elif action in ('update_libffi'): - tag, = args[1:] + options.repo = "https://hg.mozilla.org/projects/nss" + update_nspr_or_nss(tag, depfile, "security/nss", options.repo) +elif action in ("update_libffi"): + (tag,) = args[1:] if not options.cvsroot: options.cvsroot = CVSROOT_LIBFFI do_cvs_export(LIBFFI_DIRS, tag, options.cvsroot, options.cvs) diff --git a/config/MozZipFile.py b/config/MozZipFile.py index 400da02dbce8ac..c5f08aef03cabb 100644 --- a/config/MozZipFile.py +++ b/config/MozZipFile.py @@ -12,25 +12,24 @@ class ZipFile(zipfile.ZipFile): - """ Class with methods to open, read, write, close, list zip files. + """Class with methods to open, read, write, close, list zip files. Subclassing zipfile.ZipFile to allow for overwriting of existing entries, though only for writestr, not for write. 
""" - def __init__(self, file, mode="r", compression=zipfile.ZIP_STORED, - lock=False): + def __init__(self, file, mode="r", compression=zipfile.ZIP_STORED, lock=False): if lock: assert isinstance(file, six.text_type) - self.lockfile = lock_file(file + '.lck') + self.lockfile = lock_file(file + ".lck") else: self.lockfile = None - if mode == 'a' and lock: + if mode == "a" and lock: # appending to a file which doesn't exist fails, but we can't check # existence util we hold the lock if (not os.path.isfile(file)) or os.path.getsize(file) == 0: - mode = 'w' + mode = "w" zipfile.ZipFile.__init__(self, file, mode, compression) self._remove = [] @@ -45,11 +44,12 @@ def writestr(self, zinfo_or_arcname, bytes): This method is overloaded to allow overwriting existing entries. """ if not isinstance(zinfo_or_arcname, zipfile.ZipInfo): - zinfo = zipfile.ZipInfo(filename=zinfo_or_arcname, - date_time=time.localtime(time.time())) + zinfo = zipfile.ZipInfo( + filename=zinfo_or_arcname, date_time=time.localtime(time.time()) + ) zinfo.compress_type = self.compression # Add some standard UNIX file access permissions (-rw-r--r--). - zinfo.external_attr = (0x81a4 & 0xFFFF) << 16 + zinfo.external_attr = (0x81A4 & 0xFFFF) << 16 else: zinfo = zinfo_or_arcname @@ -70,9 +70,10 @@ def writestr(self, zinfo_or_arcname, bytes): if self.filelist[i].filename == zinfo.filename: break zi = self.filelist[i] - if ((zinfo.compress_type == zipfile.ZIP_STORED - and zi.compress_size == len(bytes)) - or (i + 1) == len(self.filelist)): + if ( + zinfo.compress_type == zipfile.ZIP_STORED + and zi.compress_size == len(bytes) + ) or (i + 1) == len(self.filelist): # make sure we're allowed to write, otherwise done by writestr below self._writecheck(zi) # overwrite existing entry @@ -108,18 +109,21 @@ def close(self): self.lockfile = None return r - if self.fp.mode != 'r+b': + if self.fp.mode != "r+b": # adjust file mode if we originally just wrote, now we rewrite self.fp.close() - self.fp = open(self.filename, 'r+b') - all = map(lambda zi: (zi, True), self.filelist) + \ - map(lambda zi: (zi, False), self._remove) + self.fp = open(self.filename, "r+b") + all = map(lambda zi: (zi, True), self.filelist) + map( + lambda zi: (zi, False), self._remove + ) all.sort(key=lambda l: l[0].header_offset) # empty _remove for multiple closes self._remove = [] - lengths = [all[i+1][0].header_offset - all[i][0].header_offset - for i in xrange(len(all)-1)] + lengths = [ + all[i + 1][0].header_offset - all[i][0].header_offset + for i in xrange(len(all) - 1) + ] lengths.append(self.end - all[-1][0].header_offset) to_pos = 0 for (zi, keep), length in zip(all, lengths): @@ -127,7 +131,7 @@ def close(self): continue oldoff = zi.header_offset # python <= 2.4 has file_offset - if hasattr(zi, 'file_offset'): + if hasattr(zi, "file_offset"): zi.file_offset = zi.file_offset + to_pos - oldoff zi.header_offset = to_pos self.fp.seek(oldoff) diff --git a/config/check_js_msg_encoding.py b/config/check_js_msg_encoding.py index 197a51e1fb7f7c..69d2a368dcc54c 100644 --- a/config/check_js_msg_encoding.py +++ b/config/check_js_msg_encoding.py @@ -18,33 +18,32 @@ scriptname = os.path.basename(__file__) -expected_encoding = 'ascii' +expected_encoding = "ascii" # The following files don't define JSErrorFormatString. 
ignore_files = [ - 'dom/base/domerr.msg', - 'js/xpconnect/src/xpc.msg', + "dom/base/domerr.msg", + "js/xpconnect/src/xpc.msg", ] def log_pass(filename, text): - print('TEST-PASS | {} | {} | {}'.format(scriptname, filename, text)) + print("TEST-PASS | {} | {} | {}".format(scriptname, filename, text)) def log_fail(filename, text): - print('TEST-UNEXPECTED-FAIL | {} | {} | {}'.format(scriptname, filename, - text)) + print("TEST-UNEXPECTED-FAIL | {} | {} | {}".format(scriptname, filename, text)) def check_single_file(filename): - with open(filename, 'rb') as f: + with open(filename, "rb") as f: data = f.read() try: data.decode(expected_encoding) except Exception: - log_fail(filename, 'not in {} encoding'.format(expected_encoding)) + log_fail(filename, "not in {} encoding".format(expected_encoding)) - log_pass(filename, 'ok') + log_pass(filename, "ok") return True @@ -54,7 +53,7 @@ def check_files(): with get_repository_from_env() as repo: root = repo.path - for filename, _ in repo.get_tracked_files_finder().find('**/*.msg'): + for filename, _ in repo.get_tracked_files_finder().find("**/*.msg"): if filename not in ignore_files: if not check_single_file(os.path.join(root, filename)): result = False @@ -69,5 +68,5 @@ def main(): sys.exit(0) -if __name__ == '__main__': +if __name__ == "__main__": main() diff --git a/config/check_js_opcode.py b/config/check_js_opcode.py index dfa9921a0b0eab..c42ea5ee63fa9f 100644 --- a/config/check_js_opcode.py +++ b/config/check_js_opcode.py @@ -18,15 +18,15 @@ def log_pass(text): - print('TEST-PASS | {} | {}'.format(scriptname, text)) + print("TEST-PASS | {} | {}".format(scriptname, text)) def log_fail(text): - print('TEST-UNEXPECTED-FAIL | {} | {}'.format(scriptname, text)) + print("TEST-UNEXPECTED-FAIL | {} | {}".format(scriptname, text)) def check_opcode(): - sys.path.insert(0, os.path.join(topsrcdir, 'js', 'src', 'vm')) + sys.path.insert(0, os.path.join(topsrcdir, "js", "src", "vm")) import jsopcode try: @@ -35,7 +35,7 @@ def check_opcode(): log_fail(e.args[0]) return False - log_pass('ok') + log_pass("ok") return True @@ -46,5 +46,5 @@ def main(): sys.exit(0) -if __name__ == '__main__': +if __name__ == "__main__": main() diff --git a/config/check_source_count.py b/config/check_source_count.py index df87c0689f610c..33f26acd393fe4 100755 --- a/config/check_source_count.py +++ b/config/check_source_count.py @@ -32,23 +32,32 @@ details[f] = num if count == expected_count: - print("TEST-PASS | check_source_count.py {0} | {1}" - .format(search_string, expected_count)) + print( + "TEST-PASS | check_source_count.py {0} | {1}".format( + search_string, expected_count + ) + ) else: - print("TEST-UNEXPECTED-FAIL | check_source_count.py {0} | " - .format(search_string), - end='') + print( + "TEST-UNEXPECTED-FAIL | check_source_count.py {0} | ".format(search_string), + end="", + ) if count < expected_count: - print("There are fewer occurrences of /{0}/ than expected. " - "This may mean that you have removed some, but forgotten to " - "account for it {1}.".format(search_string, error_location)) + print( + "There are fewer occurrences of /{0}/ than expected. " + "This may mean that you have removed some, but forgotten to " + "account for it {1}.".format(search_string, error_location) + ) else: - print("There are more occurrences of /{0}/ than expected. We're trying " - "to prevent an increase in the number of {1}'s, using {2} if " - "possible. 
If it is unavoidable, you should update the expected " - "count {3}.".format(search_string, search_string, replacement, - error_location)) + print( + "There are more occurrences of /{0}/ than expected. We're trying " + "to prevent an increase in the number of {1}'s, using {2} if " + "possible. If it is unavoidable, you should update the expected " + "count {3}.".format( + search_string, search_string, replacement, error_location + ) + ) print("Expected: {0}; found: {1}".format(expected_count, count)) for k in sorted(details): diff --git a/config/check_spidermonkey_style.py b/config/check_spidermonkey_style.py index 262026f5a115d6..41d0866cd387a0 100644 --- a/config/check_spidermonkey_style.py +++ b/config/check_spidermonkey_style.py @@ -45,99 +45,103 @@ # We don't bother checking files in these directories, because they're (a) auxiliary or (b) # imported code that doesn't follow our coding style. ignored_js_src_dirs = [ - 'js/src/config/', # auxiliary stuff - 'js/src/ctypes/libffi/', # imported code - 'js/src/devtools/', # auxiliary stuff - 'js/src/editline/', # imported code - 'js/src/gdb/', # auxiliary stuff - 'js/src/vtune/', # imported code - 'js/src/zydis/', # imported code + "js/src/config/", # auxiliary stuff + "js/src/ctypes/libffi/", # imported code + "js/src/devtools/", # auxiliary stuff + "js/src/editline/", # imported code + "js/src/gdb/", # auxiliary stuff + "js/src/vtune/", # imported code + "js/src/zydis/", # imported code ] # We ignore #includes of these files, because they don't follow the usual rules. -included_inclnames_to_ignore = set([ - 'ffi.h', # generated in ctypes/libffi/ - 'devtools/Instruments.h', # we ignore devtools/ in general - 'double-conversion/double-conversion.h', # strange MFBT case - 'javascript-trace.h', # generated in $OBJDIR if HAVE_DTRACE is defined - 'frontend/ReservedWordsGenerated.h', # generated in $OBJDIR - 'frontend/smoosh_generated.h', # generated in $OBJDIR - 'gc/StatsPhasesGenerated.h', # generated in $OBJDIR - 'gc/StatsPhasesGenerated.inc', # generated in $OBJDIR - 'jit/CacheIROpsGenerated.h', # generated in $OBJDIR - 'jit/LOpcodesGenerated.h', # generated in $OBJDIR - 'jit/MOpcodesGenerated.h', # generated in $OBJDIR - 'js/ProfilingCategoryList.h', # comes from mozglue/baseprofiler - 'jscustomallocator.h', # provided by embedders; allowed to be missing - 'js-config.h', # generated in $OBJDIR - 'fdlibm.h', # fdlibm - 'FuzzerDefs.h', # included without a path - 'FuzzingInterface.h', # included without a path - 'mozmemory.h', # included without a path - 'pratom.h', # NSPR - 'prcvar.h', # NSPR - 'prerror.h', # NSPR - 'prinit.h', # NSPR - 'prio.h', # NSPR - 'private/pprio.h', # NSPR - 'prlink.h', # NSPR - 'prlock.h', # NSPR - 'prprf.h', # NSPR - 'prthread.h', # NSPR - 'prtypes.h', # NSPR - 'selfhosted.out.h', # generated in $OBJDIR - 'shellmoduleloader.out.h', # generated in $OBJDIR - 'unicode/basictz.h', # ICU - 'unicode/locid.h', # ICU - 'unicode/plurrule.h', # ICU - 'unicode/putil.h', # ICU - 'unicode/timezone.h', # ICU - 'unicode/ucal.h', # ICU - 'unicode/uchar.h', # ICU - 'unicode/uclean.h', # ICU - 'unicode/ucol.h', # ICU - 'unicode/ucurr.h', # ICU - 'unicode/udat.h', # ICU - 'unicode/udata.h', # ICU - 'unicode/udateintervalformat.h', # ICU - 'unicode/udatpg.h', # ICU - 'unicode/udisplaycontext.h', # ICU - 'unicode/uenum.h', # ICU - 'unicode/ufieldpositer.h', # ICU - 'unicode/uformattedvalue.h', # ICU - 'unicode/ulistformatter.h', # ICU - 'unicode/uldnames.h', # ICU - 'unicode/uloc.h', # ICU - 'unicode/umachine.h', # ICU - 
'unicode/uniset.h', # ICU - 'unicode/unistr.h', # ICU - 'unicode/unorm2.h', # ICU - 'unicode/unum.h', # ICU - 'unicode/unumberformatter.h', # ICU - 'unicode/unumsys.h', # ICU - 'unicode/upluralrules.h', # ICU - 'unicode/ureldatefmt.h', # ICU - 'unicode/ures.h', # ICU - 'unicode/ustring.h', # ICU - 'unicode/utypes.h', # ICU - 'unicode/uversion.h', # ICU - 'vtune/VTuneWrapper.h', # VTune - 'zydis/ZydisAPI.h', # Zydis -]) +included_inclnames_to_ignore = set( + [ + "ffi.h", # generated in ctypes/libffi/ + "devtools/Instruments.h", # we ignore devtools/ in general + "double-conversion/double-conversion.h", # strange MFBT case + "javascript-trace.h", # generated in $OBJDIR if HAVE_DTRACE is defined + "frontend/ReservedWordsGenerated.h", # generated in $OBJDIR + "frontend/smoosh_generated.h", # generated in $OBJDIR + "gc/StatsPhasesGenerated.h", # generated in $OBJDIR + "gc/StatsPhasesGenerated.inc", # generated in $OBJDIR + "jit/CacheIROpsGenerated.h", # generated in $OBJDIR + "jit/LOpcodesGenerated.h", # generated in $OBJDIR + "jit/MOpcodesGenerated.h", # generated in $OBJDIR + "js/ProfilingCategoryList.h", # comes from mozglue/baseprofiler + "jscustomallocator.h", # provided by embedders; allowed to be missing + "js-config.h", # generated in $OBJDIR + "fdlibm.h", # fdlibm + "FuzzerDefs.h", # included without a path + "FuzzingInterface.h", # included without a path + "mozmemory.h", # included without a path + "pratom.h", # NSPR + "prcvar.h", # NSPR + "prerror.h", # NSPR + "prinit.h", # NSPR + "prio.h", # NSPR + "private/pprio.h", # NSPR + "prlink.h", # NSPR + "prlock.h", # NSPR + "prprf.h", # NSPR + "prthread.h", # NSPR + "prtypes.h", # NSPR + "selfhosted.out.h", # generated in $OBJDIR + "shellmoduleloader.out.h", # generated in $OBJDIR + "unicode/basictz.h", # ICU + "unicode/locid.h", # ICU + "unicode/plurrule.h", # ICU + "unicode/putil.h", # ICU + "unicode/timezone.h", # ICU + "unicode/ucal.h", # ICU + "unicode/uchar.h", # ICU + "unicode/uclean.h", # ICU + "unicode/ucol.h", # ICU + "unicode/ucurr.h", # ICU + "unicode/udat.h", # ICU + "unicode/udata.h", # ICU + "unicode/udateintervalformat.h", # ICU + "unicode/udatpg.h", # ICU + "unicode/udisplaycontext.h", # ICU + "unicode/uenum.h", # ICU + "unicode/ufieldpositer.h", # ICU + "unicode/uformattedvalue.h", # ICU + "unicode/ulistformatter.h", # ICU + "unicode/uldnames.h", # ICU + "unicode/uloc.h", # ICU + "unicode/umachine.h", # ICU + "unicode/uniset.h", # ICU + "unicode/unistr.h", # ICU + "unicode/unorm2.h", # ICU + "unicode/unum.h", # ICU + "unicode/unumberformatter.h", # ICU + "unicode/unumsys.h", # ICU + "unicode/upluralrules.h", # ICU + "unicode/ureldatefmt.h", # ICU + "unicode/ures.h", # ICU + "unicode/ustring.h", # ICU + "unicode/utypes.h", # ICU + "unicode/uversion.h", # ICU + "vtune/VTuneWrapper.h", # VTune + "zydis/ZydisAPI.h", # Zydis + ] +) # These files have additional constraints on where they are #included, so we # ignore #includes of them when checking #include ordering. -oddly_ordered_inclnames = set([ - 'ctypes/typedefs.h', # Included multiple times in the body of ctypes/CTypes.h - # Included in the body of frontend/TokenStream.h - 'frontend/ReservedWordsGenerated.h', - 'gc/StatsPhasesGenerated.h', # Included in the body of gc/Statistics.h - 'gc/StatsPhasesGenerated.inc', # Included in the body of gc/Statistics.cpp - 'psapi.h', # Must be included after "util/Windows.h" on Windows - 'machine/endian.h', # Must be included after on BSD - 'winbase.h', # Must precede other system headers(?) 
- 'windef.h' # Must precede other system headers(?) -]) +oddly_ordered_inclnames = set( + [ + "ctypes/typedefs.h", # Included multiple times in the body of ctypes/CTypes.h + # Included in the body of frontend/TokenStream.h + "frontend/ReservedWordsGenerated.h", + "gc/StatsPhasesGenerated.h", # Included in the body of gc/Statistics.h + "gc/StatsPhasesGenerated.inc", # Included in the body of gc/Statistics.cpp + "psapi.h", # Must be included after "util/Windows.h" on Windows + "machine/endian.h", # Must be included after on BSD + "winbase.h", # Must precede other system headers(?) + "windef.h", # Must precede other system headers(?) + ] +) # The files in tests/style/ contain code that fails this checking in various # ways. Here is the output we expect. If the actual output differs from @@ -147,7 +151,7 @@ # accordingly. # - This script has been broken somehow. # -expected_output = '''\ +expected_output = """\ js/src/tests/style/BadIncludes.h:3: error: the file includes itself @@ -204,24 +208,26 @@ -> tests/style/HeaderCycleB1-inl.h -> tests/style/HeaderCycleB4-inl.h -'''.splitlines(True) +""".splitlines( + True +) actual_output = [] def out(*lines): for line in lines: - actual_output.append(line + '\n') + actual_output.append(line + "\n") def error(filename, linenum, *lines): location = filename if linenum is not None: - location += ':' + str(linenum) - out(location + ': error:') - for line in (lines): - out(' ' + line) - out('') + location += ":" + str(linenum) + out(location + ": error:") + for line in lines: + out(" " + line) + out("") class FileKind(object): @@ -234,25 +240,25 @@ class FileKind(object): @staticmethod def get(filename): - if filename.endswith('.c'): + if filename.endswith(".c"): return FileKind.C - if filename.endswith('.cpp'): + if filename.endswith(".cpp"): return FileKind.CPP - if filename.endswith(('inlines.h', '-inl.h')): + if filename.endswith(("inlines.h", "-inl.h")): return FileKind.INL_H - if filename.endswith('.h'): + if filename.endswith(".h"): return FileKind.H - if filename.endswith('.tbl'): + if filename.endswith(".tbl"): return FileKind.TBL - if filename.endswith('.msg'): + if filename.endswith(".msg"): return FileKind.MSG - error(filename, None, 'unknown file kind') + error(filename, None, "unknown file kind") def check_style(enable_fixup): @@ -267,52 +273,51 @@ def check_style(enable_fixup): # - "js/public/Vector.h" -> "js/Vector.h" # - "js/src/vm/String.h" -> "vm/String.h" - non_js_dirnames = ('mfbt/', - 'memory/mozalloc/', - 'mozglue/') # type: tuple(str) - non_js_inclnames = set() # type: set(inclname) - js_names = dict() # type: dict(filename, inclname) + non_js_dirnames = ("mfbt/", "memory/mozalloc/", "mozglue/") # type: tuple(str) + non_js_inclnames = set() # type: set(inclname) + js_names = dict() # type: dict(filename, inclname) # Process files in js/src. - js_src_root = os.path.join('js', 'src') + js_src_root = os.path.join("js", "src") for dirpath, dirnames, filenames in os.walk(js_src_root): if dirpath == js_src_root: # Skip any subdirectories that contain a config.status file # (likely objdirs). 
builddirs = [] for dirname in dirnames: - path = os.path.join(dirpath, dirname, 'config.status') + path = os.path.join(dirpath, dirname, "config.status") if os.path.isfile(path): builddirs.append(dirname) for dirname in builddirs: dirnames.remove(dirname) for filename in filenames: - filepath = os.path.join(dirpath, filename).replace('\\', '/') - if not filepath.startswith(tuple(ignored_js_src_dirs)) and \ - filepath.endswith(('.c', '.cpp', '.h', '.tbl', '.msg')): - inclname = filepath[len('js/src/'):] + filepath = os.path.join(dirpath, filename).replace("\\", "/") + if not filepath.startswith( + tuple(ignored_js_src_dirs) + ) and filepath.endswith((".c", ".cpp", ".h", ".tbl", ".msg")): + inclname = filepath[len("js/src/") :] js_names[filepath] = inclname # Look for header files in directories in non_js_dirnames. for non_js_dir in non_js_dirnames: for dirpath, dirnames, filenames in os.walk(non_js_dir): for filename in filenames: - if filename.endswith('.h'): - inclname = 'mozilla/' + filename + if filename.endswith(".h"): + inclname = "mozilla/" + filename non_js_inclnames.add(inclname) # Look for header files in js/public. - js_public_root = os.path.join('js', 'public') + js_public_root = os.path.join("js", "public") for dirpath, dirnames, filenames in os.walk(js_public_root): for filename in filenames: - if filename.endswith(('.h', '.msg')): - filepath = os.path.join(dirpath, filename).replace('\\', '/') - inclname = 'js/' + filepath[len('js/public/'):] + if filename.endswith((".h", ".msg")): + filepath = os.path.join(dirpath, filename).replace("\\", "/") + inclname = "js/" + filepath[len("js/public/") :] js_names[filepath] = inclname all_inclnames = non_js_inclnames | set(js_names.values()) - edges = dict() # type: dict(inclname, set(inclname)) + edges = dict() # type: dict(inclname, set(inclname)) # We don't care what's inside the MFBT and MOZALLOC files, but because they # are #included from JS files we have to add them to the inclusion graph. @@ -323,46 +328,59 @@ def check_style(enable_fixup): for filename in sorted(js_names.keys()): inclname = js_names[filename] file_kind = FileKind.get(filename) - if file_kind == FileKind.C or file_kind == FileKind.CPP or \ - file_kind == FileKind.H or file_kind == FileKind.INL_H: - included_h_inclnames = set() # type: set(inclname) - - with open(filename, encoding='utf-8') as f: + if ( + file_kind == FileKind.C + or file_kind == FileKind.CPP + or file_kind == FileKind.H + or file_kind == FileKind.INL_H + ): + included_h_inclnames = set() # type: set(inclname) + + with open(filename, encoding="utf-8") as f: code = read_file(f) if enable_fixup: code = code.sorted(inclname) - with open(filename, 'w') as f: + with open(filename, "w") as f: f.write(code.to_source()) - check_file(filename, inclname, file_kind, code, - all_inclnames, included_h_inclnames) + check_file( + filename, inclname, file_kind, code, all_inclnames, included_h_inclnames + ) edges[inclname] = included_h_inclnames find_cycles(all_inclnames, edges) # Compare expected and actual output. 
- difflines = difflib.unified_diff(expected_output, actual_output, - fromfile='check_spidermonkey_style.py expected output', - tofile='check_spidermonkey_style.py actual output') + difflines = difflib.unified_diff( + expected_output, + actual_output, + fromfile="check_spidermonkey_style.py expected output", + tofile="check_spidermonkey_style.py actual output", + ) ok = True for diffline in difflines: ok = False - print(diffline, end='') + print(diffline, end="") return ok def module_name(name): - '''Strip the trailing .cpp, .h, inlines.h or -inl.h from a filename.''' + """Strip the trailing .cpp, .h, inlines.h or -inl.h from a filename.""" - return name.replace('inlines.h', '').replace('-inl.h', '').replace('.h', '').replace('.cpp', '') # NOQA: E501 + return ( + name.replace("inlines.h", "") + .replace("-inl.h", "") + .replace(".h", "") + .replace(".cpp", "") + ) # NOQA: E501 def is_module_header(enclosing_inclname, header_inclname): - '''Determine if an included name is the "module header", i.e. should be - first in the file.''' + """Determine if an included name is the "module header", i.e. should be + first in the file.""" module = module_name(enclosing_inclname) @@ -380,15 +398,15 @@ def is_module_header(enclosing_inclname, header_inclname): # # module == "vm/SourceHook", # header_inclname == "js/experimental/SourceHook.h" - m = re.match(r'js\/.*?([^\/]+)\.h', header_inclname) - if m is not None and module.endswith('/' + m.group(1)): + m = re.match(r"js\/.*?([^\/]+)\.h", header_inclname) + if m is not None and module.endswith("/" + m.group(1)): return True return False class Include(object): - '''Important information for a single #include statement.''' + """Important information for a single #include statement.""" def __init__(self, include_prefix, inclname, line_suffix, linenum, is_system): self.include_prefix = include_prefix @@ -403,7 +421,7 @@ def is_style_relevant(self): return True def section(self, enclosing_inclname): - '''Identify which section inclname belongs to. + """Identify which section inclname belongs to. The section numbers are as follows. 0. Module header (e.g. jsfoo.h or jsfooinlines.h within jsfoo.cpp) @@ -414,12 +432,12 @@ def section(self, enclosing_inclname): 5. jsfooinlines.h 6. foo/Bar-inl.h 7. non-.h, e.g. *.tbl, *.msg (these can be scattered throughout files) - ''' + """ if self.is_system: return 2 - if not self.inclname.endswith('.h'): + if not self.inclname.endswith(".h"): return 7 # A couple of modules have the .h file in js/ and the .cpp file elsewhere and so need @@ -427,23 +445,23 @@ def section(self, enclosing_inclname): if is_module_header(enclosing_inclname, self.inclname): return 0 - if '/' in self.inclname: - if self.inclname.startswith('mozilla/'): + if "/" in self.inclname: + if self.inclname.startswith("mozilla/"): return 1 - if self.inclname.endswith('-inl.h'): + if self.inclname.endswith("-inl.h"): return 6 return 4 - if self.inclname.endswith('inlines.h'): + if self.inclname.endswith("inlines.h"): return 5 return 3 def quote(self): if self.is_system: - return '<' + self.inclname + '>' + return "<" + self.inclname + ">" else: return '"' + self.inclname + '"' @@ -451,22 +469,22 @@ def sort_key(self, enclosing_inclname): return (self.section(enclosing_inclname), self.inclname.lower()) def to_source(self): - return self.include_prefix + self.quote() + self.line_suffix + '\n' + return self.include_prefix + self.quote() + self.line_suffix + "\n" class CppBlock(object): - '''C preprocessor block: a whole file or a single #if/#elif/#else block. 
+ """C preprocessor block: a whole file or a single #if/#elif/#else block. A #if/#endif block is the contents of a #if/#endif (or similar) section. The top-level block, which is not within a #if/#endif pair, is also considered a block. Each kid is either an Include (representing a #include), OrdinaryCode, or - a nested CppBlock.''' + a nested CppBlock.""" def __init__(self, start_line=""): self.start = start_line - self.end = '' + self.end = "" self.kids = [] def is_style_relevant(self): @@ -488,8 +506,8 @@ def sorted(self, enclosing_inclname): """ def pretty_sorted_includes(includes): - """ Return a new list containing the given includes, in order, - with blank lines separating sections. """ + """Return a new list containing the given includes, in order, + with blank lines separating sections.""" keys = [inc.sort_key(enclosing_inclname) for inc in includes] if sorted(keys) == keys: return includes # if nothing is out of order, don't touch anything @@ -504,11 +522,13 @@ def pretty_sorted_includes(includes): return output def should_try_to_sort(includes): - if 'tests/style/BadIncludes' in enclosing_inclname: + if "tests/style/BadIncludes" in enclosing_inclname: return False # don't straighten the counterexample if any(inc.inclname in oddly_ordered_inclnames for inc in includes): return False # don't sort batches containing odd includes - if includes == sorted(includes, key=lambda inc: inc.sort_key(enclosing_inclname)): + if includes == sorted( + includes, key=lambda inc: inc.sort_key(enclosing_inclname) + ): return False # it's already sorted, avoid whitespace-only fixups return True @@ -522,9 +542,11 @@ def should_try_to_sort(includes): def flush_batch(): """Sort the contents of `batch` and move it to `output`.""" - assert all(isinstance(item, Include) - or (isinstance(item, OrdinaryCode) and "".join(item.lines).isspace()) - for item in batch) + assert all( + isinstance(item, Include) + or (isinstance(item, OrdinaryCode) and "".join(item.lines).isspace()) + for item in batch + ) # Here we throw away the blank lines. # `pretty_sorted_includes` puts them back. @@ -537,8 +559,7 @@ def flush_batch(): cutoff = last_include_index + 1 if should_try_to_sort(includes): - output.extend(pretty_sorted_includes( - includes) + batch[cutoff:]) + output.extend(pretty_sorted_includes(includes) + batch[cutoff:]) else: output.extend(batch) del batch[:] @@ -565,11 +586,11 @@ def flush_batch(): return result def to_source(self): - return self.start + ''.join(kid.to_source() for kid in self.kids) + self.end + return self.start + "".join(kid.to_source() for kid in self.kids) + self.end class OrdinaryCode(object): - ''' A list of lines of code that aren't #include/#if/#else/#endif lines. ''' + """ A list of lines of code that aren't #include/#if/#else/#endif lines. """ def __init__(self, lines=None): self.lines = lines if lines is not None else [] @@ -578,7 +599,7 @@ def is_style_relevant(self): return False def to_source(self): - return ''.join(self.lines) + return "".join(self.lines) # A "snippet" is one of: @@ -587,30 +608,33 @@ def to_source(self): # * CppBlock - a whole file or #if/#elif/#else block; contains a list of snippets # * OrdinaryCode - representing lines of non-#include-relevant code + def read_file(f): block_stack = [CppBlock()] # Extract the #include statements as a tree of snippets. for linenum, line in enumerate(f, start=1): - if line.lstrip().startswith('#'): + if line.lstrip().startswith("#"): # Look for a |#include "..."| line. 
m = re.match(r'(\s*#\s*include\s+)"([^"]*)"(.*)', line) if m is not None: prefix, inclname, suffix = m.groups() - block_stack[-1].kids.append(Include(prefix, - inclname, suffix, linenum, is_system=False)) + block_stack[-1].kids.append( + Include(prefix, inclname, suffix, linenum, is_system=False) + ) continue # Look for a |#include <...>| line. - m = re.match(r'(\s*#\s*include\s+)<([^>]*)>(.*)', line) + m = re.match(r"(\s*#\s*include\s+)<([^>]*)>(.*)", line) if m is not None: prefix, inclname, suffix = m.groups() - block_stack[-1].kids.append(Include(prefix, - inclname, suffix, linenum, is_system=True)) + block_stack[-1].kids.append( + Include(prefix, inclname, suffix, linenum, is_system=True) + ) continue # Look for a |#{if,ifdef,ifndef}| line. - m = re.match(r'\s*#\s*(if|ifdef|ifndef)\b', line) + m = re.match(r"\s*#\s*(if|ifdef|ifndef)\b", line) if m is not None: # Open a new block. new_block = CppBlock(line) @@ -619,7 +643,7 @@ def read_file(f): continue # Look for a |#{elif,else}| line. - m = re.match(r'\s*#\s*(elif|else)\b', line) + m = re.match(r"\s*#\s*(elif|else)\b", line) if m is not None: # Close the current block, and open an adjacent one. block_stack.pop() @@ -629,13 +653,12 @@ def read_file(f): continue # Look for a |#endif| line. - m = re.match(r'\s*#\s*endif\b', line) + m = re.match(r"\s*#\s*endif\b", line) if m is not None: # Close the current block. block_stack.pop().end = line if len(block_stack) == 0: - raise ValueError( - "#endif without #if at line " + str(linenum)) + raise ValueError("#endif without #if at line " + str(linenum)) continue # Otherwise, we have an ordinary line. @@ -646,18 +669,24 @@ def read_file(f): return block_stack[-1] -def check_file(filename, inclname, file_kind, code, all_inclnames, included_h_inclnames): - +def check_file( + filename, inclname, file_kind, code, all_inclnames, included_h_inclnames +): def check_include_statement(include): - '''Check the style of a single #include statement.''' + """Check the style of a single #include statement.""" if include.is_system: # Check it is not a known local file (in which case it's probably a system header). - if include.inclname in included_inclnames_to_ignore or \ - include.inclname in all_inclnames: - error(filename, include.linenum, - include.quote() + ' should be included using', - 'the #include "..." form') + if ( + include.inclname in included_inclnames_to_ignore + or include.inclname in all_inclnames + ): + error( + filename, + include.linenum, + include.quote() + " should be included using", + 'the #include "..." form', + ) else: if include.inclname not in included_inclnames_to_ignore: @@ -665,9 +694,12 @@ def check_include_statement(include): # Check the #include path has the correct form. if include.inclname not in all_inclnames: - error(filename, include.linenum, - include.quote() + ' is included using the wrong path;', - 'did you forget a prefix, or is the file not yet committed?') + error( + filename, + include.linenum, + include.quote() + " is included using the wrong path;", + "did you forget a prefix, or is the file not yet committed?", + ) # Record inclusions of .h files for cycle detection later. # (Exclude .tbl and .msg files.) @@ -676,28 +708,38 @@ def check_include_statement(include): # Check a H file doesn't #include an INL_H file. 
if file_kind == FileKind.H and included_kind == FileKind.INL_H: - error(filename, include.linenum, - 'vanilla header includes an inline-header file ' + include.quote()) + error( + filename, + include.linenum, + "vanilla header includes an inline-header file " + + include.quote(), + ) # Check a file doesn't #include itself. (We do this here because the cycle # detection below doesn't detect this case.) if inclname == include.inclname: - error(filename, include.linenum, - 'the file includes itself') + error(filename, include.linenum, "the file includes itself") def check_includes_order(include1, include2): - '''Check the ordering of two #include statements.''' + """Check the ordering of two #include statements.""" - if include1.inclname in oddly_ordered_inclnames or \ - include2.inclname in oddly_ordered_inclnames: + if ( + include1.inclname in oddly_ordered_inclnames + or include2.inclname in oddly_ordered_inclnames + ): return section1 = include1.section(inclname) section2 = include2.section(inclname) - if (section1 > section2) or \ - ((section1 == section2) and (include1.inclname.lower() > include2.inclname.lower())): - error(filename, str(include1.linenum) + ':' + str(include2.linenum), - include1.quote() + ' should be included after ' + include2.quote()) + if (section1 > section2) or ( + (section1 == section2) + and (include1.inclname.lower() > include2.inclname.lower()) + ): + error( + filename, + str(include1.linenum) + ":" + str(include2.linenum), + include1.quote() + " should be included after " + include2.quote(), + ) # Check the extracted #include statements, both individually, and the ordering of # adjacent pairs that live in the same block. @@ -715,7 +757,7 @@ def pair_traverse(prev, this): def find_cycles(all_inclnames, edges): - '''Find and draw any cycles.''' + """Find and draw any cycles.""" SCCs = tarjan(all_inclnames, edges) @@ -726,22 +768,22 @@ def draw_SCC(c): drawn = set() def draw(v, indent): - out(' ' * indent + ('-> ' if indent else ' ') + v) + out(" " * indent + ("-> " if indent else " ") + v) if v in drawn: return drawn.add(v) for succ in sorted(edges[v]): if succ in cset: draw(succ, indent + 1) + draw(sorted(c)[0], 0) - out('') + out("") have_drawn_an_SCC = False for scc in sorted(SCCs): if len(scc) != 1: if not have_drawn_an_SCC: - error('(multiple files)', None, - 'header files form one or more cycles') + error("(multiple files)", None, "header files form one or more cycles") have_drawn_an_SCC = True draw_SCC(scc) @@ -799,25 +841,31 @@ def main(): elif sys.argv[1:] == []: fixup = False else: - print("TEST-UNEXPECTED-FAIL | check_spidermonkey_style.py | unexpected command " - "line options: " + repr(sys.argv[1:])) + print( + "TEST-UNEXPECTED-FAIL | check_spidermonkey_style.py | unexpected command " + "line options: " + repr(sys.argv[1:]) + ) sys.exit(1) ok = check_style(fixup) if ok: - print('TEST-PASS | check_spidermonkey_style.py | ok') + print("TEST-PASS | check_spidermonkey_style.py | ok") else: - print('TEST-UNEXPECTED-FAIL | check_spidermonkey_style.py | ' + - 'actual output does not match expected output; diff is above.') - print('TEST-UNEXPECTED-FAIL | check_spidermonkey_style.py | ' + - 'Hint: If the problem is that you renamed a header, and many #includes ' + - 'are no longer in alphabetical order, commit your work and then try ' + - '`check_spidermonkey_style.py --fixup`. 
' + - 'You need to commit first because --fixup modifies your files in place.') + print( + "TEST-UNEXPECTED-FAIL | check_spidermonkey_style.py | " + + "actual output does not match expected output; diff is above." + ) + print( + "TEST-UNEXPECTED-FAIL | check_spidermonkey_style.py | " + + "Hint: If the problem is that you renamed a header, and many #includes " + + "are no longer in alphabetical order, commit your work and then try " + + "`check_spidermonkey_style.py --fixup`. " + + "You need to commit first because --fixup modifies your files in place." + ) sys.exit(0 if ok else 1) -if __name__ == '__main__': +if __name__ == "__main__": main() diff --git a/config/check_vanilla_allocations.py b/config/check_vanilla_allocations.py index 64e1c611b73cfb..10578d8b656875 100644 --- a/config/check_vanilla_allocations.py +++ b/config/check_vanilla_allocations.py @@ -62,39 +62,40 @@ def fail(msg): - print('TEST-UNEXPECTED-FAIL | check_vanilla_allocations.py |', msg) + print("TEST-UNEXPECTED-FAIL | check_vanilla_allocations.py |", msg) global has_failed has_failed = True def main(): parser = argparse.ArgumentParser() - parser.add_argument('--aggressive', action='store_true', - help='also check for malloc, calloc, realloc and free') - parser.add_argument('file', type=str, - help='name of the file to check') + parser.add_argument( + "--aggressive", + action="store_true", + help="also check for malloc, calloc, realloc and free", + ) + parser.add_argument("file", type=str, help="name of the file to check") args = parser.parse_args() # Run |nm|. Options: # -u: show only undefined symbols # -C: demangle symbol names # -A: show an object filename for each undefined symbol - nm = buildconfig.substs.get('NM') or 'nm' - cmd = [nm, '-u', '-C', '-A', args.file] - lines = subprocess.check_output(cmd, universal_newlines=True, - stderr=subprocess.PIPE).split('\n') + nm = buildconfig.substs.get("NM") or "nm" + cmd = [nm, "-u", "-C", "-A", args.file] + lines = subprocess.check_output( + cmd, universal_newlines=True, stderr=subprocess.PIPE + ).split("\n") # alloc_fns contains all the vanilla allocation/free functions that we look # for. Regexp chars are escaped appropriately. alloc_fns = [ # Matches |operator new(unsigned T)|, where |T| is |int| or |long|. - r'operator new\(unsigned', - + r"operator new\(unsigned", # Matches |operator new[](unsigned T)|, where |T| is |int| or |long|. - r'operator new\[\]\(unsigned', - - r'memalign', + r"operator new\[\]\(unsigned", + r"memalign", # These three aren't available on all Linux configurations. # r'posix_memalign', # r'aligned_alloc', @@ -102,23 +103,17 @@ def main(): ] if args.aggressive: - alloc_fns += [ - r'malloc', - r'calloc', - r'realloc', - r'free', - r'strdup' - ] + alloc_fns += [r"malloc", r"calloc", r"realloc", r"free", r"strdup"] # This is like alloc_fns, but regexp chars are not escaped. - alloc_fns_unescaped = [fn.replace('\\', '') for fn in alloc_fns] + alloc_fns_unescaped = [fn.replace("\\", "") for fn in alloc_fns] # This regexp matches the relevant lines in the output of |nm|, which look # like the following. # # js/src/libjs_static.a:Utility.o: U malloc # - alloc_fns_re = r'([^:/ ]+):\s+U (' + r'|'.join(alloc_fns) + r')' + alloc_fns_re = r"([^:/ ]+):\s+U (" + r"|".join(alloc_fns) + r")" # This tracks which allocation/free functions have been seen in # util/Utility.cpp. @@ -136,7 +131,7 @@ def main(): # The stdc++compat library has an implicit call to operator new in # thread::_M_start_thread. 
- if 'stdc++compat' in filename: + if "stdc++compat" in filename: continue # The memory allocator code contains calls to memalign. These are ok, so @@ -158,15 +153,15 @@ def main(): # From intl/icu/source/common/umutex.h: # On Linux, the default constructor of std::condition_variable_any # produces an in-line reference to global operator new(), [...]. - if filename == 'umutex.o': + if filename == "umutex.o": continue # Ignore allocations from decimal conversion functions inside mozglue. - if filename == 'Decimal.o': + if filename == "Decimal.o": continue fn = m.group(2) - if filename == 'Utility.o': + if filename == "Utility.o": util_Utility_cpp.add(fn) else: # An allocation is present in a non-special file. Fail! @@ -184,45 +179,51 @@ def main(): # This should never happen, but check just in case. if util_Utility_cpp: - fail('unexpected allocation fns used in util/Utility.cpp: ' + - ', '.join(util_Utility_cpp)) + fail( + "unexpected allocation fns used in util/Utility.cpp: " + + ", ".join(util_Utility_cpp) + ) # If we found any improper references to allocation functions, try to use # DWARF debug info to get more accurate line number information about the # bad calls. This is a lot slower than 'nm -A', and it is not always # precise when building with --enable-optimized. if emit_line_info: - print('check_vanilla_allocations.py: Source lines with allocation calls:') - print('check_vanilla_allocations.py: Accurate in unoptimized builds; ' - 'util/Utility.cpp expected.') + print("check_vanilla_allocations.py: Source lines with allocation calls:") + print( + "check_vanilla_allocations.py: Accurate in unoptimized builds; " + "util/Utility.cpp expected." + ) # Run |nm|. Options: # -u: show only undefined symbols # -C: demangle symbol names # -l: show line number information for each undefined symbol - cmd = ['nm', '-u', '-C', '-l', args.file] - lines = subprocess.check_output(cmd, universal_newlines=True, - stderr=subprocess.PIPE).split('\n') + cmd = ["nm", "-u", "-C", "-l", args.file] + lines = subprocess.check_output( + cmd, universal_newlines=True, stderr=subprocess.PIPE + ).split("\n") # This regexp matches the relevant lines in the output of |nm -l|, # which look like the following. # # U malloc util/Utility.cpp:117 # - alloc_lines_re = r'U ((' + r'|'.join(alloc_fns) + r').*)\s+(\S+:\d+)$' + alloc_lines_re = r"U ((" + r"|".join(alloc_fns) + r").*)\s+(\S+:\d+)$" for line in lines: m = re.search(alloc_lines_re, line) if m: - print('check_vanilla_allocations.py:', - m.group(1), 'called at', m.group(3)) + print( + "check_vanilla_allocations.py:", m.group(1), "called at", m.group(3) + ) if has_failed: sys.exit(1) - print('TEST-PASS | check_vanilla_allocations.py | ok') + print("TEST-PASS | check_vanilla_allocations.py | ok") sys.exit(0) -if __name__ == '__main__': +if __name__ == "__main__": main() diff --git a/config/create_rc.py b/config/create_rc.py index f3c366e2386ae6..973bb5b44e7762 100644 --- a/config/create_rc.py +++ b/config/create_rc.py @@ -11,7 +11,7 @@ import buildconfig -TEMPLATE = ''' +TEMPLATE = """ // This Source Code Form is subject to the terms of the Mozilla Public // License, v. 2.0. If a copy of the MPL was not distributed with this // file, You can obtain one at http://mozilla.org/MPL/2.0/. 
@@ -65,15 +65,15 @@ END END -''' +""" def preprocess(path, defines): - pp = Preprocessor(defines=defines, marker='%') + pp = Preprocessor(defines=defines, marker="%") pp.context.update(defines) pp.out = io.StringIO() - pp.do_filter('substitution') - pp.do_include(io.open(path, 'r', encoding='latin1')) + pp.do_filter("substitution") + pp.do_include(io.open(path, "r", encoding="latin1")) pp.out.seek(0) return pp.out @@ -81,23 +81,23 @@ def preprocess(path, defines): def parse_module_ver(path, defines): result = {} for line in preprocess(path, defines): - content, *comment = line.split('#', 1) + content, *comment = line.split("#", 1) if not content.strip(): continue - entry, value = content.split('=', 1) + entry, value = content.split("=", 1) result[entry.strip()] = value.strip() return result def get_buildid(): - path = os.path.join(buildconfig.topobjdir, 'buildid.h') - define, MOZ_BUILDID, buildid = io.open(path, 'r', encoding='utf-8').read().split() + path = os.path.join(buildconfig.topobjdir, "buildid.h") + define, MOZ_BUILDID, buildid = io.open(path, "r", encoding="utf-8").read().split() return buildid def days_from_2000_to_buildid(buildid): start = datetime(2000, 1, 1, 0, 0, 0) - buildid_time = datetime.strptime(buildid, '%Y%m%d%H%M%S') + buildid_time = datetime.strptime(buildid, "%Y%m%d%H%M%S") return (buildid_time - start).days @@ -105,11 +105,11 @@ def digits_only(s): for l in range(len(s), 0, -1): if s[:l].isdigit(): return s[:l] - return '0' + return "0" def split_and_normalize_version(version, len): - return ([digits_only(x) for x in version.split('.')] + ['0'] * len)[:len] + return ([digits_only(x) for x in version.split(".")] + ["0"] * len)[:len] def has_manifest(module_rc, manifest_id): @@ -118,41 +118,43 @@ def has_manifest(module_rc, manifest_id): if len(line) < 2: continue id, what, *rest = line - if id == manifest_id and what in ('24', 'RT_MANIFEST'): + if id == manifest_id and what in ("24", "RT_MANIFEST"): return True return False -def generate_module_rc(binary='', rcinclude=None): +def generate_module_rc(binary="", rcinclude=None): deps = set() buildid = get_buildid() - milestone = buildconfig.substs['GRE_MILESTONE'] - app_version = buildconfig.substs.get('MOZ_APP_VERSION') or milestone - app_winversion = ','.join(split_and_normalize_version(app_version, 4)) - milestone_winversion = ','.join(split_and_normalize_version(milestone, 3) + - [str(days_from_2000_to_buildid(buildid))]) - display_name = buildconfig.substs.get('MOZ_APP_DISPLAYNAME', 'Mozilla') + milestone = buildconfig.substs["GRE_MILESTONE"] + app_version = buildconfig.substs.get("MOZ_APP_VERSION") or milestone + app_winversion = ",".join(split_and_normalize_version(app_version, 4)) + milestone_winversion = ",".join( + split_and_normalize_version(milestone, 3) + + [str(days_from_2000_to_buildid(buildid))] + ) + display_name = buildconfig.substs.get("MOZ_APP_DISPLAYNAME", "Mozilla") milestone_string = milestone - flags = ['0'] - if buildconfig.substs.get('MOZ_DEBUG'): - flags.append('VS_FF_DEBUG') - milestone_string += ' Debug' - if not buildconfig.substs.get('MOZILLA_OFFICIAL'): - flags.append('VS_FF_PRIVATEBUILD') - if buildconfig.substs.get('NIGHTLY_BUILD'): - flags.append('VS_FF_PRERELEASE') + flags = ["0"] + if buildconfig.substs.get("MOZ_DEBUG"): + flags.append("VS_FF_DEBUG") + milestone_string += " Debug" + if not buildconfig.substs.get("MOZILLA_OFFICIAL"): + flags.append("VS_FF_PRIVATEBUILD") + if buildconfig.substs.get("NIGHTLY_BUILD"): + flags.append("VS_FF_PRERELEASE") defines = { - 
'MOZ_APP_DISPLAYNAME': display_name, - 'MOZ_APP_VERSION': app_version, - 'MOZ_APP_WINVERSION': app_winversion, + "MOZ_APP_DISPLAYNAME": display_name, + "MOZ_APP_VERSION": app_version, + "MOZ_APP_WINVERSION": app_winversion, } - relobjdir = os.path.relpath('.', buildconfig.topobjdir) + relobjdir = os.path.relpath(".", buildconfig.topobjdir) srcdir = os.path.join(buildconfig.topsrcdir, relobjdir) - module_ver = os.path.join(srcdir, 'module.ver') + module_ver = os.path.join(srcdir, "module.ver") if os.path.exists(module_ver): deps.add(module_ver) overrides = parse_module_ver(module_ver, defines) @@ -160,39 +162,42 @@ def generate_module_rc(binary='', rcinclude=None): overrides = {} if rcinclude: - include = '// From included resource {}\n{}'.format( - rcinclude, preprocess(rcinclude, defines).read()) + include = "// From included resource {}\n{}".format( + rcinclude, preprocess(rcinclude, defines).read() + ) else: - include = '' + include = "" data = TEMPLATE.format( include=include, - fileversion=overrides.get('WIN32_MODULE_FILEVERSION', milestone_winversion), - productversion=overrides.get('WIN32_MODULE_PRODUCTVERSION', milestone_winversion), - fileflags=' | '.join(flags), - comment=overrides.get('WIN32_MODULE_COMMENT', ''), - copyright=overrides.get('WIN32_MODULE_COPYRIGHT', 'License: MPL 2'), - company=overrides.get('WIN32_MODULE_COMPANYNAME', 'Mozilla Foundation'), - description=overrides.get('WIN32_MODULE_DESCRIPTION', ''), - mfversion=overrides.get('WIN32_MODULE_FILEVERSION_STRING', milestone_string), - mpversion=overrides.get('WIN32_MODULE_PRODUCTVERSION_STRING', milestone_string), - module=overrides.get('WIN32_MODULE_NAME', ''), - trademarks=overrides.get('WIN32_MODULE_TRADEMARKS', 'Mozilla'), - binary=overrides.get('WIN32_MODULE_ORIGINAL_FILENAME', binary), - productname=overrides.get('WIN32_MODULE_PRODUCTNAME', display_name), + fileversion=overrides.get("WIN32_MODULE_FILEVERSION", milestone_winversion), + productversion=overrides.get( + "WIN32_MODULE_PRODUCTVERSION", milestone_winversion + ), + fileflags=" | ".join(flags), + comment=overrides.get("WIN32_MODULE_COMMENT", ""), + copyright=overrides.get("WIN32_MODULE_COPYRIGHT", "License: MPL 2"), + company=overrides.get("WIN32_MODULE_COMPANYNAME", "Mozilla Foundation"), + description=overrides.get("WIN32_MODULE_DESCRIPTION", ""), + mfversion=overrides.get("WIN32_MODULE_FILEVERSION_STRING", milestone_string), + mpversion=overrides.get("WIN32_MODULE_PRODUCTVERSION_STRING", milestone_string), + module=overrides.get("WIN32_MODULE_NAME", ""), + trademarks=overrides.get("WIN32_MODULE_TRADEMARKS", "Mozilla"), + binary=overrides.get("WIN32_MODULE_ORIGINAL_FILENAME", binary), + productname=overrides.get("WIN32_MODULE_PRODUCTNAME", display_name), buildid=buildid, ) - manifest_id = '2' if binary.lower().endswith('.dll') else '1' + manifest_id = "2" if binary.lower().endswith(".dll") else "1" if binary and not has_manifest(data, manifest_id): - manifest_path = os.path.join(srcdir, binary + '.manifest') + manifest_path = os.path.join(srcdir, binary + ".manifest") if os.path.exists(manifest_path): - manifest_path = manifest_path.replace('\\', '\\\\') + manifest_path = manifest_path.replace("\\", "\\\\") data += '\n{} RT_MANIFEST "{}"\n'.format(manifest_id, manifest_path) - with io.open('{}.rc'.format(binary or 'module'), 'w', encoding='latin1') as fh: + with io.open("{}.rc".format(binary or "module"), "w", encoding="latin1") as fh: fh.write(data) -if __name__ == '__main__': +if __name__ == "__main__": generate_module_rc(*sys.argv[1:]) diff --git 
a/config/create_res.py b/config/create_res.py index bfa69aafbcf650..5588fa0a4900dc 100644 --- a/config/create_res.py +++ b/config/create_res.py @@ -18,27 +18,31 @@ class CPPFlag(Action): all_flags = [] def __call__(self, parser, namespace, values, option_string=None): - if 'windres' in buildconfig.substs['RC'].lower(): - if option_string == '-U': + if "windres" in buildconfig.substs["RC"].lower(): + if option_string == "-U": return - if option_string == '-I': - option_string = '--include-dir' + if option_string == "-I": + option_string = "--include-dir" self.all_flags.extend((option_string, values)) def generate_res(): parser = ArgumentParser() - parser.add_argument('-D', action=CPPFlag, metavar='VAR[=VAL]', help='Define a variable') - parser.add_argument('-U', action=CPPFlag, metavar='VAR', help='Undefine a variable') - parser.add_argument('-I', action=CPPFlag, metavar='DIR', help='Search path for includes') - parser.add_argument('-o', dest='output', metavar='OUTPUT', help='Output file') - parser.add_argument('input', help='Input file') + parser.add_argument( + "-D", action=CPPFlag, metavar="VAR[=VAL]", help="Define a variable" + ) + parser.add_argument("-U", action=CPPFlag, metavar="VAR", help="Undefine a variable") + parser.add_argument( + "-I", action=CPPFlag, metavar="DIR", help="Search path for includes" + ) + parser.add_argument("-o", dest="output", metavar="OUTPUT", help="Output file") + parser.add_argument("input", help="Input file") args = parser.parse_args() - is_windres = 'windres' in buildconfig.substs['RC'].lower() + is_windres = "windres" in buildconfig.substs["RC"].lower() - verbose = os.environ.get('BUILD_VERBOSE_LOG') + verbose = os.environ.get("BUILD_VERBOSE_LOG") # llvm-rc doesn't preprocess on its own, so preprocess manually # Theoretically, not windres could be rc.exe, but configure won't use it @@ -46,12 +50,12 @@ def generate_res(): # output. try: if not is_windres: - fd, path = tempfile.mkstemp(suffix='.rc') - command = buildconfig.substs['CXXCPP'] + CPPFlag.all_flags - command.extend(('-DRC_INVOKED', args.input)) + fd, path = tempfile.mkstemp(suffix=".rc") + command = buildconfig.substs["CXXCPP"] + CPPFlag.all_flags + command.extend(("-DRC_INVOKED", args.input)) if verbose: - print('Executing:', ' '.join(command)) - with os.fdopen(fd, 'wb') as fh: + print("Executing:", " ".join(command)) + with os.fdopen(fd, "wb") as fh: retcode = subprocess.run(command, stdout=fh).returncode if retcode: # Rely on the subprocess printing out any relevant error @@ -59,9 +63,9 @@ def generate_res(): else: path = args.input - command = [buildconfig.substs['RC']] + command = [buildconfig.substs["RC"]] if is_windres: - command.extend(('-O', 'coff')) + command.extend(("-O", "coff")) # Even though llvm-rc doesn't preprocess, we still need to pass at least # the -I flags. @@ -69,15 +73,15 @@ def generate_res(): if args.output: if is_windres: - command.extend(('-o', args.output)) + command.extend(("-o", args.output)) else: # Use win1252 code page for the input. 
- command.extend(('-c', '1252', '-Fo' + args.output)) + command.extend(("-c", "1252", "-Fo" + args.output)) command.append(path) if verbose: - print('Executing:', ' '.join(command)) + print("Executing:", " ".join(command)) retcode = subprocess.run(command).returncode if retcode: # Rely on the subprocess printing out any relevant error @@ -89,5 +93,5 @@ def generate_res(): return 0 -if __name__ == '__main__': +if __name__ == "__main__": sys.exit(generate_res()) diff --git a/config/createprecomplete.py b/config/createprecomplete.py index fadd796aa21f40..dda4efcdf8e194 100644 --- a/config/createprecomplete.py +++ b/config/createprecomplete.py @@ -14,25 +14,27 @@ def get_build_entries(root_path): - """ Iterates through the root_path, creating a list for each file and - directory. Excludes any file paths ending with channel-prefs.js. + """Iterates through the root_path, creating a list for each file and + directory. Excludes any file paths ending with channel-prefs.js. """ rel_file_path_set = set() rel_dir_path_set = set() for root, dirs, files in os.walk(root_path): for file_name in files: - parent_dir_rel_path = root[len(root_path)+1:] + parent_dir_rel_path = root[len(root_path) + 1 :] rel_path_file = os.path.join(parent_dir_rel_path, file_name) rel_path_file = rel_path_file.replace("\\", "/") - if not (rel_path_file.endswith("channel-prefs.js") or - rel_path_file.endswith("update-settings.ini") or - rel_path_file.find("distribution/") != -1): + if not ( + rel_path_file.endswith("channel-prefs.js") + or rel_path_file.endswith("update-settings.ini") + or rel_path_file.find("distribution/") != -1 + ): rel_file_path_set.add(rel_path_file) for dir_name in dirs: - parent_dir_rel_path = root[len(root_path)+1:] + parent_dir_rel_path = root[len(root_path) + 1 :] rel_path_dir = os.path.join(parent_dir_rel_path, dir_name) - rel_path_dir = rel_path_dir.replace("\\", "/")+"/" + rel_path_dir = rel_path_dir.replace("\\", "/") + "/" if rel_path_dir.find("distribution/") == -1: rel_dir_path_set.add(rel_path_dir) @@ -45,26 +47,26 @@ def get_build_entries(root_path): def generate_precomplete(root_path): - """ Creates the precomplete file containing the remove and rmdir - application update instructions. The given directory is used - for the location to enumerate and to create the precomplete file. + """Creates the precomplete file containing the remove and rmdir + application update instructions. The given directory is used + for the location to enumerate and to create the precomplete file. """ rel_path_precomplete = "precomplete" # If inside a Mac bundle use the root of the bundle for the path. if os.path.basename(root_path) == "Resources": - root_path = os.path.abspath(os.path.join(root_path, '../../')) + root_path = os.path.abspath(os.path.join(root_path, "../../")) rel_path_precomplete = "Contents/Resources/precomplete" precomplete_file_path = os.path.join(root_path, rel_path_precomplete) # Open the file so it exists before building the list of files and open it # in binary mode to prevent OS specific line endings. 
- precomplete_file = io.open(precomplete_file_path, mode="wt", newline='\n') + precomplete_file = io.open(precomplete_file_path, mode="wt", newline="\n") rel_file_path_list, rel_dir_path_list = get_build_entries(root_path) for rel_file_path in rel_file_path_list: - precomplete_file.write("remove \""+rel_file_path+"\"\n") + precomplete_file.write('remove "' + rel_file_path + '"\n') for rel_dir_path in rel_dir_path_list: - precomplete_file.write("rmdir \""+rel_dir_path+"\"\n") + precomplete_file.write('rmdir "' + rel_dir_path + '"\n') precomplete_file.close() diff --git a/config/external/fdlibm/moz.build b/config/external/fdlibm/moz.build index 51caca5328d73a..343645f0ce8f91 100644 --- a/config/external/fdlibm/moz.build +++ b/config/external/fdlibm/moz.build @@ -4,9 +4,9 @@ # License, v. 2.0. If a copy of the MPL was not distributed with this # file, You can obtain one at http://mozilla.org/MPL/2.0/. -with Files('**'): - BUG_COMPONENT = ('Core', 'JavaScript Engine') +with Files("**"): + BUG_COMPONENT = ("Core", "JavaScript Engine") DIRS += [ - '../../../modules/fdlibm', + "../../../modules/fdlibm", ] diff --git a/config/external/ffi/moz.build b/config/external/ffi/moz.build index 6d3ab38e802536..682dc9933de26c 100644 --- a/config/external/ffi/moz.build +++ b/config/external/ffi/moz.build @@ -4,133 +4,148 @@ # License, v. 2.0. If a copy of the MPL was not distributed with this # file, You can obtain one at http://mozilla.org/MPL/2.0/. -FINAL_LIBRARY = 'js' +FINAL_LIBRARY = "js" -if CONFIG['MOZ_SYSTEM_FFI']: - OS_LIBS += CONFIG['MOZ_FFI_LIBS'] +if CONFIG["MOZ_SYSTEM_FFI"]: + OS_LIBS += CONFIG["MOZ_FFI_LIBS"] else: AllowCompilerWarnings() NoVisibilityFlags() CONFIGURE_DEFINE_FILES += [ - '/js/src/ctypes/libffi/fficonfig.h', + "/js/src/ctypes/libffi/fficonfig.h", ] LOCAL_INCLUDES += [ - '!/js/src/ctypes/libffi', - '!/js/src/ctypes/libffi/include', - '/js/src/ctypes/libffi/include', - '/js/src/ctypes/libffi/src/%s' % CONFIG['FFI_TARGET_DIR'], + "!/js/src/ctypes/libffi", + "!/js/src/ctypes/libffi/include", + "/js/src/ctypes/libffi/include", + "/js/src/ctypes/libffi/src/%s" % CONFIG["FFI_TARGET_DIR"], ] - DEFINES['TARGET'] = CONFIG['FFI_TARGET'] - DEFINES[CONFIG['FFI_TARGET']] = True - DEFINES['FFI_NO_RAW_API'] = True - DEFINES['FFI_BUILDING'] = True - DEFINES['HAVE_AS_ASCII_PSEUDO_OP'] = True - DEFINES['HAVE_AS_STRING_PSEUDO_OP'] = True - - if CONFIG['MOZ_DEBUG']: - DEFINES['FFI_DEBUG'] = True - if not CONFIG['MOZ_NO_DEBUG_RTL']: - DEFINES['USE_DEBUG_RTL'] = True + DEFINES["TARGET"] = CONFIG["FFI_TARGET"] + DEFINES[CONFIG["FFI_TARGET"]] = True + DEFINES["FFI_NO_RAW_API"] = True + DEFINES["FFI_BUILDING"] = True + DEFINES["HAVE_AS_ASCII_PSEUDO_OP"] = True + DEFINES["HAVE_AS_STRING_PSEUDO_OP"] = True + + if CONFIG["MOZ_DEBUG"]: + DEFINES["FFI_DEBUG"] = True + if not CONFIG["MOZ_NO_DEBUG_RTL"]: + DEFINES["USE_DEBUG_RTL"] = True SOURCES += [ - '/js/src/ctypes/libffi/src/debug.c', + "/js/src/ctypes/libffi/src/debug.c", ] - if CONFIG['OS_TARGET'] != 'WINNT': - DEFINES['HAVE_HIDDEN_VISIBILITY_ATTRIBUTE'] = True + if CONFIG["OS_TARGET"] != "WINNT": + DEFINES["HAVE_HIDDEN_VISIBILITY_ATTRIBUTE"] = True - if CONFIG['INTEL_ARCHITECTURE']: - DEFINES['HAVE_AS_X86_PCREL'] = True + if CONFIG["INTEL_ARCHITECTURE"]: + DEFINES["HAVE_AS_X86_PCREL"] = True # Don't bother setting EH_FRAME_FLAGS on Windows. # Quoted defines confuse msvcc.sh, and the value isn't used there. 
- if CONFIG['OS_TARGET'] != 'WINNT': - if CONFIG['FFI_TARGET'] == 'ARM': - DEFINES['EH_FRAME_FLAGS'] = '"aw"' + if CONFIG["OS_TARGET"] != "WINNT": + if CONFIG["FFI_TARGET"] == "ARM": + DEFINES["EH_FRAME_FLAGS"] = '"aw"' else: - DEFINES['EH_FRAME_FLAGS'] = '"a"' + DEFINES["EH_FRAME_FLAGS"] = '"a"' # Common source files. SOURCES += [ - '/js/src/ctypes/libffi/src/closures.c', - '/js/src/ctypes/libffi/src/java_raw_api.c', - '/js/src/ctypes/libffi/src/prep_cif.c', - '/js/src/ctypes/libffi/src/raw_api.c', - '/js/src/ctypes/libffi/src/types.c', + "/js/src/ctypes/libffi/src/closures.c", + "/js/src/ctypes/libffi/src/java_raw_api.c", + "/js/src/ctypes/libffi/src/prep_cif.c", + "/js/src/ctypes/libffi/src/raw_api.c", + "/js/src/ctypes/libffi/src/types.c", ] # Per-platform sources and flags. ffi_srcs = () ffi_h_defines = [] - if CONFIG['FFI_TARGET'] == 'X86_WIN64': - if CONFIG['CC_TYPE'] == 'clang-cl': - ffi_srcs = ('ffiw64.c',) + if CONFIG["FFI_TARGET"] == "X86_WIN64": + if CONFIG["CC_TYPE"] == "clang-cl": + ffi_srcs = ("ffiw64.c",) # libffi asm needs to be preprocessed for MSVC's assembler - GeneratedFile('win64_intel.asm', inputs=[ - '/js/src/ctypes/libffi/src/x86/win64_intel.S', - '!/js/src/ctypes/libffi/fficonfig.h', - '!/js/src/ctypes/libffi/include/ffi.h', - ], script='preprocess_libffi_asm.py', - flags=['$(DEFINES)', '$(LOCAL_INCLUDES)']) - SOURCES += ['!win64_intel.asm'] + GeneratedFile( + "win64_intel.asm", + inputs=[ + "/js/src/ctypes/libffi/src/x86/win64_intel.S", + "!/js/src/ctypes/libffi/fficonfig.h", + "!/js/src/ctypes/libffi/include/ffi.h", + ], + script="preprocess_libffi_asm.py", + flags=["$(DEFINES)", "$(LOCAL_INCLUDES)"], + ) + SOURCES += ["!win64_intel.asm"] else: - ffi_srcs = ('ffiw64.c', 'win64.S') + ffi_srcs = ("ffiw64.c", "win64.S") - elif CONFIG['FFI_TARGET'] == 'X86_64': - DEFINES['HAVE_AS_X86_64_UNWIND_SECTION_TYPE'] = True - ffi_srcs = ('ffi64.c', 'unix64.S', 'ffiw64.c', 'win64.S') + elif CONFIG["FFI_TARGET"] == "X86_64": + DEFINES["HAVE_AS_X86_64_UNWIND_SECTION_TYPE"] = True + ffi_srcs = ("ffi64.c", "unix64.S", "ffiw64.c", "win64.S") - elif CONFIG['FFI_TARGET'] == 'X86_WIN32' and CONFIG['CC_TYPE'] == 'clang-cl': - ffi_srcs = ('ffi.c',) + elif CONFIG["FFI_TARGET"] == "X86_WIN32" and CONFIG["CC_TYPE"] == "clang-cl": + ffi_srcs = ("ffi.c",) # libffi asm needs to be preprocessed for MSVC's assembler - GeneratedFile('sysv_intel.asm', inputs=[ - '/js/src/ctypes/libffi/src/x86/sysv_intel.S', - '!/js/src/ctypes/libffi/fficonfig.h', - '!/js/src/ctypes/libffi/include/ffi.h', - ], script='preprocess_libffi_asm.py', - flags=['$(DEFINES)', '$(LOCAL_INCLUDES)']) - SOURCES += ['!sysv_intel.asm'] - ASFLAGS += ['-safeseh'] - - elif CONFIG['FFI_TARGET'] == 'ARM_WIN64': - ffi_srcs = ('ffi.c',) + GeneratedFile( + "sysv_intel.asm", + inputs=[ + "/js/src/ctypes/libffi/src/x86/sysv_intel.S", + "!/js/src/ctypes/libffi/fficonfig.h", + "!/js/src/ctypes/libffi/include/ffi.h", + ], + script="preprocess_libffi_asm.py", + flags=["$(DEFINES)", "$(LOCAL_INCLUDES)"], + ) + SOURCES += ["!sysv_intel.asm"] + ASFLAGS += ["-safeseh"] + + elif CONFIG["FFI_TARGET"] == "ARM_WIN64": + ffi_srcs = ("ffi.c",) # libffi asm needs to be preprocessed for MSVC's assembler GeneratedFile( - 'win64_armasm.asm', + "win64_armasm.asm", inputs=[ - '/js/src/ctypes/libffi/src/aarch64/win64_armasm.S', - '!/js/src/ctypes/libffi/fficonfig.h', - '!/js/src/ctypes/libffi/include/ffi.h', + "/js/src/ctypes/libffi/src/aarch64/win64_armasm.S", + "!/js/src/ctypes/libffi/fficonfig.h", + "!/js/src/ctypes/libffi/include/ffi.h", ], - 
script='preprocess_libffi_asm.py', - flags=['$(DEFINES)', '$(LOCAL_INCLUDES)']) - SOURCES += ['!win64_armasm.asm'] + script="preprocess_libffi_asm.py", + flags=["$(DEFINES)", "$(LOCAL_INCLUDES)"], + ) + SOURCES += ["!win64_armasm.asm"] else: - ffi_srcs = ('ffi.c', 'sysv.S') - if CONFIG['FFI_TARGET'] == 'ARM' and CONFIG['CC_TYPE'] == 'clang': - ASFLAGS += ['-no-integrated-as'] - - if CONFIG['FFI_TARGET'] in ('X86_WIN32', 'X86_DARWIN') and \ - CONFIG['CC_TYPE'] in ('gcc', 'clang'): - DEFINES['SYMBOL_UNDERSCORE'] = True - - if CONFIG['OS_ARCH'] == 'Darwin' and CONFIG['CPU_ARCH'] in ('arm', 'aarch64'): - DEFINES['FFI_EXEC_TRAMPOLINE_TABLE'] = True - ffi_h_defines.append('FFI_EXEC_TRAMPOLINE_TABLE') - - elif CONFIG['OS_ARCH'] in ('Darwin', 'FreeBSD', 'GNU_kFreeBSD', 'OpenBSD', 'SunOS') or \ - CONFIG['OS_TARGET'] == 'Android': - DEFINES['FFI_MMAP_EXEC_WRIT'] = True + ffi_srcs = ("ffi.c", "sysv.S") + if CONFIG["FFI_TARGET"] == "ARM" and CONFIG["CC_TYPE"] == "clang": + ASFLAGS += ["-no-integrated-as"] + + if CONFIG["FFI_TARGET"] in ("X86_WIN32", "X86_DARWIN") and CONFIG["CC_TYPE"] in ( + "gcc", + "clang", + ): + DEFINES["SYMBOL_UNDERSCORE"] = True + + if CONFIG["OS_ARCH"] == "Darwin" and CONFIG["CPU_ARCH"] in ("arm", "aarch64"): + DEFINES["FFI_EXEC_TRAMPOLINE_TABLE"] = True + ffi_h_defines.append("FFI_EXEC_TRAMPOLINE_TABLE") + + elif ( + CONFIG["OS_ARCH"] in ("Darwin", "FreeBSD", "GNU_kFreeBSD", "OpenBSD", "SunOS") + or CONFIG["OS_TARGET"] == "Android" + ): + DEFINES["FFI_MMAP_EXEC_WRIT"] = True SOURCES += [ - '/js/src/ctypes/libffi/src/%s/%s' % (CONFIG['FFI_TARGET_DIR'], s) + "/js/src/ctypes/libffi/src/%s/%s" % (CONFIG["FFI_TARGET_DIR"], s) for s in sorted(ffi_srcs) ] - GeneratedFile('/js/src/ctypes/libffi/include/ffi.h', - script='subst_header.py', - inputs=['/js/src/ctypes/libffi/include/ffi.h.in'], - flags=ffi_h_defines) + GeneratedFile( + "/js/src/ctypes/libffi/include/ffi.h", + script="subst_header.py", + inputs=["/js/src/ctypes/libffi/include/ffi.h.in"], + flags=ffi_h_defines, + ) diff --git a/config/external/ffi/preprocess_libffi_asm.py b/config/external/ffi/preprocess_libffi_asm.py index 81ce51d6fa2fa9..69e65be5ea89e9 100644 --- a/config/external/ffi/preprocess_libffi_asm.py +++ b/config/external/ffi/preprocess_libffi_asm.py @@ -18,9 +18,9 @@ def main(output, input_asm, ffi_h, ffi_config_h, defines, includes): includes = shlex.split(includes) # CPP uses -E which generates #line directives. -EP suppresses them. # -TC forces the compiler to treat the input as C. 
- cpp = buildconfig.substs['CPP'] + ['-EP'] + ['-TC'] + cpp = buildconfig.substs["CPP"] + ["-EP"] + ["-TC"] input_asm = mozpath.relpath(input_asm, os.getcwd()) args = cpp + defines + includes + [input_asm] - print(' '.join(args)) + print(" ".join(args)) preprocessed = subprocess.check_output(args) output.write(preprocessed) diff --git a/config/external/ffi/subst_header.py b/config/external/ffi/subst_header.py index da48e2fca25545..7614cfd92fd5f5 100644 --- a/config/external/ffi/subst_header.py +++ b/config/external/ffi/subst_header.py @@ -12,19 +12,21 @@ def main(output, input_file, *defines): pp = Preprocessor() - pp.context.update({ - 'FFI_EXEC_TRAMPOLINE_TABLE': '0', - 'HAVE_LONG_DOUBLE': '0', - 'TARGET': buildconfig.substs['FFI_TARGET'], - 'VERSION': '', - }) + pp.context.update( + { + "FFI_EXEC_TRAMPOLINE_TABLE": "0", + "HAVE_LONG_DOUBLE": "0", + "TARGET": buildconfig.substs["FFI_TARGET"], + "VERSION": "", + } + ) for d in defines: - pp.context.update({d: '1'}) - pp.do_filter('substitution') + pp.context.update({d: "1"}) + pp.do_filter("substitution") pp.setMarker(None) pp.out = output pp.do_include(input_file) -if __name__ == '__main__': +if __name__ == "__main__": main(*sys.agv[1:]) diff --git a/config/external/freetype2/moz.build b/config/external/freetype2/moz.build index 1c13f39fbf113f..3d025e1ee4c645 100644 --- a/config/external/freetype2/moz.build +++ b/config/external/freetype2/moz.build @@ -4,11 +4,11 @@ # License, v. 2.0. If a copy of the MPL was not distributed with this # file, You can obtain one at http://mozilla.org/MPL/2.0/. -Library('freetype') +Library("freetype") -if CONFIG['MOZ_TREE_FREETYPE']: +if CONFIG["MOZ_TREE_FREETYPE"]: DIRS += [ - '/modules/freetype2', + "/modules/freetype2", ] else: - OS_LIBS += CONFIG['FT2_LIBS'] + OS_LIBS += CONFIG["FT2_LIBS"] diff --git a/config/external/icu/common/moz.build b/config/external/icu/common/moz.build index 6b1e02c20336ce..ce1b0d05748b42 100644 --- a/config/external/icu/common/moz.build +++ b/config/external/icu/common/moz.build @@ -4,30 +4,30 @@ # License, v. 2.0. If a copy of the MPL was not distributed with this # file, You can obtain one at http://mozilla.org/MPL/2.0/. 
-Library('icuuc') -FINAL_LIBRARY = 'icu' +Library("icuuc") +FINAL_LIBRARY = "icu" -DEFINES['U_COMMON_IMPLEMENTATION'] = True +DEFINES["U_COMMON_IMPLEMENTATION"] = True -LOCAL_INCLUDES += ['/intl/icu/source/i18n'] +LOCAL_INCLUDES += ["/intl/icu/source/i18n"] -if CONFIG['CC_TYPE'] in ('clang', 'gcc'): +if CONFIG["CC_TYPE"] in ("clang", "gcc"): CXXFLAGS += [ - '-Wno-deprecated-declarations', - '-Wno-type-limits', - '-Wno-unused-but-set-variable', - '-Wno-unused-function', - '-Wno-sign-compare', - '-Wno-maybe-uninitialized', + "-Wno-deprecated-declarations", + "-Wno-type-limits", + "-Wno-unused-but-set-variable", + "-Wno-unused-function", + "-Wno-sign-compare", + "-Wno-maybe-uninitialized", ] -include('../defs.mozbuild') -include('sources.mozbuild') +include("../defs.mozbuild") +include("sources.mozbuild") SOURCES += sources -if CONFIG['TARGET_ENDIANNESS'] == 'big': - HostLibrary('host_icuuc') - HOST_DEFINES['U_COMMON_IMPLEMENTATION'] = True +if CONFIG["TARGET_ENDIANNESS"] == "big": + HostLibrary("host_icuuc") + HOST_DEFINES["U_COMMON_IMPLEMENTATION"] = True HOST_SOURCES += sources HOST_SOURCES += other_sources diff --git a/config/external/icu/data/convert_icudata.py b/config/external/icu/data/convert_icudata.py index c6cb2bdfc565ca..c8c18f9e0196b2 100644 --- a/config/external/icu/data/convert_icudata.py +++ b/config/external/icu/data/convert_icudata.py @@ -10,9 +10,11 @@ def main(output, data_file): output.close() - subprocess.run([ - os.path.join(buildconfig.topobjdir, 'dist', 'host', 'bin', 'icupkg'), - '-tb', - data_file, - output.name, - ]) + subprocess.run( + [ + os.path.join(buildconfig.topobjdir, "dist", "host", "bin", "icupkg"), + "-tb", + data_file, + output.name, + ] + ) diff --git a/config/external/icu/data/moz.build b/config/external/icu/data/moz.build index 9033631336a355..a2b7cbe8bbcaf8 100644 --- a/config/external/icu/data/moz.build +++ b/config/external/icu/data/moz.build @@ -6,33 +6,35 @@ # Build a library containing the ICU data for use in the JS shell, so that # JSAPI consumers don't have to deal with setting ICU's data path. 
-Library('icudata') +Library("icudata") -LOCAL_INCLUDES += ['.'] +LOCAL_INCLUDES += ["."] -prefix = '' -if (CONFIG['OS_ARCH'] == 'WINNT' and CONFIG['CPU_ARCH'] == 'x86') or CONFIG['OS_ARCH'] == 'Darwin': - prefix = '_' +prefix = "" +if (CONFIG["OS_ARCH"] == "WINNT" and CONFIG["CPU_ARCH"] == "x86") or CONFIG[ + "OS_ARCH" +] == "Darwin": + prefix = "_" data_file = { - 'little': 'icudt%sl.dat' % CONFIG['MOZ_ICU_VERSION'], - 'big': 'icudt%sb.dat' % CONFIG['MOZ_ICU_VERSION'], + "little": "icudt%sl.dat" % CONFIG["MOZ_ICU_VERSION"], + "big": "icudt%sb.dat" % CONFIG["MOZ_ICU_VERSION"], } data_dir = { - 'little': SRCDIR, - 'big': OBJDIR, + "little": SRCDIR, + "big": OBJDIR, } -endianness = CONFIG.get('TARGET_ENDIANNESS', 'little') -DEFINES['ICU_DATA_FILE'] = '"%s/%s"' % (data_dir[endianness], data_file[endianness]) -DEFINES['ICU_DATA_SYMBOL'] = '%sicudt%s_dat' % (prefix, CONFIG['MOZ_ICU_VERSION']) +endianness = CONFIG.get("TARGET_ENDIANNESS", "little") +DEFINES["ICU_DATA_FILE"] = '"%s/%s"' % (data_dir[endianness], data_file[endianness]) +DEFINES["ICU_DATA_SYMBOL"] = "%sicudt%s_dat" % (prefix, CONFIG["MOZ_ICU_VERSION"]) SOURCES += [ - 'icu_data.S', + "icu_data.S", ] -if CONFIG['OS_ARCH'] == 'WINNT' and CONFIG['CC_TYPE'] == 'clang-cl': +if CONFIG["OS_ARCH"] == "WINNT" and CONFIG["CC_TYPE"] == "clang-cl": USE_INTEGRATED_CLANGCL_AS = True -if CONFIG['TARGET_ENDIANNESS'] == 'big': - GeneratedFile(data_file['big'], - script='convert_icudata.py', - inputs=[data_file['little']]) +if CONFIG["TARGET_ENDIANNESS"] == "big": + GeneratedFile( + data_file["big"], script="convert_icudata.py", inputs=[data_file["little"]] + ) diff --git a/config/external/icu/i18n/moz.build b/config/external/icu/i18n/moz.build index b530e2399b7289..07adbcc7a118a2 100644 --- a/config/external/icu/i18n/moz.build +++ b/config/external/icu/i18n/moz.build @@ -4,20 +4,20 @@ # License, v. 2.0. If a copy of the MPL was not distributed with this # file, You can obtain one at http://mozilla.org/MPL/2.0/. -Library('icui18n') -FINAL_LIBRARY = 'icu' +Library("icui18n") +FINAL_LIBRARY = "icu" -DEFINES['U_I18N_IMPLEMENTATION'] = True +DEFINES["U_I18N_IMPLEMENTATION"] = True -LOCAL_INCLUDES += ['/intl/icu/source/common'] +LOCAL_INCLUDES += ["/intl/icu/source/common"] -include('../defs.mozbuild') -include('sources.mozbuild') +include("../defs.mozbuild") +include("sources.mozbuild") SOURCES += sources -if CONFIG['TARGET_ENDIANNESS'] == 'big': - HostLibrary('host_icui18n') - HOST_DEFINES['U_I18N_IMPLEMENTATION'] = True +if CONFIG["TARGET_ENDIANNESS"] == "big": + HostLibrary("host_icui18n") + HOST_DEFINES["U_I18N_IMPLEMENTATION"] = True HOST_SOURCES += sources HOST_SOURCES += other_sources diff --git a/config/external/icu/icupkg/moz.build b/config/external/icu/icupkg/moz.build index 72cf9c33d91050..39314fbda79116 100644 --- a/config/external/icu/icupkg/moz.build +++ b/config/external/icu/icupkg/moz.build @@ -4,24 +4,24 @@ # License, v. 2.0. If a copy of the MPL was not distributed with this # file, You can obtain one at http://mozilla.org/MPL/2.0/. 
-HostProgram('icupkg') +HostProgram("icupkg") LOCAL_INCLUDES += [ - '/intl/icu/source/common', - '/intl/icu/source/i18n', - '/intl/icu/source/tools/toolutil', + "/intl/icu/source/common", + "/intl/icu/source/i18n", + "/intl/icu/source/tools/toolutil", ] -include('../defs.mozbuild') -include('sources.mozbuild') +include("../defs.mozbuild") +include("sources.mozbuild") HOST_SOURCES += sources HOST_SOURCES += [ - '/intl/icu/source/stubdata/stubdata.cpp', + "/intl/icu/source/stubdata/stubdata.cpp", ] HOST_USE_LIBS += [ - 'host_icui18n', - 'host_icutoolutil', - 'host_icuuc', + "host_icui18n", + "host_icutoolutil", + "host_icuuc", ] diff --git a/config/external/icu/moz.build b/config/external/icu/moz.build index ee203e8cdb434b..296d60d521f775 100644 --- a/config/external/icu/moz.build +++ b/config/external/icu/moz.build @@ -4,19 +4,19 @@ # License, v. 2.0. If a copy of the MPL was not distributed with this # file, You can obtain one at http://mozilla.org/MPL/2.0/. -Library('icu') +Library("icu") -if CONFIG['MOZ_SYSTEM_ICU']: - OS_LIBS += CONFIG['MOZ_ICU_LIBS'] +if CONFIG["MOZ_SYSTEM_ICU"]: + OS_LIBS += CONFIG["MOZ_ICU_LIBS"] else: DIRS += [ - 'common', - 'data', - 'i18n', + "common", + "data", + "i18n", ] - if CONFIG['TARGET_ENDIANNESS'] == 'big': + if CONFIG["TARGET_ENDIANNESS"] == "big": DIRS += [ - 'toolutil', - 'icupkg', + "toolutil", + "icupkg", ] - USE_LIBS += ['icudata'] + USE_LIBS += ["icudata"] diff --git a/config/external/icu/toolutil/moz.build b/config/external/icu/toolutil/moz.build index 7de97db9e100e0..e8dc827c2bbeec 100644 --- a/config/external/icu/toolutil/moz.build +++ b/config/external/icu/toolutil/moz.build @@ -4,17 +4,17 @@ # License, v. 2.0. If a copy of the MPL was not distributed with this # file, You can obtain one at http://mozilla.org/MPL/2.0/. -HostLibrary('host_icutoolutil') -FINAL_LIBRARY = 'icu' +HostLibrary("host_icutoolutil") +FINAL_LIBRARY = "icu" -DEFINES['U_TOOLUTIL_IMPLEMENTATION'] = True +DEFINES["U_TOOLUTIL_IMPLEMENTATION"] = True LOCAL_INCLUDES += [ - '/intl/icu/source/common', - '/intl/icu/source/i18n', + "/intl/icu/source/common", + "/intl/icu/source/i18n", ] -include('../defs.mozbuild') -include('sources.mozbuild') +include("../defs.mozbuild") +include("sources.mozbuild") HOST_SOURCES += sources diff --git a/config/external/lgpllibs/moz.build b/config/external/lgpllibs/moz.build index a6d616c7bbfde3..d7c38ac630e95c 100644 --- a/config/external/lgpllibs/moz.build +++ b/config/external/lgpllibs/moz.build @@ -10,5 +10,5 @@ # # Any library added here should also be reflected in the about:license page. -GeckoSharedLibrary('lgpllibs', linkage=None) -SHARED_LIBRARY_NAME = 'lgpllibs' +GeckoSharedLibrary("lgpllibs", linkage=None) +SHARED_LIBRARY_NAME = "lgpllibs" diff --git a/config/external/moz.build b/config/external/moz.build index 70aa33930812c2..4e9888f36503e5 100644 --- a/config/external/moz.build +++ b/config/external/moz.build @@ -7,65 +7,65 @@ external_dirs = [] DIRS += [ - 'lgpllibs', - 'rlbox', - 'rlbox_lucet_sandbox', - 'sqlite', + "lgpllibs", + "rlbox", + "rlbox_lucet_sandbox", + "sqlite", ] -if not CONFIG['MOZ_SYSTEM_JPEG']: - external_dirs += ['media/libjpeg'] +if not CONFIG["MOZ_SYSTEM_JPEG"]: + external_dirs += ["media/libjpeg"] DIRS += [ - '/third_party/prio', - '/third_party/msgpack', - '/third_party/sipcc', + "/third_party/prio", + "/third_party/msgpack", + "/third_party/sipcc", ] # There's no "native" brotli or woff2 yet, but probably in the future... 
-external_dirs += ['modules/brotli'] -external_dirs += ['modules/woff2'] +external_dirs += ["modules/brotli"] +external_dirs += ["modules/woff2"] -external_dirs += ['modules/xz-embedded'] +external_dirs += ["modules/xz-embedded"] -if CONFIG['MOZ_VORBIS']: - external_dirs += ['media/libvorbis'] +if CONFIG["MOZ_VORBIS"]: + external_dirs += ["media/libvorbis"] -if CONFIG['MOZ_TREMOR']: - external_dirs += ['media/libtremor'] +if CONFIG["MOZ_TREMOR"]: + external_dirs += ["media/libtremor"] -if CONFIG['MOZ_WEBM_ENCODER']: - external_dirs += ['media/libmkv'] +if CONFIG["MOZ_WEBM_ENCODER"]: + external_dirs += ["media/libmkv"] -if not CONFIG['MOZ_SYSTEM_LIBVPX']: - external_dirs += ['media/libvpx'] +if not CONFIG["MOZ_SYSTEM_LIBVPX"]: + external_dirs += ["media/libvpx"] -if CONFIG['MOZ_AV1']: - external_dirs += ['media/libaom'] - external_dirs += ['media/libdav1d'] +if CONFIG["MOZ_AV1"]: + external_dirs += ["media/libaom"] + external_dirs += ["media/libdav1d"] -if not CONFIG['MOZ_SYSTEM_PNG']: - external_dirs += ['media/libpng'] +if not CONFIG["MOZ_SYSTEM_PNG"]: + external_dirs += ["media/libpng"] -if not CONFIG['MOZ_SYSTEM_WEBP']: - external_dirs += ['media/libwebp'] +if not CONFIG["MOZ_SYSTEM_WEBP"]: + external_dirs += ["media/libwebp"] -if CONFIG['CPU_ARCH'] == 'arm': - external_dirs += ['media/openmax_dl/dl'] +if CONFIG["CPU_ARCH"] == "arm": + external_dirs += ["media/openmax_dl/dl"] -if CONFIG['MOZ_FFVPX']: - external_dirs += ['media/ffvpx'] +if CONFIG["MOZ_FFVPX"]: + external_dirs += ["media/ffvpx"] external_dirs += [ - 'media/kiss_fft', - 'media/libcubeb', - 'media/libnestegg', - 'media/libogg', - 'media/libopus', - 'media/libtheora', - 'media/libspeex_resampler', - 'media/libsoundtouch', - 'media/mp4parse-rust', - 'media/psshparser' + "media/kiss_fft", + "media/libcubeb", + "media/libnestegg", + "media/libogg", + "media/libopus", + "media/libtheora", + "media/libspeex_resampler", + "media/libsoundtouch", + "media/mp4parse-rust", + "media/psshparser", ] -DIRS += ['../../' + i for i in external_dirs] +DIRS += ["../../" + i for i in external_dirs] diff --git a/config/external/nspr/ds/moz.build b/config/external/nspr/ds/moz.build index d0e5c3fa37469e..d2676e20937a7f 100644 --- a/config/external/nspr/ds/moz.build +++ b/config/external/nspr/ds/moz.build @@ -4,31 +4,31 @@ # License, v. 2.0. If a copy of the MPL was not distributed with this # file, You can obtain one at http://mozilla.org/MPL/2.0/. -if CONFIG['MOZ_FOLD_LIBS']: - Library('plds4') +if CONFIG["MOZ_FOLD_LIBS"]: + Library("plds4") else: - SharedLibrary('plds4') - SOURCES += ['/nsprpub/lib/ds/plvrsion.c'] - USE_LIBS += ['nspr4'] + SharedLibrary("plds4") + SOURCES += ["/nsprpub/lib/ds/plvrsion.c"] + USE_LIBS += ["nspr4"] # We allow warnings for third-party code that can be updated from upstream. 
# TODO: fix NSPR warnings and remove this AllowCompilerWarnings() -DEFINES['_NSPR_BUILD_'] = True +DEFINES["_NSPR_BUILD_"] = True LOCAL_INCLUDES += [ - '/config/external/nspr', - '/nsprpub/pr/include', + "/config/external/nspr", + "/nsprpub/pr/include", ] EXPORTS.nspr += [ - '/nsprpub/lib/ds/plarena.h', - '/nsprpub/lib/ds/plarenas.h', - '/nsprpub/lib/ds/plhash.h', + "/nsprpub/lib/ds/plarena.h", + "/nsprpub/lib/ds/plarenas.h", + "/nsprpub/lib/ds/plhash.h", ] SOURCES += [ - '/nsprpub/lib/ds/plarena.c', - '/nsprpub/lib/ds/plhash.c', + "/nsprpub/lib/ds/plarena.c", + "/nsprpub/lib/ds/plhash.c", ] diff --git a/config/external/nspr/libc/moz.build b/config/external/nspr/libc/moz.build index c735ba7de2faf0..62bb8814f6579f 100644 --- a/config/external/nspr/libc/moz.build +++ b/config/external/nspr/libc/moz.build @@ -4,42 +4,42 @@ # License, v. 2.0. If a copy of the MPL was not distributed with this # file, You can obtain one at http://mozilla.org/MPL/2.0/. -if CONFIG['MOZ_FOLD_LIBS']: - Library('plc4') +if CONFIG["MOZ_FOLD_LIBS"]: + Library("plc4") else: - SharedLibrary('plc4') - SOURCES += ['/nsprpub/lib/libc/src/plvrsion.c'] - USE_LIBS += ['nspr4'] + SharedLibrary("plc4") + SOURCES += ["/nsprpub/lib/libc/src/plvrsion.c"] + USE_LIBS += ["nspr4"] # We allow warnings for third-party code that can be updated from upstream. # TODO: fix NSPR warnings and remove this AllowCompilerWarnings() -DEFINES['_NSPR_BUILD_'] = True +DEFINES["_NSPR_BUILD_"] = True LOCAL_INCLUDES += [ - '/config/external/nspr', + "/config/external/nspr", ] EXPORTS.nspr += [ - '/nsprpub/lib/libc/include/plbase64.h', - '/nsprpub/lib/libc/include/plerror.h', - '/nsprpub/lib/libc/include/plgetopt.h', - '/nsprpub/lib/libc/include/plstr.h', + "/nsprpub/lib/libc/include/plbase64.h", + "/nsprpub/lib/libc/include/plerror.h", + "/nsprpub/lib/libc/include/plgetopt.h", + "/nsprpub/lib/libc/include/plstr.h", ] SOURCES += [ - '/nsprpub/lib/libc/src/base64.c', - '/nsprpub/lib/libc/src/plerror.c', - '/nsprpub/lib/libc/src/plgetopt.c', - '/nsprpub/lib/libc/src/strcase.c', - '/nsprpub/lib/libc/src/strcat.c', - '/nsprpub/lib/libc/src/strchr.c', - '/nsprpub/lib/libc/src/strcmp.c', - '/nsprpub/lib/libc/src/strcpy.c', - '/nsprpub/lib/libc/src/strdup.c', - '/nsprpub/lib/libc/src/strlen.c', - '/nsprpub/lib/libc/src/strpbrk.c', - '/nsprpub/lib/libc/src/strstr.c', - '/nsprpub/lib/libc/src/strtok.c', + "/nsprpub/lib/libc/src/base64.c", + "/nsprpub/lib/libc/src/plerror.c", + "/nsprpub/lib/libc/src/plgetopt.c", + "/nsprpub/lib/libc/src/strcase.c", + "/nsprpub/lib/libc/src/strcat.c", + "/nsprpub/lib/libc/src/strchr.c", + "/nsprpub/lib/libc/src/strcmp.c", + "/nsprpub/lib/libc/src/strcpy.c", + "/nsprpub/lib/libc/src/strdup.c", + "/nsprpub/lib/libc/src/strlen.c", + "/nsprpub/lib/libc/src/strpbrk.c", + "/nsprpub/lib/libc/src/strstr.c", + "/nsprpub/lib/libc/src/strtok.c", ] diff --git a/config/external/nspr/moz.build b/config/external/nspr/moz.build index 0629702034769a..f2b3d4e8c1d5c7 100644 --- a/config/external/nspr/moz.build +++ b/config/external/nspr/moz.build @@ -4,25 +4,25 @@ # License, v. 2.0. If a copy of the MPL was not distributed with this # file, You can obtain one at http://mozilla.org/MPL/2.0/. -Library('nspr') +Library("nspr") -if CONFIG['MOZ_BUILD_NSPR']: +if CONFIG["MOZ_BUILD_NSPR"]: DIRS += [ - 'pr', - 'ds', - 'libc', + "pr", + "ds", + "libc", ] - if CONFIG['MOZ_FOLD_LIBS']: + if CONFIG["MOZ_FOLD_LIBS"]: # When folding libraries, nspr is actually in the nss library. 
USE_LIBS += [ - 'nss', + "nss", ] else: USE_LIBS += [ - 'nspr4', - 'plc4', - 'plds4', + "nspr4", + "plc4", + "plds4", ] - EXPORTS.nspr += ['prcpucfg.h'] + EXPORTS.nspr += ["prcpucfg.h"] else: - OS_LIBS += CONFIG['NSPR_LIBS'] + OS_LIBS += CONFIG["NSPR_LIBS"] diff --git a/config/external/nspr/pr/moz.build b/config/external/nspr/pr/moz.build index 9b323af89ad2e0..4e06a37323a41c 100644 --- a/config/external/nspr/pr/moz.build +++ b/config/external/nspr/pr/moz.build @@ -4,250 +4,253 @@ # License, v. 2.0. If a copy of the MPL was not distributed with this # file, You can obtain one at http://mozilla.org/MPL/2.0/. -if CONFIG['MOZ_FOLD_LIBS']: - Library('nspr4') +if CONFIG["MOZ_FOLD_LIBS"]: + Library("nspr4") else: - SharedLibrary('nspr4') + SharedLibrary("nspr4") # We allow warnings for third-party code that can be updated from upstream. # TODO: fix NSPR warnings and remove this AllowCompilerWarnings() -DEFINES['_NSPR_BUILD_'] = True -if CONFIG['OS_ARCH'] == 'Linux': - OS_LIBS += CONFIG['REALTIME_LIBS'] - DEFINES['LINUX'] = True - DEFINES['HAVE_FCNTL_FILE_LOCKING'] = True - DEFINES['HAVE_POINTER_LOCALTIME_R'] = True - DEFINES['_GNU_SOURCE'] = True - SOURCES += ['/nsprpub/pr/src/md/unix/linux.c'] - if CONFIG['CPU_ARCH'] == 'x86_64': - SOURCES += ['/nsprpub/pr/src/md/unix/os_Linux_x86_64.s'] - elif CONFIG['CPU_ARCH'] == 'x86': - DEFINES['i386'] = True - SOURCES += ['/nsprpub/pr/src/md/unix/os_Linux_x86.s'] - elif CONFIG['CPU_ARCH'] == 'ppc': - SOURCES += ['/nsprpub/pr/src/md/unix/os_Linux_ppc.s'] -elif CONFIG['OS_TARGET'] in ('FreeBSD', 'OpenBSD', 'NetBSD'): - DEFINES['HAVE_BSD_FLOCK'] = True - DEFINES['HAVE_SOCKLEN_T'] = True - DEFINES['HAVE_POINTER_LOCALTIME_R'] = True - DEFINES[CONFIG['OS_TARGET'].upper()] = True - SOURCES += ['/nsprpub/pr/src/md/unix/%s.c' % CONFIG['OS_TARGET'].lower()] -elif CONFIG['OS_TARGET'] == 'Darwin': - OS_LIBS += ['-framework CoreServices'] +DEFINES["_NSPR_BUILD_"] = True +if CONFIG["OS_ARCH"] == "Linux": + OS_LIBS += CONFIG["REALTIME_LIBS"] + DEFINES["LINUX"] = True + DEFINES["HAVE_FCNTL_FILE_LOCKING"] = True + DEFINES["HAVE_POINTER_LOCALTIME_R"] = True + DEFINES["_GNU_SOURCE"] = True + SOURCES += ["/nsprpub/pr/src/md/unix/linux.c"] + if CONFIG["CPU_ARCH"] == "x86_64": + SOURCES += ["/nsprpub/pr/src/md/unix/os_Linux_x86_64.s"] + elif CONFIG["CPU_ARCH"] == "x86": + DEFINES["i386"] = True + SOURCES += ["/nsprpub/pr/src/md/unix/os_Linux_x86.s"] + elif CONFIG["CPU_ARCH"] == "ppc": + SOURCES += ["/nsprpub/pr/src/md/unix/os_Linux_ppc.s"] +elif CONFIG["OS_TARGET"] in ("FreeBSD", "OpenBSD", "NetBSD"): + DEFINES["HAVE_BSD_FLOCK"] = True + DEFINES["HAVE_SOCKLEN_T"] = True + DEFINES["HAVE_POINTER_LOCALTIME_R"] = True + DEFINES[CONFIG["OS_TARGET"].upper()] = True + SOURCES += ["/nsprpub/pr/src/md/unix/%s.c" % CONFIG["OS_TARGET"].lower()] +elif CONFIG["OS_TARGET"] == "Darwin": + OS_LIBS += ["-framework CoreServices"] # See also IncreaseDescriptorLimits in toolkit/xre/nsAppRunner.cpp - DEFINES['FD_SETSIZE'] = 4096 - DEFINES['_DARWIN_UNLIMITED_SELECT'] = True - if not CONFIG['HOST_MAJOR_VERSION']: - DEFINES['HAS_CONNECTX'] = True - elif CONFIG['HOST_MAJOR_VERSION'] >= '15': - DEFINES['HAS_CONNECTX'] = True - DEFINES['DARWIN'] = True - DEFINES['HAVE_BSD_FLOCK'] = True - DEFINES['HAVE_SOCKLEN_T'] = True - DEFINES['HAVE_POINTER_LOCALTIME_R'] = True + DEFINES["FD_SETSIZE"] = 4096 + DEFINES["_DARWIN_UNLIMITED_SELECT"] = True + if not CONFIG["HOST_MAJOR_VERSION"]: + DEFINES["HAS_CONNECTX"] = True + elif CONFIG["HOST_MAJOR_VERSION"] >= "15": + DEFINES["HAS_CONNECTX"] = True + 
DEFINES["DARWIN"] = True + DEFINES["HAVE_BSD_FLOCK"] = True + DEFINES["HAVE_SOCKLEN_T"] = True + DEFINES["HAVE_POINTER_LOCALTIME_R"] = True SOURCES += [ - '/nsprpub/pr/src/md/unix/darwin.c', - '/nsprpub/pr/src/md/unix/os_Darwin.s', + "/nsprpub/pr/src/md/unix/darwin.c", + "/nsprpub/pr/src/md/unix/os_Darwin.s", ] - DEFINES['HAVE_CRT_EXTERNS_H'] = True -elif CONFIG['OS_TARGET'] == 'SunOS': - DEFINES['HAVE_FCNTL_FILE_LOCKING'] = True - DEFINES['HAVE_SOCKLEN_T'] = True - DEFINES['_PR_HAVE_OFF64_T'] = True - DEFINES['_PR_INET6'] = True - DEFINES['SOLARIS'] = True - SOURCES += ['/nsprpub/pr/src/md/unix/solaris.c'] - if CONFIG['CPU_ARCH'] == 'x86_64': - SOURCES += ['/nsprpub/pr/src/md/unix/os_SunOS_x86_64.s'] - elif CONFIG['CPU_ARCH'] == 'x86': - SOURCES += ['/nsprpub/pr/src/md/unix/os_SunOS_x86.s'] - elif CONFIG['CPU_ARCH'] == 'sparc64': - ASFLAGS += ['-x', 'assembler-with-cpp', '-D_ASM'] - SOURCES += ['/nsprpub/pr/src/md/unix/os_SunOS_sparcv9.s'] -elif CONFIG['OS_TARGET'] == 'WINNT': + DEFINES["HAVE_CRT_EXTERNS_H"] = True +elif CONFIG["OS_TARGET"] == "SunOS": + DEFINES["HAVE_FCNTL_FILE_LOCKING"] = True + DEFINES["HAVE_SOCKLEN_T"] = True + DEFINES["_PR_HAVE_OFF64_T"] = True + DEFINES["_PR_INET6"] = True + DEFINES["SOLARIS"] = True + SOURCES += ["/nsprpub/pr/src/md/unix/solaris.c"] + if CONFIG["CPU_ARCH"] == "x86_64": + SOURCES += ["/nsprpub/pr/src/md/unix/os_SunOS_x86_64.s"] + elif CONFIG["CPU_ARCH"] == "x86": + SOURCES += ["/nsprpub/pr/src/md/unix/os_SunOS_x86.s"] + elif CONFIG["CPU_ARCH"] == "sparc64": + ASFLAGS += ["-x", "assembler-with-cpp", "-D_ASM"] + SOURCES += ["/nsprpub/pr/src/md/unix/os_SunOS_sparcv9.s"] +elif CONFIG["OS_TARGET"] == "WINNT": OS_LIBS += [ - 'advapi32', - 'ws2_32', - 'mswsock', - 'winmm', + "advapi32", + "ws2_32", + "mswsock", + "winmm", ] - DEFINES['XP_PC'] = True - DEFINES['WIN32'] = True + DEFINES["XP_PC"] = True + DEFINES["WIN32"] = True # For historical reasons we use the WIN95 NSPR target instead of # WINNT. - DEFINES['WIN95'] = True - DEFINES['WINNT'] = False - DEFINES['_PR_GLOBAL_THREADS_ONLY'] = True - if not CONFIG['DEBUG']: - DEFINES['DO_NOT_WAIT_FOR_CONNECT_OVERLAPPED_OPERATIONS'] = True - if CONFIG['CPU_ARCH'] == 'x86_64': - DEFINES['_AMD64_'] = True - elif CONFIG['CPU_ARCH'] == 'x86': - DEFINES['_X86_'] = True - elif CONFIG['CPU_ARCH'] == 'aarch64': - DEFINES['_ARM64_'] = True + DEFINES["WIN95"] = True + DEFINES["WINNT"] = False + DEFINES["_PR_GLOBAL_THREADS_ONLY"] = True + if not CONFIG["DEBUG"]: + DEFINES["DO_NOT_WAIT_FOR_CONNECT_OVERLAPPED_OPERATIONS"] = True + if CONFIG["CPU_ARCH"] == "x86_64": + DEFINES["_AMD64_"] = True + elif CONFIG["CPU_ARCH"] == "x86": + DEFINES["_X86_"] = True + elif CONFIG["CPU_ARCH"] == "aarch64": + DEFINES["_ARM64_"] = True else: - error('Unsupported CPU_ARCH value: %s' % CONFIG['CPU_ARCH']) + error("Unsupported CPU_ARCH value: %s" % CONFIG["CPU_ARCH"]) else: - error('Not a supported OS_TARGET for NSPR in moz.build: "%s". Use --with-system-nspr' % CONFIG['OS_TARGET']) + error( + 'Not a supported OS_TARGET for NSPR in moz.build: "%s". 
Use --with-system-nspr' + % CONFIG["OS_TARGET"] + ) LOCAL_INCLUDES += [ - '/config/external/nspr', - '/nsprpub/pr/include', - '/nsprpub/pr/include/private', + "/config/external/nspr", + "/nsprpub/pr/include", + "/nsprpub/pr/include/private", ] SOURCES += [ - '/nsprpub/pr/src/io/prpolevt.c', # conflicting definition of PRFilePrivate - '/nsprpub/pr/src/misc/praton.c', # duplicate definition of index_hex - ] + "/nsprpub/pr/src/io/prpolevt.c", # conflicting definition of PRFilePrivate + "/nsprpub/pr/src/misc/praton.c", # duplicate definition of index_hex +] UNIFIED_SOURCES += [ - '/nsprpub/pr/src/io/prfdcach.c', - '/nsprpub/pr/src/io/priometh.c', - '/nsprpub/pr/src/io/pripv6.c', - '/nsprpub/pr/src/io/prlayer.c', - '/nsprpub/pr/src/io/prlog.c', - '/nsprpub/pr/src/io/prmapopt.c', - '/nsprpub/pr/src/io/prmmap.c', - '/nsprpub/pr/src/io/prmwait.c', - '/nsprpub/pr/src/io/prprf.c', - '/nsprpub/pr/src/io/prscanf.c', - '/nsprpub/pr/src/io/prstdio.c', - '/nsprpub/pr/src/linking/prlink.c', - '/nsprpub/pr/src/malloc/prmalloc.c', - '/nsprpub/pr/src/malloc/prmem.c', - '/nsprpub/pr/src/md/prosdep.c', - '/nsprpub/pr/src/memory/prseg.c', - '/nsprpub/pr/src/memory/prshma.c', - '/nsprpub/pr/src/misc/pratom.c', - '/nsprpub/pr/src/misc/prdtoa.c', - '/nsprpub/pr/src/misc/prenv.c', - '/nsprpub/pr/src/misc/prerr.c', - '/nsprpub/pr/src/misc/prerror.c', - '/nsprpub/pr/src/misc/prerrortable.c', - '/nsprpub/pr/src/misc/prinit.c', - '/nsprpub/pr/src/misc/prinrval.c', - '/nsprpub/pr/src/misc/pripc.c', - '/nsprpub/pr/src/misc/prlog2.c', - '/nsprpub/pr/src/misc/prnetdb.c', - '/nsprpub/pr/src/misc/prrng.c', - '/nsprpub/pr/src/misc/prsystem.c', - '/nsprpub/pr/src/misc/prtime.c', - '/nsprpub/pr/src/misc/prtpool.c', - '/nsprpub/pr/src/threads/prcmon.c', - '/nsprpub/pr/src/threads/prrwlock.c', - '/nsprpub/pr/src/threads/prtpd.c', + "/nsprpub/pr/src/io/prfdcach.c", + "/nsprpub/pr/src/io/priometh.c", + "/nsprpub/pr/src/io/pripv6.c", + "/nsprpub/pr/src/io/prlayer.c", + "/nsprpub/pr/src/io/prlog.c", + "/nsprpub/pr/src/io/prmapopt.c", + "/nsprpub/pr/src/io/prmmap.c", + "/nsprpub/pr/src/io/prmwait.c", + "/nsprpub/pr/src/io/prprf.c", + "/nsprpub/pr/src/io/prscanf.c", + "/nsprpub/pr/src/io/prstdio.c", + "/nsprpub/pr/src/linking/prlink.c", + "/nsprpub/pr/src/malloc/prmalloc.c", + "/nsprpub/pr/src/malloc/prmem.c", + "/nsprpub/pr/src/md/prosdep.c", + "/nsprpub/pr/src/memory/prseg.c", + "/nsprpub/pr/src/memory/prshma.c", + "/nsprpub/pr/src/misc/pratom.c", + "/nsprpub/pr/src/misc/prdtoa.c", + "/nsprpub/pr/src/misc/prenv.c", + "/nsprpub/pr/src/misc/prerr.c", + "/nsprpub/pr/src/misc/prerror.c", + "/nsprpub/pr/src/misc/prerrortable.c", + "/nsprpub/pr/src/misc/prinit.c", + "/nsprpub/pr/src/misc/prinrval.c", + "/nsprpub/pr/src/misc/pripc.c", + "/nsprpub/pr/src/misc/prlog2.c", + "/nsprpub/pr/src/misc/prnetdb.c", + "/nsprpub/pr/src/misc/prrng.c", + "/nsprpub/pr/src/misc/prsystem.c", + "/nsprpub/pr/src/misc/prtime.c", + "/nsprpub/pr/src/misc/prtpool.c", + "/nsprpub/pr/src/threads/prcmon.c", + "/nsprpub/pr/src/threads/prrwlock.c", + "/nsprpub/pr/src/threads/prtpd.c", ] -if CONFIG['OS_TARGET'] != 'WINNT': - DEFINES['_PR_PTHREADS'] = True +if CONFIG["OS_TARGET"] != "WINNT": + DEFINES["_PR_PTHREADS"] = True UNIFIED_SOURCES += [ - '/nsprpub/pr/src/md/unix/unix.c', - '/nsprpub/pr/src/md/unix/unix_errors.c', - '/nsprpub/pr/src/md/unix/uxproces.c', - '/nsprpub/pr/src/md/unix/uxrng.c', - '/nsprpub/pr/src/md/unix/uxshm.c', - '/nsprpub/pr/src/pthreads/ptio.c', - '/nsprpub/pr/src/pthreads/ptmisc.c', - '/nsprpub/pr/src/pthreads/ptsynch.c', - 
'/nsprpub/pr/src/pthreads/ptthread.c', + "/nsprpub/pr/src/md/unix/unix.c", + "/nsprpub/pr/src/md/unix/unix_errors.c", + "/nsprpub/pr/src/md/unix/uxproces.c", + "/nsprpub/pr/src/md/unix/uxrng.c", + "/nsprpub/pr/src/md/unix/uxshm.c", + "/nsprpub/pr/src/pthreads/ptio.c", + "/nsprpub/pr/src/pthreads/ptmisc.c", + "/nsprpub/pr/src/pthreads/ptsynch.c", + "/nsprpub/pr/src/pthreads/ptthread.c", ] else: SOURCES += [ # ntinrval.c needs windows.h without WIN32_LEAN_AND_MEAN, so it can't be # unified after any file that pulled in windows.h in lean-and-mean mode. - '/nsprpub/pr/src/md/windows/ntinrval.c', + "/nsprpub/pr/src/md/windows/ntinrval.c", # w32poll.c needs a custom value of FD_SETSIZE for winsock.h. - '/nsprpub/pr/src/md/windows/w32poll.c', + "/nsprpub/pr/src/md/windows/w32poll.c", # w95sock.c needs winsock2.h which conflicts with winsock.h. - '/nsprpub/pr/src/md/windows/w95sock.c', + "/nsprpub/pr/src/md/windows/w95sock.c", ] UNIFIED_SOURCES += [ - '/nsprpub/pr/src/io/prdir.c', - '/nsprpub/pr/src/io/prfile.c', - '/nsprpub/pr/src/io/prio.c', - '/nsprpub/pr/src/io/prsocket.c', - '/nsprpub/pr/src/md/windows/ntgc.c', - '/nsprpub/pr/src/md/windows/ntmisc.c', - '/nsprpub/pr/src/md/windows/ntsec.c', - '/nsprpub/pr/src/md/windows/ntsem.c', - '/nsprpub/pr/src/md/windows/w32ipcsem.c', - '/nsprpub/pr/src/md/windows/w32rng.c', - '/nsprpub/pr/src/md/windows/w32shm.c', - '/nsprpub/pr/src/md/windows/w95cv.c', - '/nsprpub/pr/src/md/windows/w95dllmain.c', - '/nsprpub/pr/src/md/windows/w95io.c', - '/nsprpub/pr/src/md/windows/w95thred.c', - '/nsprpub/pr/src/md/windows/win32_errors.c', - '/nsprpub/pr/src/misc/pripcsem.c', - '/nsprpub/pr/src/threads/combined/prucpu.c', - '/nsprpub/pr/src/threads/combined/prucv.c', - '/nsprpub/pr/src/threads/combined/prulock.c', - '/nsprpub/pr/src/threads/combined/prustack.c', - '/nsprpub/pr/src/threads/combined/pruthr.c', - '/nsprpub/pr/src/threads/prcthr.c', - '/nsprpub/pr/src/threads/prdump.c', - '/nsprpub/pr/src/threads/prmon.c', - '/nsprpub/pr/src/threads/prsem.c', + "/nsprpub/pr/src/io/prdir.c", + "/nsprpub/pr/src/io/prfile.c", + "/nsprpub/pr/src/io/prio.c", + "/nsprpub/pr/src/io/prsocket.c", + "/nsprpub/pr/src/md/windows/ntgc.c", + "/nsprpub/pr/src/md/windows/ntmisc.c", + "/nsprpub/pr/src/md/windows/ntsec.c", + "/nsprpub/pr/src/md/windows/ntsem.c", + "/nsprpub/pr/src/md/windows/w32ipcsem.c", + "/nsprpub/pr/src/md/windows/w32rng.c", + "/nsprpub/pr/src/md/windows/w32shm.c", + "/nsprpub/pr/src/md/windows/w95cv.c", + "/nsprpub/pr/src/md/windows/w95dllmain.c", + "/nsprpub/pr/src/md/windows/w95io.c", + "/nsprpub/pr/src/md/windows/w95thred.c", + "/nsprpub/pr/src/md/windows/win32_errors.c", + "/nsprpub/pr/src/misc/pripcsem.c", + "/nsprpub/pr/src/threads/combined/prucpu.c", + "/nsprpub/pr/src/threads/combined/prucv.c", + "/nsprpub/pr/src/threads/combined/prulock.c", + "/nsprpub/pr/src/threads/combined/prustack.c", + "/nsprpub/pr/src/threads/combined/pruthr.c", + "/nsprpub/pr/src/threads/prcthr.c", + "/nsprpub/pr/src/threads/prdump.c", + "/nsprpub/pr/src/threads/prmon.c", + "/nsprpub/pr/src/threads/prsem.c", ] EXPORTS.nspr += [ - '/nsprpub/pr/include/nspr.h', - '/nsprpub/pr/include/pratom.h', - '/nsprpub/pr/include/prbit.h', - '/nsprpub/pr/include/prclist.h', - '/nsprpub/pr/include/prcmon.h', - '/nsprpub/pr/include/prcvar.h', - '/nsprpub/pr/include/prdtoa.h', - '/nsprpub/pr/include/prenv.h', - '/nsprpub/pr/include/prerr.h', - '/nsprpub/pr/include/prerror.h', - '/nsprpub/pr/include/prinet.h', - '/nsprpub/pr/include/prinit.h', - '/nsprpub/pr/include/prinrval.h', - 
'/nsprpub/pr/include/prio.h', - '/nsprpub/pr/include/pripcsem.h', - '/nsprpub/pr/include/prlink.h', - '/nsprpub/pr/include/prlock.h', - '/nsprpub/pr/include/prlog.h', - '/nsprpub/pr/include/prlong.h', - '/nsprpub/pr/include/prmem.h', - '/nsprpub/pr/include/prmon.h', - '/nsprpub/pr/include/prmwait.h', - '/nsprpub/pr/include/prnetdb.h', - '/nsprpub/pr/include/prpdce.h', - '/nsprpub/pr/include/prprf.h', - '/nsprpub/pr/include/prproces.h', - '/nsprpub/pr/include/prrng.h', - '/nsprpub/pr/include/prrwlock.h', - '/nsprpub/pr/include/prshm.h', - '/nsprpub/pr/include/prshma.h', - '/nsprpub/pr/include/prsystem.h', - '/nsprpub/pr/include/prthread.h', - '/nsprpub/pr/include/prtime.h', - '/nsprpub/pr/include/prtpool.h', - '/nsprpub/pr/include/prtrace.h', - '/nsprpub/pr/include/prtypes.h', - '/nsprpub/pr/include/prvrsion.h', - '/nsprpub/pr/include/prwin16.h', + "/nsprpub/pr/include/nspr.h", + "/nsprpub/pr/include/pratom.h", + "/nsprpub/pr/include/prbit.h", + "/nsprpub/pr/include/prclist.h", + "/nsprpub/pr/include/prcmon.h", + "/nsprpub/pr/include/prcvar.h", + "/nsprpub/pr/include/prdtoa.h", + "/nsprpub/pr/include/prenv.h", + "/nsprpub/pr/include/prerr.h", + "/nsprpub/pr/include/prerror.h", + "/nsprpub/pr/include/prinet.h", + "/nsprpub/pr/include/prinit.h", + "/nsprpub/pr/include/prinrval.h", + "/nsprpub/pr/include/prio.h", + "/nsprpub/pr/include/pripcsem.h", + "/nsprpub/pr/include/prlink.h", + "/nsprpub/pr/include/prlock.h", + "/nsprpub/pr/include/prlog.h", + "/nsprpub/pr/include/prlong.h", + "/nsprpub/pr/include/prmem.h", + "/nsprpub/pr/include/prmon.h", + "/nsprpub/pr/include/prmwait.h", + "/nsprpub/pr/include/prnetdb.h", + "/nsprpub/pr/include/prpdce.h", + "/nsprpub/pr/include/prprf.h", + "/nsprpub/pr/include/prproces.h", + "/nsprpub/pr/include/prrng.h", + "/nsprpub/pr/include/prrwlock.h", + "/nsprpub/pr/include/prshm.h", + "/nsprpub/pr/include/prshma.h", + "/nsprpub/pr/include/prsystem.h", + "/nsprpub/pr/include/prthread.h", + "/nsprpub/pr/include/prtime.h", + "/nsprpub/pr/include/prtpool.h", + "/nsprpub/pr/include/prtrace.h", + "/nsprpub/pr/include/prtypes.h", + "/nsprpub/pr/include/prvrsion.h", + "/nsprpub/pr/include/prwin16.h", ] EXPORTS.nspr.md += [ - '/nsprpub/pr/include/md/_darwin.cfg', - '/nsprpub/pr/include/md/_freebsd.cfg', - '/nsprpub/pr/include/md/_linux.cfg', - '/nsprpub/pr/include/md/_netbsd.cfg', - '/nsprpub/pr/include/md/_openbsd.cfg', - '/nsprpub/pr/include/md/_solaris.cfg', - '/nsprpub/pr/include/md/_win95.cfg', + "/nsprpub/pr/include/md/_darwin.cfg", + "/nsprpub/pr/include/md/_freebsd.cfg", + "/nsprpub/pr/include/md/_linux.cfg", + "/nsprpub/pr/include/md/_netbsd.cfg", + "/nsprpub/pr/include/md/_openbsd.cfg", + "/nsprpub/pr/include/md/_solaris.cfg", + "/nsprpub/pr/include/md/_win95.cfg", ] EXPORTS.nspr.private += [ - '/nsprpub/pr/include/private/pprio.h', - '/nsprpub/pr/include/private/pprthred.h', - '/nsprpub/pr/include/private/prpriv.h', + "/nsprpub/pr/include/private/pprio.h", + "/nsprpub/pr/include/private/pprthred.h", + "/nsprpub/pr/include/private/prpriv.h", ] diff --git a/config/external/rlbox/moz.build b/config/external/rlbox/moz.build index d216e62cad861e..d222f530a90ae0 100644 --- a/config/external/rlbox/moz.build +++ b/config/external/rlbox/moz.build @@ -5,25 +5,23 @@ # file, You can obtain one at http://mozilla.org/MPL/2.0/. 
EXPORTS.mozilla.rlbox += [ - '/third_party/rlbox/include/rlbox.hpp', - '/third_party/rlbox/include/rlbox_conversion.hpp', - '/third_party/rlbox/include/rlbox_helpers.hpp', - '/third_party/rlbox/include/rlbox_noop_sandbox.hpp', - '/third_party/rlbox/include/rlbox_policy_types.hpp', - '/third_party/rlbox/include/rlbox_range.hpp', - '/third_party/rlbox/include/rlbox_sandbox.hpp', - '/third_party/rlbox/include/rlbox_stdlib.hpp', - '/third_party/rlbox/include/rlbox_stdlib_polyfill.hpp', - '/third_party/rlbox/include/rlbox_struct_support.hpp', - '/third_party/rlbox/include/rlbox_type_traits.hpp', - '/third_party/rlbox/include/rlbox_types.hpp', - '/third_party/rlbox/include/rlbox_unwrap.hpp', - '/third_party/rlbox/include/rlbox_wrapper_traits.hpp', - 'rlbox_config.h', + "/third_party/rlbox/include/rlbox.hpp", + "/third_party/rlbox/include/rlbox_conversion.hpp", + "/third_party/rlbox/include/rlbox_helpers.hpp", + "/third_party/rlbox/include/rlbox_noop_sandbox.hpp", + "/third_party/rlbox/include/rlbox_policy_types.hpp", + "/third_party/rlbox/include/rlbox_range.hpp", + "/third_party/rlbox/include/rlbox_sandbox.hpp", + "/third_party/rlbox/include/rlbox_stdlib.hpp", + "/third_party/rlbox/include/rlbox_stdlib_polyfill.hpp", + "/third_party/rlbox/include/rlbox_struct_support.hpp", + "/third_party/rlbox/include/rlbox_type_traits.hpp", + "/third_party/rlbox/include/rlbox_types.hpp", + "/third_party/rlbox/include/rlbox_unwrap.hpp", + "/third_party/rlbox/include/rlbox_wrapper_traits.hpp", + "rlbox_config.h", ] -SOURCES += [ - 'rlbox_thread_locals.cpp' -] +SOURCES += ["rlbox_thread_locals.cpp"] -FINAL_LIBRARY = 'xul' \ No newline at end of file +FINAL_LIBRARY = "xul" diff --git a/config/external/rlbox_lucet_sandbox/moz.build b/config/external/rlbox_lucet_sandbox/moz.build index 0eb93084bb3a68..42825246362c96 100644 --- a/config/external/rlbox_lucet_sandbox/moz.build +++ b/config/external/rlbox_lucet_sandbox/moz.build @@ -5,12 +5,10 @@ # file, You can obtain one at http://mozilla.org/MPL/2.0/. EXPORTS.mozilla.rlbox += [ - '/third_party/rust/rlbox_lucet_sandbox/include/lucet_sandbox.h', - '/third_party/rust/rlbox_lucet_sandbox/include/rlbox_lucet_sandbox.hpp', + "/third_party/rust/rlbox_lucet_sandbox/include/lucet_sandbox.h", + "/third_party/rust/rlbox_lucet_sandbox/include/rlbox_lucet_sandbox.hpp", ] -SOURCES += [ - 'rlbox_lucet_thread_locals.cpp' -] +SOURCES += ["rlbox_lucet_thread_locals.cpp"] -FINAL_LIBRARY = 'xul' \ No newline at end of file +FINAL_LIBRARY = "xul" diff --git a/config/external/sqlite/moz.build b/config/external/sqlite/moz.build index dc575ec0426d92..6294924c564ae8 100644 --- a/config/external/sqlite/moz.build +++ b/config/external/sqlite/moz.build @@ -4,15 +4,15 @@ # License, v. 2.0. If a copy of the MPL was not distributed with this # file, You can obtain one at http://mozilla.org/MPL/2.0/. -DIRS += ['../../../third_party/sqlite3/src'] -if CONFIG['MOZ_FOLD_LIBS']: - Library('sqlite') +DIRS += ["../../../third_party/sqlite3/src"] +if CONFIG["MOZ_FOLD_LIBS"]: + Library("sqlite") # When folding libraries, sqlite is actually in the nss library. 
USE_LIBS += [ - 'nss', + "nss", ] else: - SharedLibrary('sqlite') - SHARED_LIBRARY_NAME = 'mozsqlite3' + SharedLibrary("sqlite") + SHARED_LIBRARY_NAME = "mozsqlite3" - SYMBOLS_FILE = '/third_party/sqlite3/src/sqlite.symbols' + SYMBOLS_FILE = "/third_party/sqlite3/src/sqlite.symbols" diff --git a/config/external/zlib/moz.build b/config/external/zlib/moz.build index c6b7cfe7d8035d..755400fa7f86cb 100644 --- a/config/external/zlib/moz.build +++ b/config/external/zlib/moz.build @@ -4,12 +4,12 @@ # License, v. 2.0. If a copy of the MPL was not distributed with this # file, You can obtain one at http://mozilla.org/MPL/2.0/. -Library('zlib') +Library("zlib") -if CONFIG['MOZ_SYSTEM_ZLIB']: - OS_LIBS += CONFIG['MOZ_ZLIB_LIBS'] +if CONFIG["MOZ_SYSTEM_ZLIB"]: + OS_LIBS += CONFIG["MOZ_ZLIB_LIBS"] else: - if CONFIG['ZLIB_IN_MOZGLUE']: + if CONFIG["ZLIB_IN_MOZGLUE"]: # Can't do this until mozglue is handled by moz.build instead of # config/rules.mk. # USE_LIBS += [ @@ -17,5 +17,5 @@ else: # ] pass DIRS += [ - '../../../modules/zlib', + "../../../modules/zlib", ] diff --git a/config/make-stl-wrappers.py b/config/make-stl-wrappers.py index 9ab022a5b80353..5102e1984ee888 100644 --- a/config/make-stl-wrappers.py +++ b/config/make-stl-wrappers.py @@ -13,7 +13,7 @@ def gen_wrappers(unused, outdir, compiler, template_file, *header_list): - template = open(template_file, 'r').read() + template = open(template_file, "r").read() for header in header_list: with FileAvoidWrite(os.path.join(outdir, header)) as f: diff --git a/config/make-system-wrappers.py b/config/make-system-wrappers.py index 8636e244e8d175..d46ec5cc0f5abd 100644 --- a/config/make-system-wrappers.py +++ b/config/make-system-wrappers.py @@ -6,13 +6,13 @@ import os from mozbuild.util import FileAvoidWrite -header_template = '''#pragma GCC system_header +header_template = """#pragma GCC system_header #pragma GCC visibility push(default) {includes} #pragma GCC visibility pop -''' +""" -include_next_template = '#include_next <{header}>' +include_next_template = "#include_next <{header}>" # The 'unused' arg is the output file from the file_generate action. We actually @@ -21,13 +21,13 @@ def gen_wrappers(unused, outdir, *header_list): for header in header_list: with FileAvoidWrite(os.path.join(outdir, header)) as f: includes = include_next_template.format(header=header) - if header == 'wayland-util.h': + if header == "wayland-util.h": # wayland-util.h in Wayland < 1.12 includes math.h inside an # extern "C" block, which breaks including the header from C++. # This was fixed in Wayland 1.12, but for versions earlier than # that, we work around that by force-including math.h first. 
- includes = '#include <math.h>\n' + includes
- elif header == 'wayland-client.h':
+ includes = "#include <math.h>\n" + includes
+ elif header == "wayland-client.h":
# The system wayland-client.h uses quote includes for
# wayland-util.h, which means wayland-util.h is picked from the
# directory containing wayland-client.h first, and there's no
diff --git a/config/make-windows-h-wrapper.py b/config/make-windows-h-wrapper.py
index 6219d84e5b7471..b77c7a879d08d2 100644
--- a/config/make-windows-h-wrapper.py
+++ b/config/make-windows-h-wrapper.py
@@ -7,21 +7,24 @@
import textwrap
import string
-comment_re = re.compile(r'//[^\n]*\n|/\*.*\*/', re.S)
-decl_re = re.compile(r'''^(.+)\s+ # type
+comment_re = re.compile(r"//[^\n]*\n|/\*.*\*/", re.S)
+decl_re = re.compile(
+ r"""^(.+)\s+ # type
(\w+)\s* # name
(?:\((.*)\))?$ # optional param tys
- ''', re.X | re.S)
+ """,
+ re.X | re.S,
+)
def read_decls(filename):
"""Parse & yield C-style decls from an input file"""
- with open(filename, 'r') as fd:
+ with open(filename, "r") as fd:
# Strip comments from the source text.
- text = comment_re.sub('', fd.read())
+ text = comment_re.sub("", fd.read())
# Parse individual declarations.
- raw_decls = [d.strip() for d in text.split(';') if d.strip()]
+ raw_decls = [d.strip() for d in text.split(";") if d.strip()]
for raw in raw_decls:
match = decl_re.match(raw)
if match is None:
@@ -29,16 +32,16 @@ def read_decls(filename):
ty, name, params = match.groups()
if params is not None:
- params = [a.strip() for a in params.split(',') if a.strip()]
+ params = [a.strip() for a in params.split(",") if a.strip()]
yield ty, name, params
def generate(fd, consts_path, unicodes_path, template_path, compiler):
# Parse the template
- with open(template_path, 'r') as template_fd:
+ with open(template_path, "r") as template_fd:
template = string.Template(template_fd.read())
- decls = ''
+ decls = ""
# Each constant should be saved to a temporary, and then re-assigned to a
# constant with the correct name, allowing the value to be determined by
@@ -46,13 +49,17 @@ def generate(fd, consts_path, unicodes_path, template_path, compiler):
for ty, name, args in read_decls(consts_path):
assert args is None, "parameters in const decl!" 
- decls += textwrap.dedent(""" + decls += textwrap.dedent( + """ #ifdef {name} constexpr {ty} _tmp_{name} = {name}; #undef {name} constexpr {ty} {name} = _tmp_{name}; #endif - """.format(ty=ty, name=name)) + """.format( + ty=ty, name=name + ) + ) # Each unicode declaration defines a static inline function with the # correct types which calls the 'A' or 'W'-suffixed versions of the @@ -62,10 +69,11 @@ def generate(fd, consts_path, unicodes_path, template_path, compiler): assert args is not None, "argument list required for unicode decl" # Parameter & argument string list - params = ', '.join('%s a%d' % (ty, i) for i, ty in enumerate(args)) - args = ', '.join('a%d' % i for i in range(len(args))) + params = ", ".join("%s a%d" % (ty, i) for i, ty in enumerate(args)) + args = ", ".join("a%d" % i for i in range(len(args))) - decls += textwrap.dedent(""" + decls += textwrap.dedent( + """ #ifdef {name} #undef {name} static inline {ty} WINAPI @@ -78,7 +86,10 @@ def generate(fd, consts_path, unicodes_path, template_path, compiler): #endif }} #endif - """.format(ty=ty, name=name, params=params, args=args)) + """.format( + ty=ty, name=name, params=params, args=args + ) + ) # Write out the resulting file fd.write(template.substitute(decls=decls)) diff --git a/config/moz.build b/config/moz.build index 2a14de6418b89f..7651bcd111ea69 100644 --- a/config/moz.build +++ b/config/moz.build @@ -4,11 +4,11 @@ # License, v. 2.0. If a copy of the MPL was not distributed with this # file, You can obtain one at http://mozilla.org/MPL/2.0/. -with Files('**'): - BUG_COMPONENT = ('Firefox Build System', 'General') +with Files("**"): + BUG_COMPONENT = ("Firefox Build System", "General") -with Files('mozunit/**'): - BUG_COMPONENT = ('Testing', 'Python Test') +with Files("mozunit/**"): + BUG_COMPONENT = ("Testing", "Python Test") DIST_INSTALL = False # For sanity's sake, we compile nsinstall without the wrapped system @@ -16,73 +16,89 @@ DIST_INSTALL = False NoVisibilityFlags() CONFIGURE_SUBST_FILES += [ - 'tests/src-simple/Makefile', + "tests/src-simple/Makefile", ] -if CONFIG['HOST_OS_ARCH'] != 'WINNT': +if CONFIG["HOST_OS_ARCH"] != "WINNT": HOST_SOURCES += [ - 'nsinstall.c', - 'pathsub.c', + "nsinstall.c", + "pathsub.c", ] - HostProgram('nsinstall_real') + HostProgram("nsinstall_real") PYTHON_UNITTEST_MANIFESTS += [ - 'tests/python.ini', + "tests/python.ini", ] -if CONFIG['CC_TYPE'] in ('clang', 'gcc') and CONFIG['MOZ_OPTIMIZE']: - CFLAGS += ['-O3'] +if CONFIG["CC_TYPE"] in ("clang", "gcc") and CONFIG["MOZ_OPTIMIZE"]: + CFLAGS += ["-O3"] -HOST_DEFINES['UNICODE'] = True -HOST_DEFINES['_UNICODE'] = True +HOST_DEFINES["UNICODE"] = True +HOST_DEFINES["_UNICODE"] = True -include('stl-headers.mozbuild') -if CONFIG['WRAP_STL_INCLUDES']: +include("stl-headers.mozbuild") +if CONFIG["WRAP_STL_INCLUDES"]: stl_compiler = None - if CONFIG['OS_TARGET'] == 'WINNT': - stl_compiler = 'msvc' + if CONFIG["OS_TARGET"] == "WINNT": + stl_compiler = "msvc" else: - stl_compiler = 'gcc' + stl_compiler = "gcc" if stl_compiler: # Note that the 'stl_wrappers' folder is known to the build system as # containing generated files; if this is changed here then the code in # GeneratedFile.__init__ in python/mozbuild/mozbuild/frontend/data.py # might need to be updated accordingly as well. 
- template_file = SRCDIR + '/%s-stl-wrapper.template.h' % stl_compiler - output_dir = '/dist/stl_wrappers' + template_file = SRCDIR + "/%s-stl-wrapper.template.h" % stl_compiler + output_dir = "/dist/stl_wrappers" # We have to use a sentinel file as the first file because the # file_generate action will create it for us, but we want to create all # the files in gen_wrappers() - outputs = tuple(['stl.sentinel'] + ['%s/%s' % (output_dir, h) for h in stl_headers]) + outputs = tuple( + ["stl.sentinel"] + ["%s/%s" % (output_dir, h) for h in stl_headers] + ) GeneratedFile( - *outputs, script='make-stl-wrappers.py', entry_point='gen_wrappers', - flags=[TOPOBJDIR + output_dir, stl_compiler, template_file] + stl_headers) + *outputs, + script="make-stl-wrappers.py", + entry_point="gen_wrappers", + flags=[TOPOBJDIR + output_dir, stl_compiler, template_file] + stl_headers + ) # Wrap to make it easier to use correctly # NOTE: If we aren't wrapping STL includes, we're building part of the browser # which won't need this wrapper, such as L10N. Just don't try to generate the # wrapper in that case. - if CONFIG['OS_TARGET'] == 'WINNT': - GeneratedFile('/dist/stl_wrappers/windows.h', - script='make-windows-h-wrapper.py', - entry_point='generate', - inputs = ['windows-h-constant.decls.h', - 'windows-h-unicode.decls.h', - 'windows-h-wrapper.template.h'], - flags=[stl_compiler]) + if CONFIG["OS_TARGET"] == "WINNT": + GeneratedFile( + "/dist/stl_wrappers/windows.h", + script="make-windows-h-wrapper.py", + entry_point="generate", + inputs=[ + "windows-h-constant.decls.h", + "windows-h-unicode.decls.h", + "windows-h-wrapper.template.h", + ], + flags=[stl_compiler], + ) -if CONFIG['WRAP_SYSTEM_INCLUDES']: - include('system-headers.mozbuild') - output_dir = '/dist/system_wrappers' - outputs = tuple(['system-header.sentinel'] + ['%s/%s' % (output_dir, h) for h in stl_headers + system_headers]) - GeneratedFile(*outputs, script='make-system-wrappers.py', - entry_point='gen_wrappers', - flags = [TOPOBJDIR + output_dir] + stl_headers + system_headers) +if CONFIG["WRAP_SYSTEM_INCLUDES"]: + include("system-headers.mozbuild") + output_dir = "/dist/system_wrappers" + outputs = tuple( + ["system-header.sentinel"] + + ["%s/%s" % (output_dir, h) for h in stl_headers + system_headers] + ) + GeneratedFile( + *outputs, + script="make-system-wrappers.py", + entry_point="gen_wrappers", + flags=[TOPOBJDIR + output_dir] + stl_headers + system_headers + ) -if CONFIG['COMPILE_ENVIRONMENT'] and CONFIG['CBINDGEN']: +if CONFIG["COMPILE_ENVIRONMENT"] and CONFIG["CBINDGEN"]: GeneratedFile( - 'cbindgen-metadata.json', - script='/build/RunCbindgen.py', - entry_point='generate_metadata', - inputs=['!/.cargo/config']) + "cbindgen-metadata.json", + script="/build/RunCbindgen.py", + entry_point="generate_metadata", + inputs=["!/.cargo/config"], + ) diff --git a/config/mozunit/mozunit/mozunit.py b/config/mozunit/mozunit/mozunit.py index 260bbe9f3f8836..bc327e47bb489f 100644 --- a/config/mozunit/mozunit/mozunit.py +++ b/config/mozunit/mozunit/mozunit.py @@ -23,13 +23,15 @@ # buildconfig doesn't yet support Python 3, so we can use pathlib to # resolve the topsrcdir relative to our current location. 
from pathlib import Path + topsrcdir = Path(here).parents[2] except ImportError: from mozbuild.base import MozbuildObject + build = MozbuildObject.from_environment(cwd=here) topsrcdir = build.topsrcdir -'''Helper to make python unit tests report the way that the Mozilla +"""Helper to make python unit tests report the way that the Mozilla unit test infrastructure expects tests to report. Usage: @@ -38,7 +40,7 @@ if __name__ == '__main__': mozunit.main() -''' +""" class _MozTestResult(_TestResult): @@ -53,32 +55,32 @@ def getDescription(self, test): else: return str(test) - def printStatus(self, status, test, message=''): + def printStatus(self, status, test, message=""): line = "{status} | {file} | {klass}.{test}{sep}{message}".format( status=status, file=inspect.getfile(test.__class__), klass=test.__class__.__name__, test=test._testMethodName, - sep=', ' if message else '', + sep=", " if message else "", message=message, ) self.stream.writeln(line) def addSuccess(self, test): _TestResult.addSuccess(self, test) - self.printStatus('TEST-PASS', test) + self.printStatus("TEST-PASS", test) def addSkip(self, test, reason): _TestResult.addSkip(self, test, reason) - self.printStatus('TEST-SKIP', test) + self.printStatus("TEST-SKIP", test) def addExpectedFailure(self, test, err): _TestResult.addExpectedFailure(self, test, err) - self.printStatus('TEST-KNOWN-FAIL', test) + self.printStatus("TEST-KNOWN-FAIL", test) def addUnexpectedSuccess(self, test): _TestResult.addUnexpectedSuccess(self, test) - self.printStatus('TEST-UNEXPECTED-PASS', test) + self.printStatus("TEST-UNEXPECTED-PASS", test) def addError(self, test, err): _TestResult.addError(self, test, err) @@ -94,15 +96,15 @@ def addFailure(self, test, err): def printFail(self, test, err): exctype, value, tb = err - message = value or 'NO MESSAGE' - if hasattr(value, 'message'): + message = value or "NO MESSAGE" + if hasattr(value, "message"): message = value.message.splitlines()[0] # Skip test runner traceback levels while tb and self._is_relevant_tb_level(tb): tb = tb.tb_next if tb: _, ln, _ = inspect.getframeinfo(tb)[:3] - message = 'line {0}: {1}'.format(ln, message) + message = "line {0}: {1}".format(ln, message) self.printStatus("TEST-UNEXPECTED-FAIL", test, message) @@ -117,8 +119,8 @@ def run(self, test): def _mocked_file(cls): - '''Create a mocked file class that inherits from the given class. - ''' + """Create a mocked file class that inherits from the given class.""" + class MockedFile(cls): def __init__(self, context, filename, content): self.context = context @@ -143,73 +145,74 @@ def __exit__(self, type, value, traceback): def normcase(path): - ''' + """ Normalize the case of `path`. Don't use `os.path.normcase` because that also normalizes forward slashes to backslashes on Windows. - ''' - if sys.platform.startswith('win'): + """ + if sys.platform.startswith("win"): return path.lower() return path class _MockBaseOpen(object): - '''Callable that acts like the open() function; see MockedOpen for more + """Callable that acts like the open() function; see MockedOpen for more info. - ''' + """ + def __init__(self, open, files): self.open = open self.files = files - def __call__(self, name, mode='r', buffering=None, encoding=None, newline=None): + def __call__(self, name, mode="r", buffering=None, encoding=None, newline=None): # open() can be called with an integer "name" (i.e. a file descriptor). # We don't generally do this in our codebase, but internal Python # libraries sometimes do and we want to handle that cleanly. 
if isinstance(name, int): - return self.open(name, mode=mode, buffering=buffering, - encoding=encoding, newline=newline) + return self.open( + name, mode=mode, buffering=buffering, encoding=encoding, newline=newline + ) # buffering is ignored. absname = normcase(os.path.abspath(name)) - if 'w' in mode: + if "w" in mode: file = self._mocked_file(absname, mode) elif absname in self.files: content = self.files[absname] if content is None: - raise IOError(2, 'No such file or directory') + raise IOError(2, "No such file or directory") file = self._mocked_file(absname, mode, content) - elif 'a' in mode: - read_mode = 'rb' if 'b' in mode else 'r' - file = self._mocked_file( - absname, mode, self.open(name, read_mode).read()) + elif "a" in mode: + read_mode = "rb" if "b" in mode else "r" + file = self._mocked_file(absname, mode, self.open(name, read_mode).read()) else: file = self.open(name, mode) - if 'a' in mode: + if "a" in mode: file.seek(0, os.SEEK_END) return file def _mocked_file(self, name, mode, content=None): - raise NotImplementedError('subclass must implement') + raise NotImplementedError("subclass must implement") class _MockPy2Open(_MockBaseOpen): def _mocked_file(self, name, mode, content=None): - content = six.ensure_binary(content or b'') + content = six.ensure_binary(content or b"") return MockedBytesFile(self, name, content) class _MockOpen(_MockBaseOpen): def _mocked_file(self, name, mode, content=None): - if 'b' in mode: - content = six.ensure_binary(content or b'') + if "b" in mode: + content = six.ensure_binary(content or b"") return MockedBytesFile(self, name, content) else: - content = six.ensure_text(content or u'') + content = six.ensure_text(content or u"") return MockedStringFile(self, name, content) class MockedOpen(object): - ''' + """ Context manager diverting the open builtin such that opening files can open "virtual" file instances given when creating a MockedOpen. 
@@ -229,7 +232,7 @@ class MockedOpen(object): f = open('foo', 'w') f.write('foo') self.assertRaises(Exception,f.open('foo', 'r')) - ''' + """ def __init__(self, files={}): self.files = {} @@ -238,6 +241,7 @@ def __init__(self, files={}): def __enter__(self): import six.moves.builtins + self.open = six.moves.builtins.open self.io_open = io.open self._orig_path_exists = os.path.exists @@ -252,6 +256,7 @@ def __enter__(self): def __exit__(self, type, value, traceback): import six.moves.builtins + six.moves.builtins.open = self.open io.open = self.io_open os.path.exists = self._orig_path_exists @@ -259,8 +264,7 @@ def __exit__(self, type, value, traceback): os.path.isfile = self._orig_path_isfile def _wrapped_exists(self, p): - return (self._wrapped_isfile(p) or - self._wrapped_isdir(p)) + return self._wrapped_isfile(p) or self._wrapped_isdir(p) def _wrapped_isfile(self, p): p = normcase(p) @@ -275,7 +279,7 @@ def _wrapped_isfile(self, p): def _wrapped_isdir(self, p): p = normcase(p) - p = p if p.endswith(('/', '\\')) else p + os.sep + p = p if p.endswith(("/", "\\")) else p + os.sep if any(f.startswith(p) for f in self.files): return True @@ -287,24 +291,33 @@ def _wrapped_isdir(self, p): def main(*args, **kwargs): - runwith = kwargs.pop('runwith', 'pytest') + runwith = kwargs.pop("runwith", "pytest") - if runwith == 'unittest': + if runwith == "unittest": unittest.main(testRunner=MozTestRunner(), *args, **kwargs) else: args = list(args) - if os.environ.get('MACH_STDOUT_ISATTY') and not any(a.startswith('--color') for a in args): - args.append('--color=yes') - - module = __import__('__main__') - args.extend([ - '--rootdir', topsrcdir, - '-c', os.path.join(here, 'pytest.ini'), - '-vv', - '-p', 'mozlog.pytest_mozlog.plugin', - '-p', 'mozunit.pytest_plugin', - '-p', 'no:cacheprovider', - '-rsx', # show reasons for skip / xfail - module.__file__, - ]) + if os.environ.get("MACH_STDOUT_ISATTY") and not any( + a.startswith("--color") for a in args + ): + args.append("--color=yes") + + module = __import__("__main__") + args.extend( + [ + "--rootdir", + topsrcdir, + "-c", + os.path.join(here, "pytest.ini"), + "-vv", + "-p", + "mozlog.pytest_mozlog.plugin", + "-p", + "mozunit.pytest_plugin", + "-p", + "no:cacheprovider", + "-rsx", # show reasons for skip / xfail + module.__file__, + ] + ) sys.exit(pytest.main(args)) diff --git a/config/mozunit/setup.py b/config/mozunit/setup.py index d800429d57802d..b223c5116f9ca4 100644 --- a/config/mozunit/setup.py +++ b/config/mozunit/setup.py @@ -6,14 +6,16 @@ from setuptools import setup -setup(name='mozunit', - version='1.0', - description="Make unit tests report the way Mozilla infrastructure expects", - classifiers=['Programming Language :: Python :: 2.7'], - keywords='mozilla', - author='Mozilla Automation and Tools team', - author_email='tools@lists.mozilla.org', - license='MPL', - packages=['mozunit'], - include_package_data=True, - zip_safe=False) +setup( + name="mozunit", + version="1.0", + description="Make unit tests report the way Mozilla infrastructure expects", + classifiers=["Programming Language :: Python :: 2.7"], + keywords="mozilla", + author="Mozilla Automation and Tools team", + author_email="tools@lists.mozilla.org", + license="MPL", + packages=["mozunit"], + include_package_data=True, + zip_safe=False, +) diff --git a/config/nsinstall.py b/config/nsinstall.py index 1d5ead6ff867bd..49d57b795f7f39 100644 --- a/config/nsinstall.py +++ b/config/nsinstall.py @@ -24,33 +24,45 @@ def _nsinstall_internal(argv): usage = "usage: %prog [options] arg1 
[arg2 ...] target-directory" p = OptionParser(usage=usage) - p.add_option('-D', action="store_true", - help="Create a single directory only") - p.add_option('-t', action="store_true", - help="Preserve time stamp") - p.add_option('-m', action="store", - help="Set mode", metavar="mode") - p.add_option('-d', action="store_true", - help="Create directories in target") - p.add_option('-R', action="store_true", - help="Use relative symbolic links (ignored)") - p.add_option('-L', action="store", metavar="linkprefix", - help="Link prefix (ignored)") - p.add_option('-X', action="append", metavar="file", - help="Ignore a file when installing a directory recursively.") + p.add_option("-D", action="store_true", help="Create a single directory only") + p.add_option("-t", action="store_true", help="Preserve time stamp") + p.add_option("-m", action="store", help="Set mode", metavar="mode") + p.add_option("-d", action="store_true", help="Create directories in target") + p.add_option( + "-R", action="store_true", help="Use relative symbolic links (ignored)" + ) + p.add_option( + "-L", action="store", metavar="linkprefix", help="Link prefix (ignored)" + ) + p.add_option( + "-X", + action="append", + metavar="file", + help="Ignore a file when installing a directory recursively.", + ) # The remaining arguments are not used in our tree, thus they're not # implented. def BadArg(option, opt, value, parser): - parser.error('option not supported: {0}'.format(opt)) - - p.add_option('-C', action="callback", metavar="CWD", - callback=BadArg, - help="NOT SUPPORTED") - p.add_option('-o', action="callback", callback=BadArg, - help="Set owner (NOT SUPPORTED)", metavar="owner") - p.add_option('-g', action="callback", callback=BadArg, - help="Set group (NOT SUPPORTED)", metavar="group") + parser.error("option not supported: {0}".format(opt)) + + p.add_option( + "-C", action="callback", metavar="CWD", callback=BadArg, help="NOT SUPPORTED" + ) + p.add_option( + "-o", + action="callback", + callback=BadArg, + help="Set owner (NOT SUPPORTED)", + metavar="owner", + ) + p.add_option( + "-g", + action="callback", + callback=BadArg, + help="Set group (NOT SUPPORTED)", + metavar="group", + ) (options, args) = p.parse_args(argv) @@ -59,8 +71,7 @@ def BadArg(option, opt, value, parser): try: options.m = int(options.m, 8) except Exception: - sys.stderr.write('nsinstall: {0} is not a valid mode\n' - .format(options.m)) + sys.stderr.write("nsinstall: {0} is not a valid mode\n".format(options.m)) return 1 # just create one directory? @@ -68,7 +79,7 @@ def maybe_create_dir(dir, mode, try_again): dir = os.path.abspath(dir) if os.path.exists(dir): if not os.path.isdir(dir): - print('nsinstall: {0} is not a directory'.format(dir), file=sys.stderr) + print("nsinstall: {0} is not a directory".format(dir), file=sys.stderr) return 1 if mode: os.chmod(dir, mode) @@ -83,8 +94,7 @@ def maybe_create_dir(dir, mode, try_again): # We might have hit EEXIST due to a race condition (see bug 463411) -- try again once if try_again: return maybe_create_dir(dir, mode, False) - print( - "nsinstall: failed to create directory {0}: {1}".format(dir, e)) + print("nsinstall: failed to create directory {0}: {1}".format(dir, e)) return 1 else: return 0 @@ -97,7 +107,7 @@ def maybe_create_dir(dir, mode, try_again): # nsinstall arg1 [...] 
directory if len(args) < 2: - p.error('not enough arguments') + p.error("not enough arguments") def copy_all_entries(entries, target): for e in entries: @@ -117,14 +127,14 @@ def copy_all_entries(entries, target): def handleTarget(srcpath, targetpath): # target directory was already created, just use mkdir os.mkdir(targetpath) + else: # we're supposed to copy files def handleTarget(srcpath, targetpath): if os.path.isdir(srcpath): if not os.path.exists(targetpath): os.mkdir(targetpath) - entries = [os.path.join(srcpath, e) - for e in os.listdir(srcpath)] + entries = [os.path.join(srcpath, e) for e in os.listdir(srcpath)] copy_all_entries(entries, targetpath) # options.t is not relevant for directories if options.m: @@ -151,6 +161,7 @@ def handleTarget(srcpath, targetpath): copy_all_entries(args, target) return 0 + # nsinstall as a native command is always UTF-8 @@ -158,5 +169,5 @@ def nsinstall(argv): return _nsinstall_internal([six.ensure_text(arg, "utf-8") for arg in argv]) -if __name__ == '__main__': +if __name__ == "__main__": sys.exit(_nsinstall_internal(sys.argv[1:])) diff --git a/config/printprereleasesuffix.py b/config/printprereleasesuffix.py index cf8be72e459e5e..aad6e40634df6e 100644 --- a/config/printprereleasesuffix.py +++ b/config/printprereleasesuffix.py @@ -20,13 +20,17 @@ def get_prerelease_suffix(version): """ Returns the prerelease suffix from the version string argument """ def mfunc(m): - return " {0} {1} {2}".format(m.group('prefix'), - {'a': 'Alpha', 'b': 'Beta'}[m.group('c')], - m.group('suffix')) - result, c = re.subn(r'^(?P<prefix>(\d+\.)*\d+)(?P<c>[ab])(?P<suffix>\d+)$', - mfunc, version) + return " {0} {1} {2}".format( + m.group("prefix"), + {"a": "Alpha", "b": "Beta"}[m.group("c")], + m.group("suffix"), + ) + + result, c = re.subn( + r"^(?P<prefix>(\d+\.)*\d+)(?P<c>[ab])(?P<suffix>\d+)$", mfunc, version + ) if c != 1: - return '' + return "" return result diff --git a/config/rebuild_check.py b/config/rebuild_check.py index 0bb2a6612d43ef..cea589a6a34ecd 100644 --- a/config/rebuild_check.py +++ b/config/rebuild_check.py @@ -40,28 +40,30 @@ def format_filelist(filelist): limit = 5 length = len(filelist) if length < limit: - return ', '.join(filelist) + return ", ".join(filelist) truncated = filelist[:limit] remaining = length - limit - return '%s (and %d other files)' % (', '.join(truncated), remaining) + return "%s (and %d other files)" % (", ".join(truncated), remaining) newer = format_filelist(newer) removed = format_filelist(removed) if newer and removed: - print('Rebuilding %s because %s changed and %s was removed' % ( - target, newer, removed)) + print( + "Rebuilding %s because %s changed and %s was removed" + % (target, newer, removed) + ) elif newer: - print('Rebuilding %s because %s changed' % (target, newer)) + print("Rebuilding %s because %s changed" % (target, newer)) elif removed: - print('Rebuilding %s because %s was removed' % ( - target, removed)) + print("Rebuilding %s because %s was removed" % (target, removed)) else: - print('Rebuilding %s for an unknown reason' % target) + print("Rebuilding %s for an unknown reason" % target) -if __name__ == '__main__': +if __name__ == "__main__": import sys + rebuild_check(sys.argv[1:]) diff --git a/config/run-and-prefix.py b/config/run-and-prefix.py index bc96eb76f016a4..d78fceb7ff3f94 100644 --- a/config/run-and-prefix.py +++ b/config/run-and-prefix.py @@ -12,24 +12,27 @@ import subprocess import sys -sys.stdout = os.fdopen(sys.stdout.fileno(), 'wb', 0) -sys.stderr = os.fdopen(sys.stderr.fileno(), 'wb', 0) +sys.stdout =
os.fdopen(sys.stdout.fileno(), "wb", 0) +sys.stderr = os.fdopen(sys.stderr.fileno(), "wb", 0) -prefix = sys.argv[1].encode('utf-8') +prefix = sys.argv[1].encode("utf-8") args = sys.argv[2:] -p = subprocess.Popen(args, bufsize=0, - stdout=subprocess.PIPE, - stderr=subprocess.STDOUT, - stdin=sys.stdin.fileno(), - close_fds=False) +p = subprocess.Popen( + args, + bufsize=0, + stdout=subprocess.PIPE, + stderr=subprocess.STDOUT, + stdin=sys.stdin.fileno(), + close_fds=False, +) while True: data = p.stdout.readline() - if data == b'': + if data == b"": break - sys.stdout.write(b'%s> %s' % (prefix, data)) + sys.stdout.write(b"%s> %s" % (prefix, data)) sys.exit(p.wait()) diff --git a/config/run_spidermonkey_checks.py b/config/run_spidermonkey_checks.py index 9323fdf126f0df..0f842d9d7055d1 100644 --- a/config/run_spidermonkey_checks.py +++ b/config/run_spidermonkey_checks.py @@ -10,7 +10,6 @@ def main(output, lib_file, *scripts): for script in scripts: - retcode = subprocess.call( - [sys.executable, script], cwd=buildconfig.topsrcdir) + retcode = subprocess.call([sys.executable, script], cwd=buildconfig.topsrcdir) if retcode != 0: raise Exception(script + " failed") diff --git a/config/tests/src-simple/moz.build b/config/tests/src-simple/moz.build index eb4454d28f88bf..d988c0ff9b162c 100644 --- a/config/tests/src-simple/moz.build +++ b/config/tests/src-simple/moz.build @@ -4,4 +4,4 @@ # License, v. 2.0. If a copy of the MPL was not distributed with this # file, You can obtain one at http://mozilla.org/MPL/2.0/. -JAR_MANIFESTS += ['jar.mn'] \ No newline at end of file +JAR_MANIFESTS += ["jar.mn"] diff --git a/config/tests/test_mozbuild_reading.py b/config/tests/test_mozbuild_reading.py index 1f90f4b010a316..89be9618bdd7c2 100644 --- a/config/tests/test_mozbuild_reading.py +++ b/config/tests/test_mozbuild_reading.py @@ -24,20 +24,20 @@ class TestMozbuildReading(unittest.TestCase): # This hack is needed to appease running in automation. def setUp(self): self._old_env = dict(os.environ) - os.environ.pop('MOZCONFIG', None) - os.environ.pop('MOZ_OBJDIR', None) + os.environ.pop("MOZCONFIG", None) + os.environ.pop("MOZ_OBJDIR", None) def tearDown(self): os.environ.clear() os.environ.update(self._old_env) def _mozbuilds(self, reader): - if not hasattr(self, '_mozbuild_paths'): + if not hasattr(self, "_mozbuild_paths"): self._mozbuild_paths = set(reader.all_mozbuild_paths()) return self._mozbuild_paths - @unittest.skip('failing in SpiderMonkey builds') + @unittest.skip("failing in SpiderMonkey builds") def test_filesystem_traversal_reading(self): """Reading moz.build according to filesystem traversal works. @@ -61,7 +61,7 @@ def test_filesystem_traversal_no_config(self): lot of moz.build files assumes certain variables are present. """ here = os.path.abspath(os.path.dirname(__file__)) - root = os.path.normpath(os.path.join(here, '..', '..')) + root = os.path.normpath(os.path.join(here, "..", "..")) config = EmptyConfig(root) reader = BuildReader(config) all_paths = self._mozbuilds(reader) @@ -70,26 +70,26 @@ def test_filesystem_traversal_no_config(self): self.assertGreaterEqual(len(contexts), len(paths)) def test_orphan_file_patterns(self): - if sys.platform == 'win32': - raise unittest.SkipTest('failing on windows builds') + if sys.platform == "win32": + raise unittest.SkipTest("failing on windows builds") mb = MozbuildObject.from_environment(detect_virtualenv_mozinfo=False) try: config = mb.config_environment except Exception as e: - if str(e) == 'config.status not available. 
Run configure.': - raise unittest.SkipTest('failing without config.status') + if str(e) == "config.status not available. Run configure.": + raise unittest.SkipTest("failing without config.status") raise - if config.substs['MOZ_BUILD_APP'] == 'js': - raise unittest.SkipTest('failing in Spidermonkey builds') + if config.substs["MOZ_BUILD_APP"] == "js": + raise unittest.SkipTest("failing in Spidermonkey builds") reader = BuildReader(config) all_paths = self._mozbuilds(reader) _, contexts = reader.read_relevant_mozbuilds(all_paths) - finder = FileFinder(config.topsrcdir, ignore=['obj*']) + finder = FileFinder(config.topsrcdir, ignore=["obj*"]) def pattern_exists(pat): return [p for p in finder.find(pat)] != [] @@ -100,11 +100,12 @@ def pattern_exists(pat): relsrcdir = ctx.relsrcdir for p in ctx.patterns: if not pattern_exists(os.path.join(relsrcdir, p)): - self.fail("The pattern '%s' in a Files() entry in " - "'%s' corresponds to no files in the tree.\n" - "Please update this entry." % - (p, ctx.main_path)) + self.fail( + "The pattern '%s' in a Files() entry in " + "'%s' corresponds to no files in the tree.\n" + "Please update this entry." % (p, ctx.main_path) + ) -if __name__ == '__main__': +if __name__ == "__main__": main() diff --git a/config/tests/unit-mozunit.py b/config/tests/unit-mozunit.py index 90fd2bc705aef8..0c18ec7227ae01 100644 --- a/config/tests/unit-mozunit.py +++ b/config/tests/unit-mozunit.py @@ -13,75 +13,74 @@ class TestMozUnit(unittest.TestCase): def test_mocked_open(self): # Create a temporary file on the file system. (fd, path) = mkstemp() - with os.fdopen(fd, 'w') as file: - file.write('foobar') + with os.fdopen(fd, "w") as file: + file.write("foobar") - self.assertFalse(os.path.exists('file1')) - self.assertFalse(os.path.exists('file2')) + self.assertFalse(os.path.exists("file1")) + self.assertFalse(os.path.exists("file2")) - with MockedOpen({'file1': 'content1', - 'file2': 'content2'}): - self.assertTrue(os.path.exists('file1')) - self.assertTrue(os.path.exists('file2')) - self.assertFalse(os.path.exists('foo/file1')) + with MockedOpen({"file1": "content1", "file2": "content2"}): + self.assertTrue(os.path.exists("file1")) + self.assertTrue(os.path.exists("file2")) + self.assertFalse(os.path.exists("foo/file1")) # Check the contents of the files given at MockedOpen creation. - self.assertEqual(open('file1', 'r').read(), 'content1') - self.assertEqual(open('file2', 'r').read(), 'content2') + self.assertEqual(open("file1", "r").read(), "content1") + self.assertEqual(open("file2", "r").read(), "content2") # Check that overwriting these files alters their content. - with open('file1', 'w') as file: - file.write('foo') - self.assertTrue(os.path.exists('file1')) - self.assertEqual(open('file1', 'r').read(), 'foo') + with open("file1", "w") as file: + file.write("foo") + self.assertTrue(os.path.exists("file1")) + self.assertEqual(open("file1", "r").read(), "foo") # ... but not until the file is closed. 
- file = open('file2', 'w') - file.write('bar') - self.assertEqual(open('file2', 'r').read(), 'content2') + file = open("file2", "w") + file.write("bar") + self.assertEqual(open("file2", "r").read(), "content2") file.close() - self.assertEqual(open('file2', 'r').read(), 'bar') + self.assertEqual(open("file2", "r").read(), "bar") # Check that appending to a file does append - with open('file1', 'a') as file: - file.write('bar') - self.assertEqual(open('file1', 'r').read(), 'foobar') + with open("file1", "a") as file: + file.write("bar") + self.assertEqual(open("file1", "r").read(), "foobar") - self.assertFalse(os.path.exists('file3')) + self.assertFalse(os.path.exists("file3")) # Opening a non-existing file ought to fail. - self.assertRaises(IOError, open, 'file3', 'r') - self.assertFalse(os.path.exists('file3')) + self.assertRaises(IOError, open, "file3", "r") + self.assertFalse(os.path.exists("file3")) # Check that writing a new file does create the file. - with open('file3', 'w') as file: - file.write('baz') - self.assertEqual(open('file3', 'r').read(), 'baz') - self.assertTrue(os.path.exists('file3')) + with open("file3", "w") as file: + file.write("baz") + self.assertEqual(open("file3", "r").read(), "baz") + self.assertTrue(os.path.exists("file3")) # Check the content of the file created outside MockedOpen. - self.assertEqual(open(path, 'r').read(), 'foobar') + self.assertEqual(open(path, "r").read(), "foobar") # Check that overwriting a file existing on the file system # does modify its content. - with open(path, 'w') as file: - file.write('bazqux') - self.assertEqual(open(path, 'r').read(), 'bazqux') + with open(path, "w") as file: + file.write("bazqux") + self.assertEqual(open(path, "r").read(), "bazqux") with MockedOpen(): # Check that appending to a file existing on the file system # does modify its content. - with open(path, 'a') as file: - file.write('bazqux') - self.assertEqual(open(path, 'r').read(), 'foobarbazqux') + with open(path, "a") as file: + file.write("bazqux") + self.assertEqual(open(path, "r").read(), "foobarbazqux") # Check that the file was not actually modified on the file system. - self.assertEqual(open(path, 'r').read(), 'foobar') + self.assertEqual(open(path, "r").read(), "foobar") os.remove(path) # Check that the file created inside MockedOpen wasn't actually # created. 
- self.assertRaises(IOError, open, 'file3', 'r') + self.assertRaises(IOError, open, "file3", "r") if __name__ == "__main__": diff --git a/config/tests/unit-nsinstall.py b/config/tests/unit-nsinstall.py index aff31e3b7688de..7f2b3328427bf8 100644 --- a/config/tests/unit-nsinstall.py +++ b/config/tests/unit-nsinstall.py @@ -13,14 +13,17 @@ from nsinstall import nsinstall import nsinstall as nsinstall_module + NSINSTALL_PATH = nsinstall_module.__file__ # Run the non-ASCII tests on (a) Windows, or (b) any platform with # sys.stdin.encoding set to UTF-8 import codecs -RUN_NON_ASCII_TESTS = (sys.platform == "win32" or - (sys.stdin.encoding is not None and - codecs.lookup(sys.stdin.encoding) == codecs.lookup("utf-8"))) + +RUN_NON_ASCII_TESTS = sys.platform == "win32" or ( + sys.stdin.encoding is not None + and codecs.lookup(sys.stdin.encoding) == codecs.lookup("utf-8") +) class TestNsinstall(unittest.TestCase): @@ -45,7 +48,7 @@ def touch(self, file, dir=None): if dir is None: dir = self.tmpdir f = os.path.join(dir, file) - open(f, 'w').close() + open(f, "w").close() return f def mkdirs(self, dir): @@ -78,29 +81,27 @@ def test_nsinstall_basic_recursive(self): destdir = self.mkdirs("destdir") - self.assertEqual(nsinstall([sourcedir, destdir, - '-X', Xfile, - '-X', Xdir]), 0) + self.assertEqual(nsinstall([sourcedir, destdir, "-X", Xfile, "-X", Xdir]), 0) testdir = os.path.join(destdir, "sourcedir") self.assert_(os.path.isdir(testdir)) self.assert_(os.path.isfile(os.path.join(testdir, "testfile"))) self.assert_(not os.path.exists(os.path.join(testdir, "Xfile"))) self.assert_(os.path.isdir(os.path.join(testdir, "copieddir"))) - self.assert_(os.path.isfile(os.path.join( - testdir, "copieddir", "testfile2"))) + self.assert_(os.path.isfile(os.path.join(testdir, "copieddir", "testfile2"))) self.assert_(not os.path.exists(os.path.join(testdir, "Xdir"))) def test_nsinstall_multiple(self): "Test nsinstall <file> <file> <dir>" - testfiles = [self.touch("testfile1"), - self.touch("testfile2"), - self.touch("testfile3")] + testfiles = [ + self.touch("testfile1"), + self.touch("testfile2"), + self.touch("testfile3"), + ] testdir = self.mkdirs("testdir") self.assertEqual(nsinstall(testfiles + [testdir]), 0) for f in testfiles: - self.assert_(os.path.isfile(os.path.join(testdir, - os.path.basename(f)))) + self.assert_(os.path.isfile(os.path.join(testdir, os.path.basename(f)))) def test_nsinstall_dir_exists(self): "Test nsinstall <dir> <dir>, where <dir>/<dir> already exists" @@ -119,8 +120,7 @@ def test_nsinstall_t(self): self.assertEqual(nsinstall(["-t", testfile, testdir]), 0) destfile = os.path.join(testdir, "testfile") self.assert_(os.path.isfile(destfile)) - self.assertEqual(os.stat(testfile).st_mtime, - os.stat(destfile).st_mtime) + self.assertEqual(os.stat(testfile).st_mtime, os.stat(destfile).st_mtime) @unittest.skipIf(sys.platform == "win32", "Windows doesn't have real file modes") def test_nsinstall_m(self): @@ -129,12 +129,12 @@ def test_nsinstall_m(self): mode = 0o600 os.chmod(testfile, mode) testdir = self.mkdirs("testdir") - self.assertEqual(nsinstall(["-m", "{0:04o}" - .format(mode), testfile, testdir]), 0) + self.assertEqual( + nsinstall(["-m", "{0:04o}".format(mode), testfile, testdir]), 0 + ) destfile = os.path.join(testdir, "testfile") self.assert_(os.path.isfile(destfile)) - self.assertEqual(os.stat(testfile).st_mode, - os.stat(destfile).st_mode) + self.assertEqual(os.stat(testfile).st_mode, os.stat(destfile).st_mode) def test_nsinstall_d(self): "Test that nsinstall -d works (create directories in target)" @@ -151,15 +151,17 @@ def
test_nsinstall_non_ascii(self): filename = u"\u2325\u3452\u2415\u5081" testfile = self.touch(filename) testdir = self.mkdirs(u"\u4241\u1D04\u1414") - self.assertEqual(nsinstall([testfile.encode("utf-8"), - testdir.encode("utf-8")]), 0) + self.assertEqual( + nsinstall([testfile.encode("utf-8"), testdir.encode("utf-8")]), 0 + ) destfile = os.path.join(testdir, filename) self.assert_(os.path.isfile(destfile)) # Executing nsinstall.py with python 2 is not supported. - @unittest.skipIf(not RUN_NON_ASCII_TESTS or sys.version_info[0] == 2, - "Skipping non ascii tests") + @unittest.skipIf( + not RUN_NON_ASCII_TESTS or sys.version_info[0] == 2, "Skipping non ascii tests" + ) def test_nsinstall_non_ascii_subprocess(self): "Test that nsinstall as a subprocess handles non-ASCII files" filename = u"\u2325\u3452\u2415\u5081" @@ -168,9 +170,9 @@ def test_nsinstall_non_ascii_subprocess(self): # We don't use subprocess because it can't handle Unicode on # Windows . mozprocess calls # CreateProcessW directly so it's perfect. - p = processhandler.ProcessHandlerMixin([sys.executable, - NSINSTALL_PATH, - testfile, testdir]) + p = processhandler.ProcessHandlerMixin( + [sys.executable, NSINSTALL_PATH, testfile, testdir] + ) p.run() rv = p.wait() @@ -181,5 +183,5 @@ def test_nsinstall_non_ascii_subprocess(self): # TODO: implement -R, -l, -L and test them! -if __name__ == '__main__': +if __name__ == "__main__": mozunit.main() diff --git a/config/tests/unit-printprereleasesuffix.py b/config/tests/unit-printprereleasesuffix.py index 4a2382ece02973..9b68312170204d 100644 --- a/config/tests/unit-printprereleasesuffix.py +++ b/config/tests/unit-printprereleasesuffix.py @@ -13,69 +13,69 @@ class TestGetPreReleaseSuffix(unittest.TestCase): def test_alpha_1(self): """test 1a1 version string""" - self.c = get_prerelease_suffix('1a1') - self.assertEqual(self.c, ' 1 Alpha 1') + self.c = get_prerelease_suffix("1a1") + self.assertEqual(self.c, " 1 Alpha 1") def test_alpha_10(self): """test 1.2a10 version string""" - self.c = get_prerelease_suffix('1.2a10') - self.assertEqual(self.c, ' 1.2 Alpha 10') + self.c = get_prerelease_suffix("1.2a10") + self.assertEqual(self.c, " 1.2 Alpha 10") def test_beta_3(self): """test 1.2.3b3 version string""" - self.c = get_prerelease_suffix('1.2.3b3') - self.assertEqual(self.c, ' 1.2.3 Beta 3') + self.c = get_prerelease_suffix("1.2.3b3") + self.assertEqual(self.c, " 1.2.3 Beta 3") def test_beta_30(self): """test 1.2.3.4b30 version string""" - self.c = get_prerelease_suffix('1.2.3.4b30') - self.assertEqual(self.c, ' 1.2.3.4 Beta 30') + self.c = get_prerelease_suffix("1.2.3.4b30") + self.assertEqual(self.c, " 1.2.3.4 Beta 30") def test_release_1(self): """test 1.2.3.4 version string""" - self.c = get_prerelease_suffix('1.2.3.4') - self.assertEqual(self.c, '') + self.c = get_prerelease_suffix("1.2.3.4") + self.assertEqual(self.c, "") def test_alpha_1_pre(self): """test 1.2a1pre version string""" - self.c = get_prerelease_suffix('1.2a1pre') - self.assertEqual(self.c, '') + self.c = get_prerelease_suffix("1.2a1pre") + self.assertEqual(self.c, "") def test_beta_10_pre(self): """test 3.4b10pre version string""" - self.c = get_prerelease_suffix('3.4b10pre') - self.assertEqual(self.c, '') + self.c = get_prerelease_suffix("3.4b10pre") + self.assertEqual(self.c, "") def test_pre_0(self): """test 1.2pre0 version string""" - self.c = get_prerelease_suffix('1.2pre0') - self.assertEqual(self.c, '') + self.c = get_prerelease_suffix("1.2pre0") + self.assertEqual(self.c, "") def test_pre_1_b(self): """test 
1.2pre1b version string""" - self.c = get_prerelease_suffix('1.2pre1b') - self.assertEqual(self.c, '') + self.c = get_prerelease_suffix("1.2pre1b") + self.assertEqual(self.c, "") def test_a_a(self): """test 1.2aa version string""" - self.c = get_prerelease_suffix('1.2aa') - self.assertEqual(self.c, '') + self.c = get_prerelease_suffix("1.2aa") + self.assertEqual(self.c, "") def test_b_b(self): """test 1.2bb version string""" - self.c = get_prerelease_suffix('1.2bb') - self.assertEqual(self.c, '') + self.c = get_prerelease_suffix("1.2bb") + self.assertEqual(self.c, "") def test_a_b(self): """test 1.2ab version string""" - self.c = get_prerelease_suffix('1.2ab') - self.assertEqual(self.c, '') + self.c = get_prerelease_suffix("1.2ab") + self.assertEqual(self.c, "") def test_plus(self): """test 1.2+ version string """ - self.c = get_prerelease_suffix('1.2+') - self.assertEqual(self.c, '') + self.c = get_prerelease_suffix("1.2+") + self.assertEqual(self.c, "") -if __name__ == '__main__': +if __name__ == "__main__": mozunit.main() diff --git a/config/tests/unitMozZipFile.py b/config/tests/unitMozZipFile.py index e1765a2ab37bb4..53cd05dcfe9889 100644 --- a/config/tests/unitMozZipFile.py +++ b/config/tests/unitMozZipFile.py @@ -12,7 +12,7 @@ import copy from string import letters -''' +""" Test case infrastructure for MozZipFile. This isn't really a unit test, but a test case generator and runner. @@ -25,37 +25,34 @@ get tried. The content written to the jars is pseudorandom with a fixed seed. -''' +""" if not __file__: __file__ = sys.argv[0] -sys.path.append(os.path.join(os.path.dirname(__file__), '..')) +sys.path.append(os.path.join(os.path.dirname(__file__), "..")) from MozZipFile import ZipFile import zipfile -leafs = ( - 'firstdir/oneleaf', - 'seconddir/twoleaf', - 'thirddir/with/sub/threeleaf') +leafs = ("firstdir/oneleaf", "seconddir/twoleaf", "thirddir/with/sub/threeleaf") _lengths = map(lambda n: n * 64, [16, 64, 80]) lengths = 3 writes = 5 def givenlength(i): - '''Return a length given in the _lengths array to allow manual + """Return a length given in the _lengths array to allow manual tuning of which lengths of zip entries to use. - ''' + """ return _lengths[i] def prod(*iterables): - ''''Tensor product of a list of iterables. + """'Tensor product of a list of iterables. This generator returns lists of items, one of each given iterable. It iterates over all possible combinations. - ''' + """ for item in iterables[0]: if len(iterables) == 1: yield [item] @@ -65,40 +62,41 @@ def prod(*iterables): def getid(descs): - 'Convert a list of ints to a string.' - return reduce(lambda x, y: x+'{0}{1}'.format(*tuple(y)), descs, '') + "Convert a list of ints to a string." + return reduce(lambda x, y: x + "{0}{1}".format(*tuple(y)), descs, "") def getContent(length): - 'Get pseudo random content of given length.' + "Get pseudo random content of given length." 
rv = [None] * length for i in xrange(length): rv[i] = random.choice(letters) - return ''.join(rv) + return "".join(rv) def createWriter(sizer, *items): - 'Helper method to fill in tests, one set of writes, one for each item' + "Helper method to fill in tests, one set of writes, one for each item" locitems = copy.deepcopy(items) for item in locitems: - item['length'] = sizer(item.pop('length', 0)) + item["length"] = sizer(item.pop("length", 0)) def helper(self): - mode = 'w' + mode = "w" if os.path.isfile(self.f): - mode = 'a' + mode = "a" zf = ZipFile(self.f, mode, self.compression) for item in locitems: self._write(zf, **item) zf = None pass + return helper def createTester(name, *writes): - '''Helper method to fill in tests, calls into a list of write + """Helper method to fill in tests, calls into a list of write helper methods. - ''' + """ _writes = copy.copy(writes) def tester(self): @@ -106,17 +104,18 @@ def tester(self): getattr(self, w)() self._verifyZip() pass + # unit tests get confused if the method name isn't test... tester.__name__ = name return tester class TestExtensiveStored(unittest.TestCase): - '''Unit tests for MozZipFile + """Unit tests for MozZipFile The testcase are actually populated by code following the class definition. - ''' + """ stage = "mozzipfilestage" compression = zipfile.ZIP_STORED @@ -128,7 +127,7 @@ def setUp(self): if os.path.exists(self.stage): shutil.rmtree(self.stage) os.mkdir(self.stage) - self.f = self.leaf('test.jar') + self.f = self.leaf("test.jar") self.ref = {} self.seed = 0 @@ -158,10 +157,10 @@ def _write(self, zf, seed=None, leaf=0, length=0): content = getContent(length) self.ref[leaf] = content zf.writestr(leaf, content) - dir = os.path.dirname(self.leaf('stage', leaf)) + dir = os.path.dirname(self.leaf("stage", leaf)) if not os.path.isdir(dir): os.makedirs(dir) - open(self.leaf('stage', leaf), 'w').write(content) + open(self.leaf("stage", leaf), "w").write(content) # all leafs in all lengths @@ -173,15 +172,20 @@ def _write(self, zf, seed=None, leaf=0, length=0): # those are redundant as long as w < lengths. # There are symmetries in the trailing end, too, but I don't know # how to reduce those out right now. - nonatomics = [list(prod(range(min(i, len(leafs))), xrange(lengths))) - for i in xrange(1, w+1)] + [atomics] + nonatomics = [ + list(prod(range(min(i, len(leafs))), xrange(lengths))) for i in xrange(1, w + 1) + ] + [atomics] for descs in prod(*nonatomics): suffix = getid(descs) dicts = [dict(leaf=leaf, length=length) for leaf, length in descs] - setattr(TestExtensiveStored, '_write' + suffix, - createWriter(givenlength, *dicts)) - setattr(TestExtensiveStored, 'test' + suffix, - createTester('test' + suffix, '_write' + suffix)) + setattr( + TestExtensiveStored, "_write" + suffix, createWriter(givenlength, *dicts) + ) + setattr( + TestExtensiveStored, + "test" + suffix, + createTester("test" + suffix, "_write" + suffix), + ) # now create another round of tests, with two writing passes # first, write all file combinations into the jar, close it, @@ -189,22 +193,23 @@ def _write(self, zf, seed=None, leaf=0, length=0): # This should catch more or less all artifacts generated # by the final ordering step when closing the jar. 
files = [list(prod([i], xrange(lengths))) for i in xrange(len(leafs))] -allfiles = reduce(lambda l, r: l+r, - [list(prod(*files[:(i+1)])) for i in xrange(len(leafs))]) +allfiles = reduce( + lambda l, r: l + r, [list(prod(*files[: (i + 1)])) for i in xrange(len(leafs))] +) for first in allfiles: - testbasename = 'test{0}_'.format(getid(first)) - test = [None, '_write' + getid(first), None] + testbasename = "test{0}_".format(getid(first)) + test = [None, "_write" + getid(first), None] for second in atomics: test[0] = testbasename + getid([second]) - test[2] = '_write' + getid([second]) + test[2] = "_write" + getid([second]) setattr(TestExtensiveStored, test[0], createTester(*test)) class TestExtensiveDeflated(TestExtensiveStored): - 'Test all that has been tested with ZIP_STORED with DEFLATED, too.' + "Test all that has been tested with ZIP_STORED with DEFLATED, too." compression = zipfile.ZIP_DEFLATED -if __name__ == '__main__': +if __name__ == "__main__": unittest.main() diff --git a/configure.py b/configure.py index 394eadfc13cfc4..5622500a0f6f05 100644 --- a/configure.py +++ b/configure.py @@ -21,18 +21,16 @@ base_dir = os.path.abspath(os.path.dirname(__file__)) -sys.path.insert(0, os.path.join(base_dir, 'python', 'mozboot')) -sys.path.insert(0, os.path.join(base_dir, 'python', 'mozbuild')) -sys.path.insert(0, os.path.join(base_dir, 'third_party', 'python', 'six')) +sys.path.insert(0, os.path.join(base_dir, "python", "mozboot")) +sys.path.insert(0, os.path.join(base_dir, "python", "mozbuild")) +sys.path.insert(0, os.path.join(base_dir, "third_party", "python", "six")) from mozbuild.configure import ( ConfigureSandbox, TRACE, ) from mozbuild.pythonutil import iter_modules_in_path from mozbuild.backend.configenvironment import PartialConfigEnvironment -from mozbuild.util import ( - write_indented_repr, -) +from mozbuild.util import write_indented_repr import mozpack.path as mozpath import six @@ -40,46 +38,46 @@ def main(argv): config = {} - if 'OLD_CONFIGURE' not in os.environ: - os.environ['OLD_CONFIGURE'] = os.path.join(base_dir, 'old-configure') + if "OLD_CONFIGURE" not in os.environ: + os.environ["OLD_CONFIGURE"] = os.path.join(base_dir, "old-configure") sandbox = ConfigureSandbox(config, os.environ, argv) - clobber_file = 'CLOBBER' + clobber_file = "CLOBBER" if not os.path.exists(clobber_file): # Simply touch the file. 
- with open(clobber_file, 'a'): + with open(clobber_file, "a"): pass - if os.environ.get('MOZ_CONFIGURE_TRACE'): + if os.environ.get("MOZ_CONFIGURE_TRACE"): sandbox._logger.setLevel(TRACE) - sandbox.run(os.path.join(os.path.dirname(__file__), 'moz.configure')) + sandbox.run(os.path.join(os.path.dirname(__file__), "moz.configure")) if sandbox._help: return 0 - logging.getLogger('moz.configure').info('Creating config.status') + logging.getLogger("moz.configure").info("Creating config.status") - old_js_configure_substs = config.pop('OLD_JS_CONFIGURE_SUBSTS', None) - old_js_configure_defines = config.pop('OLD_JS_CONFIGURE_DEFINES', None) + old_js_configure_substs = config.pop("OLD_JS_CONFIGURE_SUBSTS", None) + old_js_configure_defines = config.pop("OLD_JS_CONFIGURE_DEFINES", None) if old_js_configure_substs or old_js_configure_defines: js_config = config.copy() pwd = os.getcwd() try: try: - os.makedirs('js/src') + os.makedirs("js/src") except OSError as e: if e.errno != errno.EEXIST: raise - os.chdir('js/src') - js_config['OLD_CONFIGURE_SUBSTS'] = old_js_configure_substs - js_config['OLD_CONFIGURE_DEFINES'] = old_js_configure_defines + os.chdir("js/src") + js_config["OLD_CONFIGURE_SUBSTS"] = old_js_configure_substs + js_config["OLD_CONFIGURE_DEFINES"] = old_js_configure_defines # The build system frontend expects $objdir/js/src/config.status # to have $objdir/js/src as topobjdir. # We want forward slashes on all platforms. - js_config['TOPOBJDIR'] += '/js/src' + js_config["TOPOBJDIR"] += "/js/src" config_status(js_config, execute=False) finally: os.chdir(pwd) @@ -88,7 +86,7 @@ def main(argv): def check_unicode(obj): - '''Recursively check that all strings in the object are unicode strings.''' + """Recursively check that all strings in the object are unicode strings.""" if isinstance(obj, dict): result = True for k, v in six.iteritems(obj): @@ -115,30 +113,38 @@ def config_status(config, execute=True): # untouched for now. def sanitize_config(v): if v is True: - return '1' + return "1" if v is False: - return '' + return "" # Serialize types that look like lists and tuples as lists. 
if not isinstance(v, (bytes, six.text_type, dict)) and isinstance(v, Iterable): return list(v) return v sanitized_config = {} - sanitized_config['substs'] = { - k: sanitize_config(v) for k, v in six.iteritems(config) - if k not in ('DEFINES', 'TOPSRCDIR', 'TOPOBJDIR', 'CONFIG_STATUS_DEPS', - 'OLD_CONFIGURE_SUBSTS', 'OLD_CONFIGURE_DEFINES') + sanitized_config["substs"] = { + k: sanitize_config(v) + for k, v in six.iteritems(config) + if k + not in ( + "DEFINES", + "TOPSRCDIR", + "TOPOBJDIR", + "CONFIG_STATUS_DEPS", + "OLD_CONFIGURE_SUBSTS", + "OLD_CONFIGURE_DEFINES", + ) } - for k, v in config['OLD_CONFIGURE_SUBSTS']: - sanitized_config['substs'][k] = sanitize_config(v) - sanitized_config['defines'] = { - k: sanitize_config(v) for k, v in six.iteritems(config['DEFINES']) + for k, v in config["OLD_CONFIGURE_SUBSTS"]: + sanitized_config["substs"][k] = sanitize_config(v) + sanitized_config["defines"] = { + k: sanitize_config(v) for k, v in six.iteritems(config["DEFINES"]) } - for k, v in config['OLD_CONFIGURE_DEFINES']: - sanitized_config['defines'][k] = sanitize_config(v) - sanitized_config['topsrcdir'] = config['TOPSRCDIR'] - sanitized_config['topobjdir'] = config['TOPOBJDIR'] - sanitized_config['mozconfig'] = config.get('MOZCONFIG') + for k, v in config["OLD_CONFIGURE_DEFINES"]: + sanitized_config["defines"][k] = sanitize_config(v) + sanitized_config["topsrcdir"] = config["TOPSRCDIR"] + sanitized_config["topobjdir"] = config["TOPOBJDIR"] + sanitized_config["mozconfig"] = config.get("MOZCONFIG") if not check_unicode(sanitized_config): print("Configuration should be all unicode.", file=sys.stderr) @@ -153,10 +159,7 @@ def sanitize_config(v): # converted to lists. def normalize(obj): if isinstance(obj, dict): - return { - k: normalize(v) - for k, v in six.iteritems(obj) - } + return {k: normalize(v) for k, v in six.iteritems(obj)} if isinstance(obj, six.text_type): return six.text_type(obj) if isinstance(obj, Iterable): @@ -168,49 +171,61 @@ def normalize(obj): # Create config.status. Eventually, we'll want to just do the work it does # here, when we're able to skip configure tests/use cached results/not rely # on autoconf. - with codecs.open('config.status', 'w', 'utf-8') as fh: - fh.write(textwrap.dedent('''\ + with codecs.open("config.status", "w", "utf-8") as fh: + fh.write( + textwrap.dedent( + """\ #!%(python)s # coding=utf-8 from __future__ import unicode_literals - ''') % {'python': config['PYTHON3']}) + """ + ) + % {"python": config["PYTHON3"]} + ) for k, v in sorted(six.iteritems(sanitized_config)): - fh.write('%s = ' % k) + fh.write("%s = " % k) write_indented_repr(fh, v) - fh.write("__all__ = ['topobjdir', 'topsrcdir', 'defines', " - "'substs', 'mozconfig']") + fh.write( + "__all__ = ['topobjdir', 'topsrcdir', 'defines', " "'substs', 'mozconfig']" + ) if execute: - fh.write(textwrap.dedent(''' + fh.write( + textwrap.dedent( + """ if __name__ == '__main__': from mozbuild.util import patch_main patch_main() from mozbuild.config_status import config_status args = dict([(name, globals()[name]) for name in __all__]) config_status(**args) - ''')) + """ + ) + ) - partial_config = PartialConfigEnvironment(config['TOPOBJDIR']) + partial_config = PartialConfigEnvironment(config["TOPOBJDIR"]) partial_config.write_vars(sanitized_config) # Write out a file so the build backend knows to re-run configure when # relevant Python changes. 
- with io.open('config_status_deps.in', 'w', encoding='utf-8', - newline='\n') as fh: + with io.open("config_status_deps.in", "w", encoding="utf-8", newline="\n") as fh: for f in sorted( - itertools.chain(config['CONFIG_STATUS_DEPS'], - iter_modules_in_path(config['TOPOBJDIR'], - config['TOPSRCDIR']))): - fh.write('%s\n' % mozpath.normpath(f)) + itertools.chain( + config["CONFIG_STATUS_DEPS"], + iter_modules_in_path(config["TOPOBJDIR"], config["TOPSRCDIR"]), + ) + ): + fh.write("%s\n" % mozpath.normpath(f)) # Other things than us are going to run this file, so we need to give it # executable permissions. - os.chmod('config.status', 0o755) + os.chmod("config.status", 0o755) if execute: from mozbuild.config_status import config_status + return config_status(args=[], **sanitized_config) return 0 -if __name__ == '__main__': +if __name__ == "__main__": sys.exit(main(sys.argv)) diff --git a/devtools/client/aboutdebugging/moz.build b/devtools/client/aboutdebugging/moz.build index 72139fa5debe63..0ab3b8061333b7 100644 --- a/devtools/client/aboutdebugging/moz.build +++ b/devtools/client/aboutdebugging/moz.build @@ -3,22 +3,18 @@ # file, You can obtain one at http://mozilla.org/MPL/2.0/. DevToolsModules( - 'aboutdebugging.js', - 'initializer.js', + "aboutdebugging.js", + "initializer.js", ) DIRS += [ - 'src', + "src", ] -XPCSHELL_TESTS_MANIFESTS += [ - 'test/xpcshell/xpcshell.ini' -] +XPCSHELL_TESTS_MANIFESTS += ["test/xpcshell/xpcshell.ini"] -BROWSER_CHROME_MANIFESTS += [ - 'test/browser/browser.ini' -] +BROWSER_CHROME_MANIFESTS += ["test/browser/browser.ini"] -with Files('**'): - BUG_COMPONENT = ('DevTools', 'about:debugging') \ No newline at end of file +with Files("**"): + BUG_COMPONENT = ("DevTools", "about:debugging") diff --git a/devtools/client/aboutdebugging/src/actions/moz.build b/devtools/client/aboutdebugging/src/actions/moz.build index 198797abfeaa9c..a750640d064d04 100644 --- a/devtools/client/aboutdebugging/src/actions/moz.build +++ b/devtools/client/aboutdebugging/src/actions/moz.build @@ -3,9 +3,9 @@ # file, You can obtain one at http://mozilla.org/MPL/2.0/. DevToolsModules( - 'debug-targets.js', - 'index.js', - 'runtimes.js', - 'telemetry.js', - 'ui.js', + "debug-targets.js", + "index.js", + "runtimes.js", + "telemetry.js", + "ui.js", ) diff --git a/devtools/client/aboutdebugging/src/components/connect/moz.build b/devtools/client/aboutdebugging/src/components/connect/moz.build index b33d65ae43413f..9228e80125ce44 100644 --- a/devtools/client/aboutdebugging/src/components/connect/moz.build +++ b/devtools/client/aboutdebugging/src/components/connect/moz.build @@ -3,9 +3,9 @@ # file, You can obtain one at http://mozilla.org/MPL/2.0/. DevToolsModules( - 'ConnectPage.js', - 'ConnectSection.js', - 'ConnectSteps.js', - 'NetworkLocationsForm.js', - 'NetworkLocationsList.js', + "ConnectPage.js", + "ConnectSection.js", + "ConnectSteps.js", + "NetworkLocationsForm.js", + "NetworkLocationsList.js", ) diff --git a/devtools/client/aboutdebugging/src/components/debugtarget/moz.build b/devtools/client/aboutdebugging/src/components/debugtarget/moz.build index 3fb31027586818..981e6887a21835 100644 --- a/devtools/client/aboutdebugging/src/components/debugtarget/moz.build +++ b/devtools/client/aboutdebugging/src/components/debugtarget/moz.build @@ -3,20 +3,20 @@ # file, You can obtain one at http://mozilla.org/MPL/2.0/. 
DevToolsModules( - 'DebugTargetItem.js', - 'DebugTargetList.js', - 'DebugTargetPane.js', - 'ExtensionDetail.js', - 'FieldPair.js', - 'InspectAction.js', - 'ProcessDetail.js', - 'ServiceWorkerAction.js', - 'ServiceWorkerAdditionalActions.js', - 'TabAction.js', - 'TabDetail.js', - 'TemporaryExtensionAdditionalActions.js', - 'TemporaryExtensionDetail.js', - 'TemporaryExtensionInstaller.js', - 'TemporaryExtensionInstallSection.js', - 'WorkerDetail.js', + "DebugTargetItem.js", + "DebugTargetList.js", + "DebugTargetPane.js", + "ExtensionDetail.js", + "FieldPair.js", + "InspectAction.js", + "ProcessDetail.js", + "ServiceWorkerAction.js", + "ServiceWorkerAdditionalActions.js", + "TabAction.js", + "TabDetail.js", + "TemporaryExtensionAdditionalActions.js", + "TemporaryExtensionDetail.js", + "TemporaryExtensionInstaller.js", + "TemporaryExtensionInstallSection.js", + "WorkerDetail.js", ) diff --git a/devtools/client/aboutdebugging/src/components/moz.build b/devtools/client/aboutdebugging/src/components/moz.build index c429da00f68b01..c48d384b3dc67a 100644 --- a/devtools/client/aboutdebugging/src/components/moz.build +++ b/devtools/client/aboutdebugging/src/components/moz.build @@ -3,19 +3,19 @@ # file, You can obtain one at http://mozilla.org/MPL/2.0/. DIRS += [ - 'connect', - 'debugtarget', - 'shared', - 'sidebar', + "connect", + "debugtarget", + "shared", + "sidebar", ] DevToolsModules( - 'App.js', - 'CompatibilityWarning.js', - 'ConnectionPromptSetting.js', - 'ProfilerDialog.js', - 'RuntimeActions.js', - 'RuntimeInfo.js', - 'RuntimePage.js', - 'ServiceWorkersWarning.js', + "App.js", + "CompatibilityWarning.js", + "ConnectionPromptSetting.js", + "ProfilerDialog.js", + "RuntimeActions.js", + "RuntimeInfo.js", + "RuntimePage.js", + "ServiceWorkersWarning.js", ) diff --git a/devtools/client/aboutdebugging/src/components/shared/moz.build b/devtools/client/aboutdebugging/src/components/shared/moz.build index 22caad270f1e5e..7e0e89f2a0073b 100644 --- a/devtools/client/aboutdebugging/src/components/shared/moz.build +++ b/devtools/client/aboutdebugging/src/components/shared/moz.build @@ -3,7 +3,7 @@ # file, You can obtain one at http://mozilla.org/MPL/2.0/. DevToolsModules( - 'DetailsLog.js', - 'IconLabel.js', - 'Message.js', + "DetailsLog.js", + "IconLabel.js", + "Message.js", ) diff --git a/devtools/client/aboutdebugging/src/components/sidebar/moz.build b/devtools/client/aboutdebugging/src/components/sidebar/moz.build index d308dbbb6d457f..081ea2a8482be4 100644 --- a/devtools/client/aboutdebugging/src/components/sidebar/moz.build +++ b/devtools/client/aboutdebugging/src/components/sidebar/moz.build @@ -3,9 +3,9 @@ # file, You can obtain one at http://mozilla.org/MPL/2.0/. DevToolsModules( - 'RefreshDevicesButton.js', - 'Sidebar.js', - 'SidebarFixedItem.js', - 'SidebarItem.js', - 'SidebarRuntimeItem.js', + "RefreshDevicesButton.js", + "Sidebar.js", + "SidebarFixedItem.js", + "SidebarItem.js", + "SidebarRuntimeItem.js", ) diff --git a/devtools/client/aboutdebugging/src/middleware/moz.build b/devtools/client/aboutdebugging/src/middleware/moz.build index a400bd509ded14..f50150f56982ba 100644 --- a/devtools/client/aboutdebugging/src/middleware/moz.build +++ b/devtools/client/aboutdebugging/src/middleware/moz.build @@ -3,11 +3,11 @@ # file, You can obtain one at http://mozilla.org/MPL/2.0/. 
DevToolsModules( - 'debug-target-listener.js', - 'error-logging.js', - 'event-recording.js', - 'extension-component-data.js', - 'process-component-data.js', - 'tab-component-data.js', - 'worker-component-data.js', + "debug-target-listener.js", + "error-logging.js", + "event-recording.js", + "extension-component-data.js", + "process-component-data.js", + "tab-component-data.js", + "worker-component-data.js", ) diff --git a/devtools/client/aboutdebugging/src/modules/moz.build b/devtools/client/aboutdebugging/src/modules/moz.build index 7b5aeeddb00090..909e1817143c61 100644 --- a/devtools/client/aboutdebugging/src/modules/moz.build +++ b/devtools/client/aboutdebugging/src/modules/moz.build @@ -3,14 +3,14 @@ # file, You can obtain one at http://mozilla.org/MPL/2.0/. DevToolsModules( - 'client-wrapper.js', - 'debug-target-collapsibilities.js', - 'debug-target-support.js', - 'extensions-helper.js', - 'l10n.js', - 'network-locations.js', - 'runtime-client-factory.js', - 'runtime-default-preferences.js', - 'runtimes-state-helper.js', - 'usb-runtimes.js', + "client-wrapper.js", + "debug-target-collapsibilities.js", + "debug-target-support.js", + "extensions-helper.js", + "l10n.js", + "network-locations.js", + "runtime-client-factory.js", + "runtime-default-preferences.js", + "runtimes-state-helper.js", + "usb-runtimes.js", ) diff --git a/devtools/client/aboutdebugging/src/moz.build b/devtools/client/aboutdebugging/src/moz.build index fbb4b77a6315f1..58e6f92857d955 100644 --- a/devtools/client/aboutdebugging/src/moz.build +++ b/devtools/client/aboutdebugging/src/moz.build @@ -3,15 +3,15 @@ # file, You can obtain one at http://mozilla.org/MPL/2.0/. DIRS += [ - 'actions', - 'components', - 'middleware', - 'modules', - 'reducers', - 'types', + "actions", + "components", + "middleware", + "modules", + "reducers", + "types", ] DevToolsModules( - 'constants.js', - 'create-store.js', + "constants.js", + "create-store.js", ) diff --git a/devtools/client/aboutdebugging/src/reducers/moz.build b/devtools/client/aboutdebugging/src/reducers/moz.build index a0531dd3e931e7..24d3382f5b036d 100644 --- a/devtools/client/aboutdebugging/src/reducers/moz.build +++ b/devtools/client/aboutdebugging/src/reducers/moz.build @@ -3,8 +3,8 @@ # file, You can obtain one at http://mozilla.org/MPL/2.0/. DevToolsModules( - 'debug-targets-state.js', - 'index.js', - 'runtimes-state.js', - 'ui-state.js', + "debug-targets-state.js", + "index.js", + "runtimes-state.js", + "ui-state.js", ) diff --git a/devtools/client/aboutdebugging/src/types/moz.build b/devtools/client/aboutdebugging/src/types/moz.build index e348104ec2ed18..a58a6e0e28ad81 100644 --- a/devtools/client/aboutdebugging/src/types/moz.build +++ b/devtools/client/aboutdebugging/src/types/moz.build @@ -3,8 +3,8 @@ # file, You can obtain one at http://mozilla.org/MPL/2.0/. DevToolsModules( - 'debug-target.js', - 'index.js', - 'runtime.js', - 'ui.js', -) \ No newline at end of file + "debug-target.js", + "index.js", + "runtime.js", + "ui.js", +) diff --git a/devtools/client/accessibility/actions/moz.build b/devtools/client/accessibility/actions/moz.build index 887fc12ab06ef2..c4571d79985f11 100644 --- a/devtools/client/accessibility/actions/moz.build +++ b/devtools/client/accessibility/actions/moz.build @@ -2,10 +2,4 @@ # License, v. 2.0. If a copy of the MPL was not distributed with this # file, You can obtain one at http://mozilla.org/MPL/2.0/. 
-DevToolsModules( - 'accessibles.js', - 'audit.js', - 'details.js', - 'simulation.js', - 'ui.js' -) +DevToolsModules("accessibles.js", "audit.js", "details.js", "simulation.js", "ui.js") diff --git a/devtools/client/accessibility/components/moz.build b/devtools/client/accessibility/components/moz.build index bae6cd9bbed190..f43ea64cd95a93 100644 --- a/devtools/client/accessibility/components/moz.build +++ b/devtools/client/accessibility/components/moz.build @@ -3,30 +3,30 @@ # file, You can obtain one at http://mozilla.org/MPL/2.0/. DevToolsModules( - 'AccessibilityPrefs.js', - 'AccessibilityRow.js', - 'AccessibilityRowValue.js', - 'AccessibilityTree.js', - 'AccessibilityTreeFilter.js', - 'Accessible.js', - 'AuditController.js', - 'AuditFilter.js', - 'AuditProgressOverlay.js', - 'Badge.js', - 'Badges.js', - 'Button.js', - 'Check.js', - 'Checks.js', - 'ColorContrastAccessibility.js', - 'ContrastBadge.js', - 'Description.js', - 'KeyboardBadge.js', - 'KeyboardCheck.js', - 'LearnMoreLink.js', - 'MainFrame.js', - 'RightSidebar.js', - 'SimulationMenuButton.js', - 'TextLabelBadge.js', - 'TextLabelCheck.js', - 'Toolbar.js' + "AccessibilityPrefs.js", + "AccessibilityRow.js", + "AccessibilityRowValue.js", + "AccessibilityTree.js", + "AccessibilityTreeFilter.js", + "Accessible.js", + "AuditController.js", + "AuditFilter.js", + "AuditProgressOverlay.js", + "Badge.js", + "Badges.js", + "Button.js", + "Check.js", + "Checks.js", + "ColorContrastAccessibility.js", + "ContrastBadge.js", + "Description.js", + "KeyboardBadge.js", + "KeyboardCheck.js", + "LearnMoreLink.js", + "MainFrame.js", + "RightSidebar.js", + "SimulationMenuButton.js", + "TextLabelBadge.js", + "TextLabelCheck.js", + "Toolbar.js", ) diff --git a/devtools/client/accessibility/moz.build b/devtools/client/accessibility/moz.build index 19f7555627fb53..0b6874dfeed7ec 100644 --- a/devtools/client/accessibility/moz.build +++ b/devtools/client/accessibility/moz.build @@ -2,24 +2,19 @@ # License, v. 2.0. If a copy of the MPL was not distributed with this # file, You can obtain one at http://mozilla.org/MPL/2.0/. -MOCHITEST_CHROME_MANIFESTS += ['test/chrome/chrome.ini'] -BROWSER_CHROME_MANIFESTS += ['test/browser/browser.ini'] +MOCHITEST_CHROME_MANIFESTS += ["test/chrome/chrome.ini"] +BROWSER_CHROME_MANIFESTS += ["test/browser/browser.ini"] -DIRS += [ - 'actions', - 'components', - 'reducers', - 'utils' -] +DIRS += ["actions", "components", "reducers", "utils"] DevToolsModules( - 'accessibility-proxy.js', - 'accessibility-view.js', - 'constants.js', - 'panel.js', - 'picker.js', - 'provider.js', + "accessibility-proxy.js", + "accessibility-view.js", + "constants.js", + "panel.js", + "picker.js", + "provider.js", ) -with Files('**'): - BUG_COMPONENT = ('DevTools', 'Accessibility Tools') +with Files("**"): + BUG_COMPONENT = ("DevTools", "Accessibility Tools") diff --git a/devtools/client/accessibility/reducers/moz.build b/devtools/client/accessibility/reducers/moz.build index 3687ab0de0f8ef..0c7398f3976a2f 100644 --- a/devtools/client/accessibility/reducers/moz.build +++ b/devtools/client/accessibility/reducers/moz.build @@ -3,10 +3,5 @@ # file, You can obtain one at http://mozilla.org/MPL/2.0/. 
DevToolsModules( - 'accessibles.js', - 'audit.js', - 'details.js', - 'index.js', - 'simulation.js', - 'ui.js' + "accessibles.js", "audit.js", "details.js", "index.js", "simulation.js", "ui.js" ) diff --git a/devtools/client/accessibility/utils/moz.build b/devtools/client/accessibility/utils/moz.build index 55fa830301b00f..8129eea8e567bb 100644 --- a/devtools/client/accessibility/utils/moz.build +++ b/devtools/client/accessibility/utils/moz.build @@ -2,7 +2,4 @@ # License, v. 2.0. If a copy of the MPL was not distributed with this # file, You can obtain one at http://mozilla.org/MPL/2.0/. -DevToolsModules( - 'audit.js', - 'l10n.js' -) +DevToolsModules("audit.js", "l10n.js") diff --git a/devtools/client/application/moz.build b/devtools/client/application/moz.build index 1d3565991ec81e..7e1b0e26a3ab68 100644 --- a/devtools/client/application/moz.build +++ b/devtools/client/application/moz.build @@ -3,18 +3,11 @@ # file, You can obtain one at http://mozilla.org/MPL/2.0/. DIRS += [ - 'src', + "src", ] -DevToolsModules( - 'initializer.js', - 'panel.js' -) +DevToolsModules("initializer.js", "panel.js") -BROWSER_CHROME_MANIFESTS += [ - 'test/browser/browser.ini' -] +BROWSER_CHROME_MANIFESTS += ["test/browser/browser.ini"] -XPCSHELL_TESTS_MANIFESTS += [ - 'test/xpcshell/xpcshell.ini' -] +XPCSHELL_TESTS_MANIFESTS += ["test/xpcshell/xpcshell.ini"] diff --git a/devtools/client/application/src/actions/moz.build b/devtools/client/application/src/actions/moz.build index 9eb9a03b7500cf..f2a41f86740a0e 100644 --- a/devtools/client/application/src/actions/moz.build +++ b/devtools/client/application/src/actions/moz.build @@ -3,9 +3,9 @@ # file, You can obtain one at http://mozilla.org/MPL/2.0/. DevToolsModules( - 'index.js', - 'manifest.js', - 'page.js', - 'ui.js', - 'workers.js', + "index.js", + "manifest.js", + "page.js", + "ui.js", + "workers.js", ) diff --git a/devtools/client/application/src/components/manifest/moz.build b/devtools/client/application/src/components/manifest/moz.build index 3d0cc72ed778e3..bb799cbfc4de20 100644 --- a/devtools/client/application/src/components/manifest/moz.build +++ b/devtools/client/application/src/components/manifest/moz.build @@ -3,16 +3,16 @@ # file, You can obtain one at http://mozilla.org/MPL/2.0/. DevToolsModules( - 'Manifest.js', - 'ManifestColorItem.js', - 'ManifestEmpty.js', - 'ManifestIconItem.js', - 'ManifestIssue.js', - 'ManifestIssueList.js', - 'ManifestItem.js', - 'ManifestJsonLink.js', - 'ManifestLoader.js', - 'ManifestPage.js', - 'ManifestSection.js', - 'ManifestUrlItem.js', + "Manifest.js", + "ManifestColorItem.js", + "ManifestEmpty.js", + "ManifestIconItem.js", + "ManifestIssue.js", + "ManifestIssueList.js", + "ManifestItem.js", + "ManifestJsonLink.js", + "ManifestLoader.js", + "ManifestPage.js", + "ManifestSection.js", + "ManifestUrlItem.js", ) diff --git a/devtools/client/application/src/components/moz.build b/devtools/client/application/src/components/moz.build index eaedf97ac7896d..361ec01204ae17 100644 --- a/devtools/client/application/src/components/moz.build +++ b/devtools/client/application/src/components/moz.build @@ -3,12 +3,12 @@ # file, You can obtain one at http://mozilla.org/MPL/2.0/. 
DIRS += [ - 'routing', - 'manifest', - 'service-workers', - 'ui', + "routing", + "manifest", + "service-workers", + "ui", ] DevToolsModules( - 'App.js', + "App.js", ) diff --git a/devtools/client/application/src/components/routing/moz.build b/devtools/client/application/src/components/routing/moz.build index 4fa4f49057c8e1..7e229856144145 100644 --- a/devtools/client/application/src/components/routing/moz.build +++ b/devtools/client/application/src/components/routing/moz.build @@ -2,8 +2,4 @@ # License, v. 2.0. If a copy of the MPL was not distributed with this # file, You can obtain one at http://mozilla.org/MPL/2.0/. -DevToolsModules( - 'PageSwitcher.js', - 'Sidebar.js', - 'SidebarItem.js' -) +DevToolsModules("PageSwitcher.js", "Sidebar.js", "SidebarItem.js") diff --git a/devtools/client/application/src/components/service-workers/moz.build b/devtools/client/application/src/components/service-workers/moz.build index 187758a5920c00..f9704b9df86417 100644 --- a/devtools/client/application/src/components/service-workers/moz.build +++ b/devtools/client/application/src/components/service-workers/moz.build @@ -3,9 +3,9 @@ # file, You can obtain one at http://mozilla.org/MPL/2.0/. DevToolsModules( - 'Registration.js', - 'RegistrationList.js', - 'RegistrationListEmpty.js', - 'Worker.js', - 'WorkersPage.js', + "Registration.js", + "RegistrationList.js", + "RegistrationListEmpty.js", + "Worker.js", + "WorkersPage.js", ) diff --git a/devtools/client/application/src/components/ui/moz.build b/devtools/client/application/src/components/ui/moz.build index d1e10ee710a5d3..f62f66d3104286 100644 --- a/devtools/client/application/src/components/ui/moz.build +++ b/devtools/client/application/src/components/ui/moz.build @@ -3,5 +3,5 @@ # file, You can obtain one at http://mozilla.org/MPL/2.0/. DevToolsModules( - 'UIButton.js', + "UIButton.js", ) diff --git a/devtools/client/application/src/middleware/moz.build b/devtools/client/application/src/middleware/moz.build index f9d14627969cbb..5041f3ca13da72 100644 --- a/devtools/client/application/src/middleware/moz.build +++ b/devtools/client/application/src/middleware/moz.build @@ -3,5 +3,5 @@ # file, You can obtain one at http://mozilla.org/MPL/2.0/. DevToolsModules( - 'event-telemetry.js', + "event-telemetry.js", ) diff --git a/devtools/client/application/src/modules/moz.build b/devtools/client/application/src/modules/moz.build index af4edb8009523d..778345fb1f96d9 100644 --- a/devtools/client/application/src/modules/moz.build +++ b/devtools/client/application/src/modules/moz.build @@ -3,6 +3,6 @@ # file, You can obtain one at http://mozilla.org/MPL/2.0/. DevToolsModules( - 'application-services.js', - 'l10n.js', + "application-services.js", + "l10n.js", ) diff --git a/devtools/client/application/src/moz.build b/devtools/client/application/src/moz.build index fbb4b77a6315f1..58e6f92857d955 100644 --- a/devtools/client/application/src/moz.build +++ b/devtools/client/application/src/moz.build @@ -3,15 +3,15 @@ # file, You can obtain one at http://mozilla.org/MPL/2.0/. 
DIRS += [ - 'actions', - 'components', - 'middleware', - 'modules', - 'reducers', - 'types', + "actions", + "components", + "middleware", + "modules", + "reducers", + "types", ] DevToolsModules( - 'constants.js', - 'create-store.js', + "constants.js", + "create-store.js", ) diff --git a/devtools/client/application/src/reducers/moz.build b/devtools/client/application/src/reducers/moz.build index a80fc82885a002..752b27a685e8fc 100644 --- a/devtools/client/application/src/reducers/moz.build +++ b/devtools/client/application/src/reducers/moz.build @@ -3,9 +3,9 @@ # file, You can obtain one at http://mozilla.org/MPL/2.0/. DevToolsModules( - 'index.js', - 'manifest-state.js', - 'page-state.js', - 'ui-state.js', - 'workers-state.js', + "index.js", + "manifest-state.js", + "page-state.js", + "ui-state.js", + "workers-state.js", ) diff --git a/devtools/client/application/src/types/moz.build b/devtools/client/application/src/types/moz.build index f3f5f0cac0c204..c8161f448d92df 100644 --- a/devtools/client/application/src/types/moz.build +++ b/devtools/client/application/src/types/moz.build @@ -3,8 +3,8 @@ # file, You can obtain one at http://mozilla.org/MPL/2.0/. DevToolsModules( - 'index.js', - 'manifest.js', - 'routing.js', - 'service-workers.js', + "index.js", + "manifest.js", + "routing.js", + "service-workers.js", ) diff --git a/devtools/client/debugger/dist/moz.build b/devtools/client/debugger/dist/moz.build index 471f1445f0077c..64187f8ff68201 100644 --- a/devtools/client/debugger/dist/moz.build +++ b/devtools/client/debugger/dist/moz.build @@ -4,8 +4,8 @@ # file, You can obtain one at http://mozilla.org/MPL/2.0/. DevToolsModules( - 'parser-worker.js', - 'pretty-print-worker.js', - 'search-worker.js', - 'vendors.js', + "parser-worker.js", + "pretty-print-worker.js", + "search-worker.js", + "vendors.js", ) diff --git a/devtools/client/debugger/moz.build b/devtools/client/debugger/moz.build index 7117321680e77f..f4922fa1b3aa59 100644 --- a/devtools/client/debugger/moz.build +++ b/devtools/client/debugger/moz.build @@ -4,20 +4,19 @@ # file, You can obtain one at http://mozilla.org/MPL/2.0/. DIRS += [ - 'dist', - 'src', + "dist", + "src", ] -include('../shared/build/node-templates.mozbuild') +include("../shared/build/node-templates.mozbuild") BROWSER_CHROME_MANIFESTS += [ - 'test/mochitest/browser.ini', + "test/mochitest/browser.ini", ] DevToolsModules( - 'panel.js', + "panel.js", ) -with Files('**'): - BUG_COMPONENT = ('DevTools', 'Debugger') - +with Files("**"): + BUG_COMPONENT = ("DevTools", "Debugger") diff --git a/devtools/client/debugger/src/actions/ast/moz.build b/devtools/client/debugger/src/actions/ast/moz.build index bc2fd001d73f1b..5b0152d2add841 100644 --- a/devtools/client/debugger/src/actions/ast/moz.build +++ b/devtools/client/debugger/src/actions/ast/moz.build @@ -3,11 +3,9 @@ # License, v. 2.0. If a copy of the MPL was not distributed with this # file, You can obtain one at http://mozilla.org/MPL/2.0/. -DIRS += [ - -] +DIRS += [] CompiledModules( - 'index.js', - 'setInScopeLines.js', + "index.js", + "setInScopeLines.js", ) diff --git a/devtools/client/debugger/src/actions/breakpoints/moz.build b/devtools/client/debugger/src/actions/breakpoints/moz.build index b71a40ba286601..b6865291992d6c 100644 --- a/devtools/client/debugger/src/actions/breakpoints/moz.build +++ b/devtools/client/debugger/src/actions/breakpoints/moz.build @@ -3,14 +3,12 @@ # License, v. 2.0. If a copy of the MPL was not distributed with this # file, You can obtain one at http://mozilla.org/MPL/2.0/. 
-DIRS += [ - -] +DIRS += [] CompiledModules( - 'breakpointPositions.js', - 'index.js', - 'modify.js', - 'remapLocations.js', - 'syncBreakpoint.js', + "breakpointPositions.js", + "index.js", + "modify.js", + "remapLocations.js", + "syncBreakpoint.js", ) diff --git a/devtools/client/debugger/src/actions/moz.build b/devtools/client/debugger/src/actions/moz.build index d107cf63170e8e..32978f266b12b8 100644 --- a/devtools/client/debugger/src/actions/moz.build +++ b/devtools/client/debugger/src/actions/moz.build @@ -4,27 +4,27 @@ # file, You can obtain one at http://mozilla.org/MPL/2.0/. DIRS += [ - 'ast', - 'breakpoints', - 'pause', - 'sources', - 'utils', + "ast", + "breakpoints", + "pause", + "sources", + "utils", ] CompiledModules( - 'event-listeners.js', - 'exceptions.js', - 'expressions.js', - 'file-search.js', - 'index.js', - 'navigation.js', - 'preview.js', - 'project-text-search.js', - 'quick-open.js', - 'source-actors.js', - 'source-tree.js', - 'tabs.js', - 'toolbox.js', - 'threads.js', - 'ui.js', + "event-listeners.js", + "exceptions.js", + "expressions.js", + "file-search.js", + "index.js", + "navigation.js", + "preview.js", + "project-text-search.js", + "quick-open.js", + "source-actors.js", + "source-tree.js", + "tabs.js", + "toolbox.js", + "threads.js", + "ui.js", ) diff --git a/devtools/client/debugger/src/actions/pause/moz.build b/devtools/client/debugger/src/actions/pause/moz.build index 6a926cb7bf76c3..ab509b9d16780e 100644 --- a/devtools/client/debugger/src/actions/pause/moz.build +++ b/devtools/client/debugger/src/actions/pause/moz.build @@ -3,27 +3,25 @@ # License, v. 2.0. If a copy of the MPL was not distributed with this # file, You can obtain one at http://mozilla.org/MPL/2.0/. -DIRS += [ - -] +DIRS += [] CompiledModules( - 'breakOnNext.js', - 'commands.js', - 'continueToHere.js', - 'expandScopes.js', - 'fetchFrames.js', - 'fetchScopes.js', - 'index.js', - 'inlinePreview.js', - 'mapDisplayNames.js', - 'mapFrames.js', - 'mapScopes.js', - 'paused.js', - 'pauseOnExceptions.js', - 'previewPausedLocation.js', - 'resumed.js', - 'selectFrame.js', - 'highlightCalls.js', - 'skipPausing.js', + "breakOnNext.js", + "commands.js", + "continueToHere.js", + "expandScopes.js", + "fetchFrames.js", + "fetchScopes.js", + "index.js", + "inlinePreview.js", + "mapDisplayNames.js", + "mapFrames.js", + "mapScopes.js", + "paused.js", + "pauseOnExceptions.js", + "previewPausedLocation.js", + "resumed.js", + "selectFrame.js", + "highlightCalls.js", + "skipPausing.js", ) diff --git a/devtools/client/debugger/src/actions/sources/moz.build b/devtools/client/debugger/src/actions/sources/moz.build index 24ce2829f14cf9..9972e9f09b973f 100644 --- a/devtools/client/debugger/src/actions/sources/moz.build +++ b/devtools/client/debugger/src/actions/sources/moz.build @@ -3,17 +3,15 @@ # License, v. 2.0. If a copy of the MPL was not distributed with this # file, You can obtain one at http://mozilla.org/MPL/2.0/. 
-DIRS += [ - -] +DIRS += [] CompiledModules( - 'blackbox.js', - 'breakableLines.js', - 'index.js', - 'loadSourceText.js', - 'newSources.js', - 'prettyPrint.js', - 'select.js', - 'symbols.js' + "blackbox.js", + "breakableLines.js", + "index.js", + "loadSourceText.js", + "newSources.js", + "prettyPrint.js", + "select.js", + "symbols.js", ) diff --git a/devtools/client/debugger/src/actions/utils/middleware/moz.build b/devtools/client/debugger/src/actions/utils/middleware/moz.build index 77097a5a385287..0dcf3118d806b7 100644 --- a/devtools/client/debugger/src/actions/utils/middleware/moz.build +++ b/devtools/client/debugger/src/actions/utils/middleware/moz.build @@ -3,16 +3,14 @@ # License, v. 2.0. If a copy of the MPL was not distributed with this # file, You can obtain one at http://mozilla.org/MPL/2.0/. -DIRS += [ - -] +DIRS += [] CompiledModules( - 'context.js', - 'history.js', - 'log.js', - 'promise.js', - 'thunk.js', - 'timing.js', - 'wait-service.js', + "context.js", + "history.js", + "log.js", + "promise.js", + "thunk.js", + "timing.js", + "wait-service.js", ) diff --git a/devtools/client/debugger/src/actions/utils/moz.build b/devtools/client/debugger/src/actions/utils/moz.build index bd1580d4d5772d..08a43a218ce573 100644 --- a/devtools/client/debugger/src/actions/utils/moz.build +++ b/devtools/client/debugger/src/actions/utils/moz.build @@ -4,9 +4,9 @@ # file, You can obtain one at http://mozilla.org/MPL/2.0/. DIRS += [ - 'middleware', + "middleware", ] CompiledModules( - 'create-store.js', + "create-store.js", ) diff --git a/devtools/client/debugger/src/client/firefox/moz.build b/devtools/client/debugger/src/client/firefox/moz.build index 2a9cedee927223..dfbb77fc678187 100644 --- a/devtools/client/debugger/src/client/firefox/moz.build +++ b/devtools/client/debugger/src/client/firefox/moz.build @@ -3,12 +3,10 @@ # License, v. 2.0. If a copy of the MPL was not distributed with this # file, You can obtain one at http://mozilla.org/MPL/2.0/. -DIRS += [ - -] +DIRS += [] CompiledModules( - 'commands.js', - 'create.js', - 'events.js', + "commands.js", + "create.js", + "events.js", ) diff --git a/devtools/client/debugger/src/client/moz.build b/devtools/client/debugger/src/client/moz.build index 196ea10bad3298..818f791e1bde81 100644 --- a/devtools/client/debugger/src/client/moz.build +++ b/devtools/client/debugger/src/client/moz.build @@ -4,10 +4,10 @@ # file, You can obtain one at http://mozilla.org/MPL/2.0/. DIRS += [ - 'firefox', + "firefox", ] CompiledModules( - 'firefox.js', - 'index.js', + "firefox.js", + "index.js", ) diff --git a/devtools/client/debugger/src/components/Editor/Preview/moz.build b/devtools/client/debugger/src/components/Editor/Preview/moz.build index 3e77d88561bb04..362faadc42ce15 100644 --- a/devtools/client/debugger/src/components/Editor/Preview/moz.build +++ b/devtools/client/debugger/src/components/Editor/Preview/moz.build @@ -3,12 +3,10 @@ # License, v. 2.0. If a copy of the MPL was not distributed with this # file, You can obtain one at http://mozilla.org/MPL/2.0/. -DIRS += [ - -] +DIRS += [] CompiledModules( - 'ExceptionPopup.js', - 'index.js', - 'Popup.js', + "ExceptionPopup.js", + "index.js", + "Popup.js", ) diff --git a/devtools/client/debugger/src/components/Editor/menus/moz.build b/devtools/client/debugger/src/components/Editor/menus/moz.build index 779cd711b3a0bc..18009aa2db00e4 100644 --- a/devtools/client/debugger/src/components/Editor/menus/moz.build +++ b/devtools/client/debugger/src/components/Editor/menus/moz.build @@ -3,12 +3,10 @@ # License, v. 2.0. 
If a copy of the MPL was not distributed with this # file, You can obtain one at http://mozilla.org/MPL/2.0/. -DIRS += [ - -] +DIRS += [] CompiledModules( - 'breakpoints.js', - 'editor.js', - 'source.js', + "breakpoints.js", + "editor.js", + "source.js", ) diff --git a/devtools/client/debugger/src/components/Editor/moz.build b/devtools/client/debugger/src/components/Editor/moz.build index 4797ea870187b7..7b9437386fcf1c 100644 --- a/devtools/client/debugger/src/components/Editor/moz.build +++ b/devtools/client/debugger/src/components/Editor/moz.build @@ -4,30 +4,30 @@ # file, You can obtain one at http://mozilla.org/MPL/2.0/. DIRS += [ - 'menus', - 'Preview', + "menus", + "Preview", ] CompiledModules( - 'Breakpoint.js', - 'Breakpoints.js', - 'ColumnBreakpoint.js', - 'ColumnBreakpoints.js', - 'ConditionalPanel.js', - 'DebugLine.js', - 'EditorMenu.js', - 'EmptyLines.js', - 'Exception.js', - 'Exceptions.js', - 'Footer.js', - 'HighlightCalls.js', - 'HighlightLine.js', - 'HighlightLines.js', - 'index.js', - 'InlinePreview.js', - 'InlinePreviewRow.js', - 'InlinePreviews.js', - 'SearchBar.js', - 'Tab.js', - 'Tabs.js', + "Breakpoint.js", + "Breakpoints.js", + "ColumnBreakpoint.js", + "ColumnBreakpoints.js", + "ConditionalPanel.js", + "DebugLine.js", + "EditorMenu.js", + "EmptyLines.js", + "Exception.js", + "Exceptions.js", + "Footer.js", + "HighlightCalls.js", + "HighlightLine.js", + "HighlightLines.js", + "index.js", + "InlinePreview.js", + "InlinePreviewRow.js", + "InlinePreviews.js", + "SearchBar.js", + "Tab.js", + "Tabs.js", ) diff --git a/devtools/client/debugger/src/components/PrimaryPanes/moz.build b/devtools/client/debugger/src/components/PrimaryPanes/moz.build index 74109f29921cec..e0886041964c82 100644 --- a/devtools/client/debugger/src/components/PrimaryPanes/moz.build +++ b/devtools/client/debugger/src/components/PrimaryPanes/moz.build @@ -3,14 +3,12 @@ # License, v. 2.0. If a copy of the MPL was not distributed with this # file, You can obtain one at http://mozilla.org/MPL/2.0/. -DIRS += [ - -] +DIRS += [] CompiledModules( - 'index.js', - 'Outline.js', - 'OutlineFilter.js', - 'SourcesTree.js', - 'SourcesTreeItem.js', + "index.js", + "Outline.js", + "OutlineFilter.js", + "SourcesTree.js", + "SourcesTreeItem.js", ) diff --git a/devtools/client/debugger/src/components/SecondaryPanes/Breakpoints/moz.build b/devtools/client/debugger/src/components/SecondaryPanes/Breakpoints/moz.build index 41c50f67965d58..2b075efdd46a62 100644 --- a/devtools/client/debugger/src/components/SecondaryPanes/Breakpoints/moz.build +++ b/devtools/client/debugger/src/components/SecondaryPanes/Breakpoints/moz.build @@ -3,15 +3,13 @@ # License, v. 2.0. If a copy of the MPL was not distributed with this # file, You can obtain one at http://mozilla.org/MPL/2.0/. -DIRS += [ - -] +DIRS += [] CompiledModules( - 'Breakpoint.js', - 'BreakpointHeading.js', - 'BreakpointHeadingsContextMenu.js', - 'BreakpointsContextMenu.js', - 'ExceptionOption.js', - 'index.js', + "Breakpoint.js", + "BreakpointHeading.js", + "BreakpointHeadingsContextMenu.js", + "BreakpointsContextMenu.js", + "ExceptionOption.js", + "index.js", ) diff --git a/devtools/client/debugger/src/components/SecondaryPanes/Frames/moz.build b/devtools/client/debugger/src/components/SecondaryPanes/Frames/moz.build index 7855fc896ba66a..f775363b14dc02 100644 --- a/devtools/client/debugger/src/components/SecondaryPanes/Frames/moz.build +++ b/devtools/client/debugger/src/components/SecondaryPanes/Frames/moz.build @@ -3,14 +3,12 @@ # License, v. 2.0. 
If a copy of the MPL was not distributed with this # file, You can obtain one at http://mozilla.org/MPL/2.0/. -DIRS += [ - -] +DIRS += [] CompiledModules( - 'Frame.js', - 'FrameIndent.js', - 'FrameMenu.js', - 'Group.js', - 'index.js', + "Frame.js", + "FrameIndent.js", + "FrameMenu.js", + "Group.js", + "index.js", ) diff --git a/devtools/client/debugger/src/components/SecondaryPanes/moz.build b/devtools/client/debugger/src/components/SecondaryPanes/moz.build index 664d2b80d84bd9..33cfa2e316b35b 100644 --- a/devtools/client/debugger/src/components/SecondaryPanes/moz.build +++ b/devtools/client/debugger/src/components/SecondaryPanes/moz.build @@ -4,19 +4,19 @@ # file, You can obtain one at http://mozilla.org/MPL/2.0/. DIRS += [ - 'Breakpoints', - 'Frames', + "Breakpoints", + "Frames", ] CompiledModules( - 'CommandBar.js', - 'DOMMutationBreakpoints.js', - 'EventListeners.js', - 'Expressions.js', - 'index.js', - 'Scopes.js', - 'Thread.js', - 'Threads.js', - 'WhyPaused.js', - 'XHRBreakpoints.js', + "CommandBar.js", + "DOMMutationBreakpoints.js", + "EventListeners.js", + "Expressions.js", + "index.js", + "Scopes.js", + "Thread.js", + "Threads.js", + "WhyPaused.js", + "XHRBreakpoints.js", ) diff --git a/devtools/client/debugger/src/components/moz.build b/devtools/client/debugger/src/components/moz.build index 8630b23e784147..cf7841897315ee 100644 --- a/devtools/client/debugger/src/components/moz.build +++ b/devtools/client/debugger/src/components/moz.build @@ -4,17 +4,17 @@ # file, You can obtain one at http://mozilla.org/MPL/2.0/. DIRS += [ - 'Editor', - 'PrimaryPanes', - 'SecondaryPanes', - 'shared', + "Editor", + "PrimaryPanes", + "SecondaryPanes", + "shared", ] CompiledModules( - 'A11yIntention.js', - 'App.js', - 'ProjectSearch.js', - 'QuickOpenModal.js', - 'ShortcutsModal.js', - 'WelcomeBox.js', + "A11yIntention.js", + "App.js", + "ProjectSearch.js", + "QuickOpenModal.js", + "ShortcutsModal.js", + "WelcomeBox.js", ) diff --git a/devtools/client/debugger/src/components/shared/Button/moz.build b/devtools/client/debugger/src/components/shared/Button/moz.build index 324c2bf57a0206..c6e652d5dc357e 100644 --- a/devtools/client/debugger/src/components/shared/Button/moz.build +++ b/devtools/client/debugger/src/components/shared/Button/moz.build @@ -4,13 +4,12 @@ # file, You can obtain one at http://mozilla.org/MPL/2.0/. DIRS += [ - 'styles', + "styles", ] CompiledModules( - 'CloseButton.js', - 'CommandBarButton.js', - 'index.js', - 'PaneToggleButton.js', + "CloseButton.js", + "CommandBarButton.js", + "index.js", + "PaneToggleButton.js", ) - diff --git a/devtools/client/debugger/src/components/shared/Button/styles/moz.build b/devtools/client/debugger/src/components/shared/Button/styles/moz.build index 3ebd4a0c9b4e3d..7d80140dbedf7e 100644 --- a/devtools/client/debugger/src/components/shared/Button/styles/moz.build +++ b/devtools/client/debugger/src/components/shared/Button/styles/moz.build @@ -3,10 +3,6 @@ # License, v. 2.0. If a copy of the MPL was not distributed with this # file, You can obtain one at http://mozilla.org/MPL/2.0/. -DIRS += [ +DIRS += [] -] - -CompiledModules( - -) +CompiledModules() diff --git a/devtools/client/debugger/src/components/shared/moz.build b/devtools/client/debugger/src/components/shared/moz.build index 104fb3f7c3221f..97fc3e54656c62 100644 --- a/devtools/client/debugger/src/components/shared/moz.build +++ b/devtools/client/debugger/src/components/shared/moz.build @@ -4,21 +4,21 @@ # file, You can obtain one at http://mozilla.org/MPL/2.0/. 
DIRS += [ - 'Button', + "Button", ] CompiledModules( - 'AccessibleImage.js', - 'Accordion.js', - 'Badge.js', - 'BracketArrow.js', - 'Dropdown.js', - 'ManagedTree.js', - 'Modal.js', - 'Popover.js', - 'PreviewFunction.js', - 'ResultList.js', - 'SearchInput.js', - 'SourceIcon.js', - 'SmartGap.js', + "AccessibleImage.js", + "Accordion.js", + "Badge.js", + "BracketArrow.js", + "Dropdown.js", + "ManagedTree.js", + "Modal.js", + "Popover.js", + "PreviewFunction.js", + "ResultList.js", + "SearchInput.js", + "SourceIcon.js", + "SmartGap.js", ) diff --git a/devtools/client/debugger/src/context-menu/moz.build b/devtools/client/debugger/src/context-menu/moz.build index 285566ddf5636a..48089353f1f916 100644 --- a/devtools/client/debugger/src/context-menu/moz.build +++ b/devtools/client/debugger/src/context-menu/moz.build @@ -4,5 +4,5 @@ # file, You can obtain one at http://mozilla.org/MPL/2.0/. CompiledModules( - 'menu.js', + "menu.js", ) diff --git a/devtools/client/debugger/src/moz.build b/devtools/client/debugger/src/moz.build index af54547e379e15..af57683ac64e53 100644 --- a/devtools/client/debugger/src/moz.build +++ b/devtools/client/debugger/src/moz.build @@ -4,17 +4,17 @@ # file, You can obtain one at http://mozilla.org/MPL/2.0/. DIRS += [ - 'actions', - 'client', - 'components', - 'context-menu', - 'reducers', - 'selectors', - 'utils', - 'workers', + "actions", + "client", + "components", + "context-menu", + "reducers", + "selectors", + "utils", + "workers", ] CompiledModules( - 'main.js', - 'vendors.js', + "main.js", + "vendors.js", ) diff --git a/devtools/client/debugger/src/reducers/moz.build b/devtools/client/debugger/src/reducers/moz.build index 6924de443d620c..bada00cbfcbeb7 100644 --- a/devtools/client/debugger/src/reducers/moz.build +++ b/devtools/client/debugger/src/reducers/moz.build @@ -3,28 +3,26 @@ # License, v. 2.0. If a copy of the MPL was not distributed with this # file, You can obtain one at http://mozilla.org/MPL/2.0/. -DIRS += [ - -] +DIRS += [] CompiledModules( - 'ast.js', - 'async-requests.js', - 'breakpoints.js', - 'event-listeners.js', - 'exceptions.js', - 'expressions.js', - 'file-search.js', - 'index.js', - 'pause.js', - 'pending-breakpoints.js', - 'preview.js', - 'project-text-search.js', - 'quick-open.js', - 'source-actors.js', - 'source-tree.js', - 'sources.js', - 'tabs.js', - 'threads.js', - 'ui.js', + "ast.js", + "async-requests.js", + "breakpoints.js", + "event-listeners.js", + "exceptions.js", + "expressions.js", + "file-search.js", + "index.js", + "pause.js", + "pending-breakpoints.js", + "preview.js", + "project-text-search.js", + "quick-open.js", + "source-actors.js", + "source-tree.js", + "sources.js", + "tabs.js", + "threads.js", + "ui.js", ) diff --git a/devtools/client/debugger/src/selectors/moz.build b/devtools/client/debugger/src/selectors/moz.build index 0033cc75e7c2aa..c3db09bc4b34ef 100644 --- a/devtools/client/debugger/src/selectors/moz.build +++ b/devtools/client/debugger/src/selectors/moz.build @@ -3,20 +3,18 @@ # License, v. 2.0. If a copy of the MPL was not distributed with this # file, You can obtain one at http://mozilla.org/MPL/2.0/. 
-DIRS += [ - -] +DIRS += [] CompiledModules( - 'breakpointAtLocation.js', - 'breakpoints.js', - 'breakpointSources.js', - 'getCallStackFrames.js', - 'inComponent.js', - 'index.js', - 'isLineInScope.js', - 'isSelectedFrameVisible.js', - 'pause.js', - 'visibleBreakpoints.js', - 'visibleColumnBreakpoints.js', + "breakpointAtLocation.js", + "breakpoints.js", + "breakpointSources.js", + "getCallStackFrames.js", + "inComponent.js", + "index.js", + "isLineInScope.js", + "isSelectedFrameVisible.js", + "pause.js", + "visibleBreakpoints.js", + "visibleColumnBreakpoints.js", ) diff --git a/devtools/client/debugger/src/utils/breakpoint/moz.build b/devtools/client/debugger/src/utils/breakpoint/moz.build index ae2a60f590e3d7..24f31beb711a12 100644 --- a/devtools/client/debugger/src/utils/breakpoint/moz.build +++ b/devtools/client/debugger/src/utils/breakpoint/moz.build @@ -3,12 +3,10 @@ # License, v. 2.0. If a copy of the MPL was not distributed with this # file, You can obtain one at http://mozilla.org/MPL/2.0/. -DIRS += [ - -] +DIRS += [] CompiledModules( - 'astBreakpointLocation.js', - 'breakpointPositions.js', - 'index.js', + "astBreakpointLocation.js", + "breakpointPositions.js", + "index.js", ) diff --git a/devtools/client/debugger/src/utils/editor/moz.build b/devtools/client/debugger/src/utils/editor/moz.build index 2727b06ffc463f..655c0dae43ec31 100644 --- a/devtools/client/debugger/src/utils/editor/moz.build +++ b/devtools/client/debugger/src/utils/editor/moz.build @@ -3,17 +3,15 @@ # License, v. 2.0. If a copy of the MPL was not distributed with this # file, You can obtain one at http://mozilla.org/MPL/2.0/. -DIRS += [ - -] +DIRS += [] CompiledModules( - 'create-editor.js', - 'get-expression.js', - 'get-token-location.js', - 'index.js', - 'source-documents.js', - 'source-editor.js', - 'source-search.js', - 'token-events.js', + "create-editor.js", + "get-expression.js", + "get-token-location.js", + "index.js", + "source-documents.js", + "source-editor.js", + "source-search.js", + "token-events.js", ) diff --git a/devtools/client/debugger/src/utils/moz.build b/devtools/client/debugger/src/utils/moz.build index 4cf3d393b2d542..eb3479978f7fe2 100644 --- a/devtools/client/debugger/src/utils/moz.build +++ b/devtools/client/debugger/src/utils/moz.build @@ -4,53 +4,53 @@ # file, You can obtain one at http://mozilla.org/MPL/2.0/. 
DIRS += [ - 'breakpoint', - 'editor', - 'pause', - 'resource', - 'sources-tree', + "breakpoint", + "editor", + "pause", + "resource", + "sources-tree", ] CompiledModules( - 'assert.js', - 'ast.js', - 'async-value.js', - 'bootstrap.js', - 'build-query.js', - 'clipboard.js', - 'connect.js', - 'context.js', - 'dbg.js', - 'defer.js', - 'DevToolsUtils.js', - 'expressions.js', - 'evaluation-result.js', - 'function.js', - 'indentation.js', - 'isMinified.js', - 'location.js', - 'log.js', - 'memoize.js', - 'memoizeLast.js', - 'memoizableAction.js', - 'path.js', - 'prefs.js', - 'preview.js', - 'project-search.js', - 'quick-open.js', - 'result-list.js', - 'selected-location.js', - 'source-maps.js', - 'source-queue.js', - 'source.js', - 'tabs.js', - 'task.js', - 'telemetry.js', - 'text.js', - 'timings.js', - 'ui.js', - 'url.js', - 'utils.js', - 'wasm.js', - 'worker.js', + "assert.js", + "ast.js", + "async-value.js", + "bootstrap.js", + "build-query.js", + "clipboard.js", + "connect.js", + "context.js", + "dbg.js", + "defer.js", + "DevToolsUtils.js", + "expressions.js", + "evaluation-result.js", + "function.js", + "indentation.js", + "isMinified.js", + "location.js", + "log.js", + "memoize.js", + "memoizeLast.js", + "memoizableAction.js", + "path.js", + "prefs.js", + "preview.js", + "project-search.js", + "quick-open.js", + "result-list.js", + "selected-location.js", + "source-maps.js", + "source-queue.js", + "source.js", + "tabs.js", + "task.js", + "telemetry.js", + "text.js", + "timings.js", + "ui.js", + "url.js", + "utils.js", + "wasm.js", + "worker.js", ) diff --git a/devtools/client/debugger/src/utils/pause/frames/moz.build b/devtools/client/debugger/src/utils/pause/frames/moz.build index 98e5dfdde9177d..5bb330a57f334d 100644 --- a/devtools/client/debugger/src/utils/pause/frames/moz.build +++ b/devtools/client/debugger/src/utils/pause/frames/moz.build @@ -3,15 +3,13 @@ # License, v. 2.0. If a copy of the MPL was not distributed with this # file, You can obtain one at http://mozilla.org/MPL/2.0/. -DIRS += [ - -] +DIRS += [] CompiledModules( - 'annotateFrames.js', - 'collapseFrames.js', - 'displayName.js', - 'getFrameUrl.js', - 'getLibraryFromUrl.js', - 'index.js', + "annotateFrames.js", + "collapseFrames.js", + "displayName.js", + "getFrameUrl.js", + "getLibraryFromUrl.js", + "index.js", ) diff --git a/devtools/client/debugger/src/utils/pause/mapScopes/moz.build b/devtools/client/debugger/src/utils/pause/mapScopes/moz.build index e701760a42f3ab..05f2b7e3d89634 100644 --- a/devtools/client/debugger/src/utils/pause/mapScopes/moz.build +++ b/devtools/client/debugger/src/utils/pause/mapScopes/moz.build @@ -3,19 +3,17 @@ # License, v. 2.0. If a copy of the MPL was not distributed with this # file, You can obtain one at http://mozilla.org/MPL/2.0/. 
-DIRS += [ - -] +DIRS += [] CompiledModules( - 'buildGeneratedBindingList.js', - 'filtering.js', - 'findGeneratedBindingFromPosition.js', - 'getApplicableBindingsForOriginalPosition.js', - 'index.js', - 'locColumn.js', - 'mappingContains.js', - 'optimizedOut.js', - 'positionCmp.js', - 'rangeMetadata.js', + "buildGeneratedBindingList.js", + "filtering.js", + "findGeneratedBindingFromPosition.js", + "getApplicableBindingsForOriginalPosition.js", + "index.js", + "locColumn.js", + "mappingContains.js", + "optimizedOut.js", + "positionCmp.js", + "rangeMetadata.js", ) diff --git a/devtools/client/debugger/src/utils/pause/moz.build b/devtools/client/debugger/src/utils/pause/moz.build index c5c0b22159106f..e0705d3115f461 100644 --- a/devtools/client/debugger/src/utils/pause/moz.build +++ b/devtools/client/debugger/src/utils/pause/moz.build @@ -4,12 +4,12 @@ # file, You can obtain one at http://mozilla.org/MPL/2.0/. DIRS += [ - 'frames', - 'mapScopes', - 'scopes', + "frames", + "mapScopes", + "scopes", ] CompiledModules( - 'index.js', - 'why.js', + "index.js", + "why.js", ) diff --git a/devtools/client/debugger/src/utils/pause/scopes/moz.build b/devtools/client/debugger/src/utils/pause/scopes/moz.build index 0e44a6feea825c..059d187e3d7d2b 100644 --- a/devtools/client/debugger/src/utils/pause/scopes/moz.build +++ b/devtools/client/debugger/src/utils/pause/scopes/moz.build @@ -3,13 +3,11 @@ # License, v. 2.0. If a copy of the MPL was not distributed with this # file, You can obtain one at http://mozilla.org/MPL/2.0/. -DIRS += [ - -] +DIRS += [] CompiledModules( - 'getScope.js', - 'getVariables.js', - 'index.js', - 'utils.js', + "getScope.js", + "getVariables.js", + "index.js", + "utils.js", ) diff --git a/devtools/client/debugger/src/utils/resource/moz.build b/devtools/client/debugger/src/utils/resource/moz.build index d201158b108431..7fa8b2a810f60a 100644 --- a/devtools/client/debugger/src/utils/resource/moz.build +++ b/devtools/client/debugger/src/utils/resource/moz.build @@ -3,16 +3,15 @@ # License, v. 2.0. If a copy of the MPL was not distributed with this # file, You can obtain one at http://mozilla.org/MPL/2.0/. -DIRS += [ -] +DIRS += [] CompiledModules( - 'base-query.js', - 'compare.js', - 'core.js', - 'index.js', - 'memoize.js', - 'query-cache.js', - 'query.js', - 'selector.js', + "base-query.js", + "compare.js", + "core.js", + "index.js", + "memoize.js", + "query-cache.js", + "query.js", + "selector.js", ) diff --git a/devtools/client/debugger/src/utils/sources-tree/moz.build b/devtools/client/debugger/src/utils/sources-tree/moz.build index 0288b56d311aef..f1439a46e31ffe 100644 --- a/devtools/client/debugger/src/utils/sources-tree/moz.build +++ b/devtools/client/debugger/src/utils/sources-tree/moz.build @@ -3,19 +3,17 @@ # License, v. 2.0. If a copy of the MPL was not distributed with this # file, You can obtain one at http://mozilla.org/MPL/2.0/. 
-DIRS += [ - -] +DIRS += [] CompiledModules( - 'addToTree.js', - 'collapseTree.js', - 'formatTree.js', - 'getDirectories.js', - 'getURL.js', - 'index.js', - 'sortTree.js', - 'treeOrder.js', - 'updateTree.js', - 'utils.js', + "addToTree.js", + "collapseTree.js", + "formatTree.js", + "getDirectories.js", + "getURL.js", + "index.js", + "sortTree.js", + "treeOrder.js", + "updateTree.js", + "utils.js", ) diff --git a/devtools/client/debugger/src/workers/moz.build b/devtools/client/debugger/src/workers/moz.build index 659379c30c46eb..12327bf177204c 100644 --- a/devtools/client/debugger/src/workers/moz.build +++ b/devtools/client/debugger/src/workers/moz.build @@ -4,11 +4,9 @@ # file, You can obtain one at http://mozilla.org/MPL/2.0/. DIRS += [ - 'parser', - 'pretty-print', - 'search', + "parser", + "pretty-print", + "search", ] -CompiledModules( - -) +CompiledModules() diff --git a/devtools/client/debugger/src/workers/parser/moz.build b/devtools/client/debugger/src/workers/parser/moz.build index 244c4f2b980dc1..b7223ac81ae4f7 100644 --- a/devtools/client/debugger/src/workers/parser/moz.build +++ b/devtools/client/debugger/src/workers/parser/moz.build @@ -3,10 +3,8 @@ # License, v. 2.0. If a copy of the MPL was not distributed with this # file, You can obtain one at http://mozilla.org/MPL/2.0/. -DIRS += [ - -] +DIRS += [] CompiledModules( - 'index.js', + "index.js", ) diff --git a/devtools/client/debugger/src/workers/pretty-print/moz.build b/devtools/client/debugger/src/workers/pretty-print/moz.build index 244c4f2b980dc1..b7223ac81ae4f7 100644 --- a/devtools/client/debugger/src/workers/pretty-print/moz.build +++ b/devtools/client/debugger/src/workers/pretty-print/moz.build @@ -3,10 +3,8 @@ # License, v. 2.0. If a copy of the MPL was not distributed with this # file, You can obtain one at http://mozilla.org/MPL/2.0/. -DIRS += [ - -] +DIRS += [] CompiledModules( - 'index.js', + "index.js", ) diff --git a/devtools/client/debugger/src/workers/search/moz.build b/devtools/client/debugger/src/workers/search/moz.build index 244c4f2b980dc1..b7223ac81ae4f7 100644 --- a/devtools/client/debugger/src/workers/search/moz.build +++ b/devtools/client/debugger/src/workers/search/moz.build @@ -3,10 +3,8 @@ # License, v. 2.0. If a copy of the MPL was not distributed with this # file, You can obtain one at http://mozilla.org/MPL/2.0/. -DIRS += [ - -] +DIRS += [] CompiledModules( - 'index.js', + "index.js", ) diff --git a/devtools/client/dom/content/actions/moz.build b/devtools/client/dom/content/actions/moz.build index 6454c00ccb1e1f..8c1c56d39af82e 100644 --- a/devtools/client/dom/content/actions/moz.build +++ b/devtools/client/dom/content/actions/moz.build @@ -4,6 +4,6 @@ # file, You can obtain one at http://mozilla.org/MPL/2.0/. DevToolsModules( - 'filter.js', - 'grips.js', + "filter.js", + "grips.js", ) diff --git a/devtools/client/dom/content/components/moz.build b/devtools/client/dom/content/components/moz.build index 8b3f47cb57e86f..5b669bac249239 100644 --- a/devtools/client/dom/content/components/moz.build +++ b/devtools/client/dom/content/components/moz.build @@ -3,8 +3,4 @@ # License, v. 2.0. If a copy of the MPL was not distributed with this # file, You can obtain one at http://mozilla.org/MPL/2.0/. 
-DevToolsModules( - 'DomTree.js', - 'MainFrame.js', - 'MainToolbar.js' -) +DevToolsModules("DomTree.js", "MainFrame.js", "MainToolbar.js") diff --git a/devtools/client/dom/content/moz.build b/devtools/client/dom/content/moz.build index ddf6dbca32a74f..04e0ac4efca425 100644 --- a/devtools/client/dom/content/moz.build +++ b/devtools/client/dom/content/moz.build @@ -4,15 +4,15 @@ # file, You can obtain one at http://mozilla.org/MPL/2.0/. DIRS += [ - 'actions', - 'components', - 'reducers', + "actions", + "components", + "reducers", ] DevToolsModules( - 'constants.js', - 'dom-decorator.js', - 'dom-view.js', - 'grip-provider.js', - 'utils.js', + "constants.js", + "dom-decorator.js", + "dom-view.js", + "grip-provider.js", + "utils.js", ) diff --git a/devtools/client/dom/content/reducers/moz.build b/devtools/client/dom/content/reducers/moz.build index 0a00b3feb91d55..8d98444e345d23 100644 --- a/devtools/client/dom/content/reducers/moz.build +++ b/devtools/client/dom/content/reducers/moz.build @@ -4,7 +4,7 @@ # file, You can obtain one at http://mozilla.org/MPL/2.0/. DevToolsModules( - 'filter.js', - 'grips.js', - 'index.js', + "filter.js", + "grips.js", + "index.js", ) diff --git a/devtools/client/dom/moz.build b/devtools/client/dom/moz.build index 0fb09d9e6b2125..6e4380260a907b 100644 --- a/devtools/client/dom/moz.build +++ b/devtools/client/dom/moz.build @@ -3,15 +3,15 @@ # License, v. 2.0. If a copy of the MPL was not distributed with this # file, You can obtain one at http://mozilla.org/MPL/2.0/. -BROWSER_CHROME_MANIFESTS += ['test/browser.ini'] +BROWSER_CHROME_MANIFESTS += ["test/browser.ini"] DIRS += [ - 'content', + "content", ] DevToolsModules( - 'panel.js', + "panel.js", ) -with Files('**'): - BUG_COMPONENT = ('DevTools', 'DOM') +with Files("**"): + BUG_COMPONENT = ("DevTools", "DOM") diff --git a/devtools/client/framework/actions/moz.build b/devtools/client/framework/actions/moz.build index bacd0b70578f50..53f955faa61809 100644 --- a/devtools/client/framework/actions/moz.build +++ b/devtools/client/framework/actions/moz.build @@ -3,10 +3,10 @@ # file, You can obtain one at http://mozilla.org/MPL/2.0/. DevToolsModules( - 'dom-mutation-breakpoints.js', - 'index.js', - 'targets.js', + "dom-mutation-breakpoints.js", + "index.js", + "targets.js", ) -with Files('**'): - BUG_COMPONENT = ('DevTools', 'Framework') +with Files("**"): + BUG_COMPONENT = ("DevTools", "Framework") diff --git a/devtools/client/framework/browser-toolbox/moz.build b/devtools/client/framework/browser-toolbox/moz.build index 7af402f4aaade8..35954c1f2b24da 100644 --- a/devtools/client/framework/browser-toolbox/moz.build +++ b/devtools/client/framework/browser-toolbox/moz.build @@ -5,9 +5,9 @@ # file, You can obtain one at http://mozilla.org/MPL/2.0/. 
BROWSER_CHROME_MANIFESTS += [ - 'test/browser.ini', + "test/browser.ini", ] DevToolsModules( - 'Launcher.jsm', + "Launcher.jsm", ) diff --git a/devtools/client/framework/components/moz.build b/devtools/client/framework/components/moz.build index 3cde71701ea88b..9499fa6294d516 100644 --- a/devtools/client/framework/components/moz.build +++ b/devtools/client/framework/components/moz.build @@ -6,11 +6,11 @@ DevToolsModules( - 'DebugTargetErrorPage.js', - 'DebugTargetInfo.js', - 'MeatballMenu.js', - 'ToolboxController.js', - 'ToolboxTab.js', - 'ToolboxTabs.js', - 'ToolboxToolbar.js', + "DebugTargetErrorPage.js", + "DebugTargetInfo.js", + "MeatballMenu.js", + "ToolboxController.js", + "ToolboxTab.js", + "ToolboxTabs.js", + "ToolboxToolbar.js", ) diff --git a/devtools/client/framework/moz.build b/devtools/client/framework/moz.build index f723994bbe3e40..7d17a7386d9f24 100644 --- a/devtools/client/framework/moz.build +++ b/devtools/client/framework/moz.build @@ -5,45 +5,45 @@ # file, You can obtain one at http://mozilla.org/MPL/2.0/. BROWSER_CHROME_MANIFESTS += [ - 'test/allocations/browser_allocations_target.ini', - 'test/browser-enable-popup-devtools-user.ini', - 'test/browser-enable-popup-new-user.ini', - 'test/browser-telemetry-startup.ini', - 'test/browser.ini', - 'test/metrics/browser_metrics_debugger.ini', - 'test/metrics/browser_metrics_inspector.ini', - 'test/metrics/browser_metrics_netmonitor.ini', - 'test/metrics/browser_metrics_webconsole.ini', + "test/allocations/browser_allocations_target.ini", + "test/browser-enable-popup-devtools-user.ini", + "test/browser-enable-popup-new-user.ini", + "test/browser-telemetry-startup.ini", + "test/browser.ini", + "test/metrics/browser_metrics_debugger.ini", + "test/metrics/browser_metrics_inspector.ini", + "test/metrics/browser_metrics_netmonitor.ini", + "test/metrics/browser_metrics_webconsole.ini", ] -XPCSHELL_TESTS_MANIFESTS += ['test/xpcshell/xpcshell.ini'] +XPCSHELL_TESTS_MANIFESTS += ["test/xpcshell/xpcshell.ini"] DIRS += [ - 'actions', - 'browser-toolbox', - 'components', - 'reducers', + "actions", + "browser-toolbox", + "components", + "reducers", ] DevToolsModules( - 'browser-menus.js', - 'devtools-browser.js', - 'devtools.js', - 'enable-devtools-popup.js', - 'menu-item.js', - 'menu.js', - 'selection.js', - 'source-map-url-service.js', - 'store-provider.js', - 'store.js', - 'target-from-url.js', - 'target.js', - 'toolbox-context-menu.js', - 'toolbox-host-manager.js', - 'toolbox-hosts.js', - 'toolbox-options.js', - 'toolbox-tabs-order-manager.js', - 'toolbox.js', + "browser-menus.js", + "devtools-browser.js", + "devtools.js", + "enable-devtools-popup.js", + "menu-item.js", + "menu.js", + "selection.js", + "source-map-url-service.js", + "store-provider.js", + "store.js", + "target-from-url.js", + "target.js", + "toolbox-context-menu.js", + "toolbox-host-manager.js", + "toolbox-hosts.js", + "toolbox-options.js", + "toolbox-tabs-order-manager.js", + "toolbox.js", ) -with Files('**'): - BUG_COMPONENT = ('DevTools', 'Framework') +with Files("**"): + BUG_COMPONENT = ("DevTools", "Framework") diff --git a/devtools/client/framework/reducers/moz.build b/devtools/client/framework/reducers/moz.build index bacd0b70578f50..53f955faa61809 100644 --- a/devtools/client/framework/reducers/moz.build +++ b/devtools/client/framework/reducers/moz.build @@ -3,10 +3,10 @@ # file, You can obtain one at http://mozilla.org/MPL/2.0/. 
DevToolsModules( - 'dom-mutation-breakpoints.js', - 'index.js', - 'targets.js', + "dom-mutation-breakpoints.js", + "index.js", + "targets.js", ) -with Files('**'): - BUG_COMPONENT = ('DevTools', 'Framework') +with Files("**"): + BUG_COMPONENT = ("DevTools", "Framework") diff --git a/devtools/client/fronts/addon/moz.build b/devtools/client/fronts/addon/moz.build index 210f7b9d3e2336..e3821736415376 100644 --- a/devtools/client/fronts/addon/moz.build +++ b/devtools/client/fronts/addon/moz.build @@ -5,6 +5,6 @@ # file, You can obtain one at http://mozilla.org/MPL/2.0/. DevToolsModules( - 'addons.js', - 'webextension-inspected-window.js', + "addons.js", + "webextension-inspected-window.js", ) diff --git a/devtools/client/fronts/descriptors/moz.build b/devtools/client/fronts/descriptors/moz.build index 913990bc406bf3..bf297b3dcb1e4b 100644 --- a/devtools/client/fronts/descriptors/moz.build +++ b/devtools/client/fronts/descriptors/moz.build @@ -5,9 +5,8 @@ # file, You can obtain one at http://mozilla.org/MPL/2.0/. DevToolsModules( - 'process.js', - 'tab.js', - 'webextension.js', - 'worker.js', + "process.js", + "tab.js", + "webextension.js", + "worker.js", ) - diff --git a/devtools/client/fronts/inspector/moz.build b/devtools/client/fronts/inspector/moz.build index 4be2d9ceb8fcde..de635f5947f2d8 100644 --- a/devtools/client/fronts/inspector/moz.build +++ b/devtools/client/fronts/inspector/moz.build @@ -5,6 +5,5 @@ # file, You can obtain one at http://mozilla.org/MPL/2.0/. DevToolsModules( - 'rule-rewriter.js', + "rule-rewriter.js", ) - diff --git a/devtools/client/fronts/moz.build b/devtools/client/fronts/moz.build index 30c36b262718df..80ec9fba09e088 100644 --- a/devtools/client/fronts/moz.build +++ b/devtools/client/fronts/moz.build @@ -5,51 +5,51 @@ # file, You can obtain one at http://mozilla.org/MPL/2.0/. 
DIRS += [ - 'addon', - 'descriptors', - 'inspector', - 'targets', - 'worker', + "addon", + "descriptors", + "inspector", + "targets", + "worker", ] DevToolsModules( - 'accessibility.js', - 'animation.js', - 'array-buffer.js', - 'changes.js', - 'compatibility.js', - 'content-viewer.js', - 'css-properties.js', - 'device.js', - 'eventsource.js', - 'frame.js', - 'framerate.js', - 'highlighters.js', - 'inspector.js', - 'layout.js', - 'manifest.js', - 'memory.js', - 'node.js', - 'object.js', - 'perf.js', - 'performance-recording.js', - 'performance.js', - 'preference.js', - 'property-iterator.js', - 'reflow.js', - 'responsive.js', - 'root.js', - 'screenshot.js', - 'source.js', - 'stacktraces.js', - 'storage.js', - 'string.js', - 'styles.js', - 'stylesheets.js', - 'symbol-iterator.js', - 'thread.js', - 'walker.js', - 'watcher.js', - 'webconsole.js', - 'websocket.js' + "accessibility.js", + "animation.js", + "array-buffer.js", + "changes.js", + "compatibility.js", + "content-viewer.js", + "css-properties.js", + "device.js", + "eventsource.js", + "frame.js", + "framerate.js", + "highlighters.js", + "inspector.js", + "layout.js", + "manifest.js", + "memory.js", + "node.js", + "object.js", + "perf.js", + "performance-recording.js", + "performance.js", + "preference.js", + "property-iterator.js", + "reflow.js", + "responsive.js", + "root.js", + "screenshot.js", + "source.js", + "stacktraces.js", + "storage.js", + "string.js", + "styles.js", + "stylesheets.js", + "symbol-iterator.js", + "thread.js", + "walker.js", + "watcher.js", + "webconsole.js", + "websocket.js", ) diff --git a/devtools/client/fronts/targets/moz.build b/devtools/client/fronts/targets/moz.build index 59f6bcb3d4bced..420b99e433e641 100644 --- a/devtools/client/fronts/targets/moz.build +++ b/devtools/client/fronts/targets/moz.build @@ -5,8 +5,8 @@ # file, You can obtain one at http://mozilla.org/MPL/2.0/. DevToolsModules( - 'browsing-context.js', - 'content-process.js', - 'target-mixin.js', - 'worker.js', + "browsing-context.js", + "content-process.js", + "target-mixin.js", + "worker.js", ) diff --git a/devtools/client/fronts/worker/moz.build b/devtools/client/fronts/worker/moz.build index b0d47c32da412a..dae0e2d60693a2 100644 --- a/devtools/client/fronts/worker/moz.build +++ b/devtools/client/fronts/worker/moz.build @@ -5,7 +5,7 @@ # file, You can obtain one at http://mozilla.org/MPL/2.0/. DevToolsModules( - 'push-subscription.js', - 'service-worker-registration.js', - 'service-worker.js', + "push-subscription.js", + "service-worker-registration.js", + "service-worker.js", ) diff --git a/devtools/client/inspector/animation/actions/moz.build b/devtools/client/inspector/animation/actions/moz.build index c15330774d5fbe..f43007dd6ea1df 100644 --- a/devtools/client/inspector/animation/actions/moz.build +++ b/devtools/client/inspector/animation/actions/moz.build @@ -3,6 +3,6 @@ # file, You can obtain one at http://mozilla.org/MPL/2.0/. DevToolsModules( - 'animations.js', - 'index.js', + "animations.js", + "index.js", ) diff --git a/devtools/client/inspector/animation/components/graph/moz.build b/devtools/client/inspector/animation/components/graph/moz.build index d425818d246fba..866bdd30ce7507 100644 --- a/devtools/client/inspector/animation/components/graph/moz.build +++ b/devtools/client/inspector/animation/components/graph/moz.build @@ -3,15 +3,15 @@ # file, You can obtain one at http://mozilla.org/MPL/2.0/. 
DevToolsModules( - 'AnimationName.js', - 'ComputedTimingPath.js', - 'DelaySign.js', - 'EffectTimingPath.js', - 'EndDelaySign.js', - 'NegativeDelayPath.js', - 'NegativeEndDelayPath.js', - 'NegativePath.js', - 'SummaryGraph.js', - 'SummaryGraphPath.js', - 'TimingPath.js' + "AnimationName.js", + "ComputedTimingPath.js", + "DelaySign.js", + "EffectTimingPath.js", + "EndDelaySign.js", + "NegativeDelayPath.js", + "NegativeEndDelayPath.js", + "NegativePath.js", + "SummaryGraph.js", + "SummaryGraphPath.js", + "TimingPath.js", ) diff --git a/devtools/client/inspector/animation/components/keyframes-graph/moz.build b/devtools/client/inspector/animation/components/keyframes-graph/moz.build index 589f3c9540b22c..1ff518e21d2969 100644 --- a/devtools/client/inspector/animation/components/keyframes-graph/moz.build +++ b/devtools/client/inspector/animation/components/keyframes-graph/moz.build @@ -3,12 +3,12 @@ # file, You can obtain one at http://mozilla.org/MPL/2.0/. DevToolsModules( - 'ColorPath.js', - 'ComputedStylePath.js', - 'DiscretePath.js', - 'DistancePath.js', - 'KeyframeMarkerItem.js', - 'KeyframeMarkerList.js', - 'KeyframesGraph.js', - 'KeyframesGraphPath.js', + "ColorPath.js", + "ComputedStylePath.js", + "DiscretePath.js", + "DistancePath.js", + "KeyframeMarkerItem.js", + "KeyframeMarkerList.js", + "KeyframesGraph.js", + "KeyframesGraphPath.js", ) diff --git a/devtools/client/inspector/animation/components/moz.build b/devtools/client/inspector/animation/components/moz.build index 711dfc9b8c9fa6..43e69dfa517350 100644 --- a/devtools/client/inspector/animation/components/moz.build +++ b/devtools/client/inspector/animation/components/moz.build @@ -2,33 +2,30 @@ # License, v. 2.0. If a copy of the MPL was not distributed with this # file, You can obtain one at http://mozilla.org/MPL/2.0/. -DIRS += [ - 'graph', - 'keyframes-graph' -] +DIRS += ["graph", "keyframes-graph"] DevToolsModules( - 'AnimatedPropertyItem.js', - 'AnimatedPropertyList.js', - 'AnimatedPropertyListContainer.js', - 'AnimatedPropertyName.js', - 'AnimationDetailContainer.js', - 'AnimationDetailHeader.js', - 'AnimationItem.js', - 'AnimationList.js', - 'AnimationListContainer.js', - 'AnimationTarget.js', - 'AnimationToolbar.js', - 'App.js', - 'CurrentTimeLabel.js', - 'CurrentTimeScrubber.js', - 'IndicationBar.js', - 'KeyframesProgressBar.js', - 'NoAnimationPanel.js', - 'PauseResumeButton.js', - 'PlaybackRateSelector.js', - 'ProgressInspectionPanel.js', - 'RewindButton.js', - 'TickLabels.js', - 'TickLines.js', + "AnimatedPropertyItem.js", + "AnimatedPropertyList.js", + "AnimatedPropertyListContainer.js", + "AnimatedPropertyName.js", + "AnimationDetailContainer.js", + "AnimationDetailHeader.js", + "AnimationItem.js", + "AnimationList.js", + "AnimationListContainer.js", + "AnimationTarget.js", + "AnimationToolbar.js", + "App.js", + "CurrentTimeLabel.js", + "CurrentTimeScrubber.js", + "IndicationBar.js", + "KeyframesProgressBar.js", + "NoAnimationPanel.js", + "PauseResumeButton.js", + "PlaybackRateSelector.js", + "ProgressInspectionPanel.js", + "RewindButton.js", + "TickLabels.js", + "TickLines.js", ) diff --git a/devtools/client/inspector/animation/moz.build b/devtools/client/inspector/animation/moz.build index 5227800ea93e65..fb6fda4f08f81c 100644 --- a/devtools/client/inspector/animation/moz.build +++ b/devtools/client/inspector/animation/moz.build @@ -2,19 +2,11 @@ # License, v. 2.0. If a copy of the MPL was not distributed with this # file, You can obtain one at http://mozilla.org/MPL/2.0/. 
-DIRS += [ - 'actions', - 'components', - 'reducers', - 'utils' -] +DIRS += ["actions", "components", "reducers", "utils"] -BROWSER_CHROME_MANIFESTS += ['test/browser.ini'] +BROWSER_CHROME_MANIFESTS += ["test/browser.ini"] -DevToolsModules( - 'animation.js', - 'current-time-timer.js' -) +DevToolsModules("animation.js", "current-time-timer.js") -with Files('**'): - BUG_COMPONENT = ('DevTools', 'Inspector: Animations') +with Files("**"): + BUG_COMPONENT = ("DevTools", "Inspector: Animations") diff --git a/devtools/client/inspector/animation/reducers/moz.build b/devtools/client/inspector/animation/reducers/moz.build index f46925c8c6fc7c..8b20a9f6cd91e7 100644 --- a/devtools/client/inspector/animation/reducers/moz.build +++ b/devtools/client/inspector/animation/reducers/moz.build @@ -3,5 +3,5 @@ # file, You can obtain one at http://mozilla.org/MPL/2.0/. DevToolsModules( - 'animations.js', + "animations.js", ) diff --git a/devtools/client/inspector/animation/utils/moz.build b/devtools/client/inspector/animation/utils/moz.build index 161250c7e83e65..ae73627a299510 100644 --- a/devtools/client/inspector/animation/utils/moz.build +++ b/devtools/client/inspector/animation/utils/moz.build @@ -3,8 +3,8 @@ # file, You can obtain one at http://mozilla.org/MPL/2.0/. DevToolsModules( - 'graph-helper.js', - 'l10n.js', - 'timescale.js', - 'utils.js', + "graph-helper.js", + "l10n.js", + "timescale.js", + "utils.js", ) diff --git a/devtools/client/inspector/boxmodel/actions/moz.build b/devtools/client/inspector/boxmodel/actions/moz.build index 2f98f6793d0cdd..0bc9633762a6e4 100644 --- a/devtools/client/inspector/boxmodel/actions/moz.build +++ b/devtools/client/inspector/boxmodel/actions/moz.build @@ -5,6 +5,6 @@ # file, You can obtain one at http://mozilla.org/MPL/2.0/. DevToolsModules( - 'box-model.js', - 'index.js', + "box-model.js", + "index.js", ) diff --git a/devtools/client/inspector/boxmodel/components/moz.build b/devtools/client/inspector/boxmodel/components/moz.build index 562fd1a2cba89b..ed57c93eb7e75f 100644 --- a/devtools/client/inspector/boxmodel/components/moz.build +++ b/devtools/client/inspector/boxmodel/components/moz.build @@ -5,10 +5,10 @@ # file, You can obtain one at http://mozilla.org/MPL/2.0/. DevToolsModules( - 'BoxModel.js', - 'BoxModelEditable.js', - 'BoxModelInfo.js', - 'BoxModelMain.js', - 'BoxModelProperties.js', - 'ComputedProperty.js', + "BoxModel.js", + "BoxModelEditable.js", + "BoxModelInfo.js", + "BoxModelMain.js", + "BoxModelProperties.js", + "ComputedProperty.js", ) diff --git a/devtools/client/inspector/boxmodel/moz.build b/devtools/client/inspector/boxmodel/moz.build index 6ef44c80c361dc..2791728136028a 100644 --- a/devtools/client/inspector/boxmodel/moz.build +++ b/devtools/client/inspector/boxmodel/moz.build @@ -5,15 +5,15 @@ # file, You can obtain one at http://mozilla.org/MPL/2.0/. DIRS += [ - 'actions', - 'components', - 'reducers', - 'utils', + "actions", + "components", + "reducers", + "utils", ] DevToolsModules( - 'box-model.js', - 'types.js', + "box-model.js", + "types.js", ) -BROWSER_CHROME_MANIFESTS += ['test/browser.ini'] +BROWSER_CHROME_MANIFESTS += ["test/browser.ini"] diff --git a/devtools/client/inspector/boxmodel/reducers/moz.build b/devtools/client/inspector/boxmodel/reducers/moz.build index e2402990ad80b0..fe216631de59d4 100644 --- a/devtools/client/inspector/boxmodel/reducers/moz.build +++ b/devtools/client/inspector/boxmodel/reducers/moz.build @@ -5,5 +5,5 @@ # file, You can obtain one at http://mozilla.org/MPL/2.0/. 
DevToolsModules( - 'box-model.js', + "box-model.js", ) diff --git a/devtools/client/inspector/boxmodel/utils/moz.build b/devtools/client/inspector/boxmodel/utils/moz.build index 4ad6c32d9ef5ce..76a56562948780 100644 --- a/devtools/client/inspector/boxmodel/utils/moz.build +++ b/devtools/client/inspector/boxmodel/utils/moz.build @@ -5,5 +5,5 @@ # file, You can obtain one at http://mozilla.org/MPL/2.0/. DevToolsModules( - 'editing-session.js', + "editing-session.js", ) diff --git a/devtools/client/inspector/changes/actions/moz.build b/devtools/client/inspector/changes/actions/moz.build index 9113b89c6e1ce1..06c5314a9ecbbf 100644 --- a/devtools/client/inspector/changes/actions/moz.build +++ b/devtools/client/inspector/changes/actions/moz.build @@ -5,6 +5,6 @@ # file, You can obtain one at http://mozilla.org/MPL/2.0/. DevToolsModules( - 'changes.js', - 'index.js', + "changes.js", + "index.js", ) diff --git a/devtools/client/inspector/changes/components/moz.build b/devtools/client/inspector/changes/components/moz.build index d7d2d06dd0422e..e8fba36fb8c6aa 100644 --- a/devtools/client/inspector/changes/components/moz.build +++ b/devtools/client/inspector/changes/components/moz.build @@ -5,6 +5,6 @@ # file, You can obtain one at http://mozilla.org/MPL/2.0/. DevToolsModules( - 'ChangesApp.js', - 'CSSDeclaration.js', + "ChangesApp.js", + "CSSDeclaration.js", ) diff --git a/devtools/client/inspector/changes/moz.build b/devtools/client/inspector/changes/moz.build index a1483df7646dd4..10c4d4a8cf9ae7 100644 --- a/devtools/client/inspector/changes/moz.build +++ b/devtools/client/inspector/changes/moz.build @@ -5,20 +5,20 @@ # file, You can obtain one at http://mozilla.org/MPL/2.0/. DIRS += [ - 'actions', - 'components', - 'reducers', - 'selectors', - 'utils', + "actions", + "components", + "reducers", + "selectors", + "utils", ] DevToolsModules( - 'ChangesContextMenu.js', - 'ChangesView.js', + "ChangesContextMenu.js", + "ChangesView.js", ) -BROWSER_CHROME_MANIFESTS += ['test/browser.ini'] -XPCSHELL_TESTS_MANIFESTS += ['test/xpcshell/xpcshell.ini'] +BROWSER_CHROME_MANIFESTS += ["test/browser.ini"] +XPCSHELL_TESTS_MANIFESTS += ["test/xpcshell/xpcshell.ini"] -with Files('**'): - BUG_COMPONENT = ('DevTools', 'Inspector: Changes') +with Files("**"): + BUG_COMPONENT = ("DevTools", "Inspector: Changes") diff --git a/devtools/client/inspector/changes/reducers/moz.build b/devtools/client/inspector/changes/reducers/moz.build index ced454ee920cc8..f3ea9a1bfc32b2 100644 --- a/devtools/client/inspector/changes/reducers/moz.build +++ b/devtools/client/inspector/changes/reducers/moz.build @@ -5,5 +5,5 @@ # file, You can obtain one at http://mozilla.org/MPL/2.0/. DevToolsModules( - 'changes.js', + "changes.js", ) diff --git a/devtools/client/inspector/changes/selectors/moz.build b/devtools/client/inspector/changes/selectors/moz.build index ced454ee920cc8..f3ea9a1bfc32b2 100644 --- a/devtools/client/inspector/changes/selectors/moz.build +++ b/devtools/client/inspector/changes/selectors/moz.build @@ -5,5 +5,5 @@ # file, You can obtain one at http://mozilla.org/MPL/2.0/. DevToolsModules( - 'changes.js', + "changes.js", ) diff --git a/devtools/client/inspector/changes/utils/moz.build b/devtools/client/inspector/changes/utils/moz.build index 0a26b20371674f..155752e0d842c5 100644 --- a/devtools/client/inspector/changes/utils/moz.build +++ b/devtools/client/inspector/changes/utils/moz.build @@ -5,6 +5,6 @@ # file, You can obtain one at http://mozilla.org/MPL/2.0/. 
DevToolsModules( - 'changes-utils.js', - 'l10n.js', + "changes-utils.js", + "l10n.js", ) diff --git a/devtools/client/inspector/compatibility/actions/moz.build b/devtools/client/inspector/compatibility/actions/moz.build index 0e8e7c2118847c..1b2b96950fead8 100644 --- a/devtools/client/inspector/compatibility/actions/moz.build +++ b/devtools/client/inspector/compatibility/actions/moz.build @@ -5,6 +5,6 @@ # file, You can obtain one at http://mozilla.org/MPL/2.0/. DevToolsModules( - 'compatibility.js', - 'index.js', + "compatibility.js", + "index.js", ) diff --git a/devtools/client/inspector/compatibility/moz.build b/devtools/client/inspector/compatibility/moz.build index b36726a8cd42be..37fbee966c5e59 100644 --- a/devtools/client/inspector/compatibility/moz.build +++ b/devtools/client/inspector/compatibility/moz.build @@ -5,19 +5,19 @@ # file, You can obtain one at http://mozilla.org/MPL/2.0/. DIRS += [ - 'actions', - 'components', - 'reducers', - 'utils', + "actions", + "components", + "reducers", + "utils", ] -BROWSER_CHROME_MANIFESTS += ['test/browser/browser.ini'] -XPCSHELL_TESTS_MANIFESTS += ['test/xpcshell/xpcshell.ini'] +BROWSER_CHROME_MANIFESTS += ["test/browser/browser.ini"] +XPCSHELL_TESTS_MANIFESTS += ["test/xpcshell/xpcshell.ini"] DevToolsModules( - 'CompatibilityView.js', - 'types.js', + "CompatibilityView.js", + "types.js", ) -with Files('**'): - BUG_COMPONENT = ('DevTools', 'Inspector: Compatibility') +with Files("**"): + BUG_COMPONENT = ("DevTools", "Inspector: Compatibility") diff --git a/devtools/client/inspector/compatibility/reducers/moz.build b/devtools/client/inspector/compatibility/reducers/moz.build index 3ff084f8c2167c..b9399195676a66 100644 --- a/devtools/client/inspector/compatibility/reducers/moz.build +++ b/devtools/client/inspector/compatibility/reducers/moz.build @@ -5,5 +5,5 @@ # file, You can obtain one at http://mozilla.org/MPL/2.0/. DevToolsModules( - 'compatibility.js', + "compatibility.js", ) diff --git a/devtools/client/inspector/compatibility/utils/moz.build b/devtools/client/inspector/compatibility/utils/moz.build index aed24f611c9352..5de4c7b3ed62d4 100644 --- a/devtools/client/inspector/compatibility/utils/moz.build +++ b/devtools/client/inspector/compatibility/utils/moz.build @@ -5,5 +5,5 @@ # file, You can obtain one at http://mozilla.org/MPL/2.0/. DevToolsModules( - 'cases.js', + "cases.js", ) diff --git a/devtools/client/inspector/components/moz.build b/devtools/client/inspector/components/moz.build index 0bb0fd1898470c..cf6226361ab6c6 100644 --- a/devtools/client/inspector/components/moz.build +++ b/devtools/client/inspector/components/moz.build @@ -5,5 +5,5 @@ # file, You can obtain one at http://mozilla.org/MPL/2.0/. DevToolsModules( - 'InspectorTabPanel.js', + "InspectorTabPanel.js", ) diff --git a/devtools/client/inspector/computed/moz.build b/devtools/client/inspector/computed/moz.build index 89b6b304e08c46..5ba27c6b3edf70 100644 --- a/devtools/client/inspector/computed/moz.build +++ b/devtools/client/inspector/computed/moz.build @@ -5,10 +5,10 @@ # file, You can obtain one at http://mozilla.org/MPL/2.0/. 
DevToolsModules( - 'computed.js', + "computed.js", ) -BROWSER_CHROME_MANIFESTS += ['test/browser.ini'] +BROWSER_CHROME_MANIFESTS += ["test/browser.ini"] -with Files('**'): - BUG_COMPONENT = ('DevTools', 'Inspector: Computed') +with Files("**"): + BUG_COMPONENT = ("DevTools", "Inspector: Computed") diff --git a/devtools/client/inspector/extensions/actions/moz.build b/devtools/client/inspector/extensions/actions/moz.build index f56e9c3b0ed6b2..d101bc2fea4967 100644 --- a/devtools/client/inspector/extensions/actions/moz.build +++ b/devtools/client/inspector/extensions/actions/moz.build @@ -5,6 +5,6 @@ # file, You can obtain one at http://mozilla.org/MPL/2.0/. DevToolsModules( - 'index.js', - 'sidebar.js', + "index.js", + "sidebar.js", ) diff --git a/devtools/client/inspector/extensions/components/moz.build b/devtools/client/inspector/extensions/components/moz.build index 37e491f80e600d..62f3d991c100ad 100644 --- a/devtools/client/inspector/extensions/components/moz.build +++ b/devtools/client/inspector/extensions/components/moz.build @@ -5,8 +5,8 @@ # file, You can obtain one at http://mozilla.org/MPL/2.0/. DevToolsModules( - 'ExpressionResultView.js', - 'ExtensionPage.js', - 'ExtensionSidebar.js', - 'ObjectTreeView.js', + "ExpressionResultView.js", + "ExtensionPage.js", + "ExtensionSidebar.js", + "ObjectTreeView.js", ) diff --git a/devtools/client/inspector/extensions/moz.build b/devtools/client/inspector/extensions/moz.build index dae7e8e5888555..73fe369a7ff154 100644 --- a/devtools/client/inspector/extensions/moz.build +++ b/devtools/client/inspector/extensions/moz.build @@ -5,14 +5,14 @@ # file, You can obtain one at http://mozilla.org/MPL/2.0/. DIRS += [ - 'actions', - 'components', - 'reducers', + "actions", + "components", + "reducers", ] DevToolsModules( - 'extension-sidebar.js', - 'types.js', + "extension-sidebar.js", + "types.js", ) -BROWSER_CHROME_MANIFESTS += ['test/browser.ini'] +BROWSER_CHROME_MANIFESTS += ["test/browser.ini"] diff --git a/devtools/client/inspector/extensions/reducers/moz.build b/devtools/client/inspector/extensions/reducers/moz.build index 816dadcfcbc3c6..0f8a5757c8bc29 100644 --- a/devtools/client/inspector/extensions/reducers/moz.build +++ b/devtools/client/inspector/extensions/reducers/moz.build @@ -5,5 +5,5 @@ # file, You can obtain one at http://mozilla.org/MPL/2.0/. DevToolsModules( - 'sidebar.js', + "sidebar.js", ) diff --git a/devtools/client/inspector/flexbox/actions/moz.build b/devtools/client/inspector/flexbox/actions/moz.build index 969b8365b96542..4327940888a357 100644 --- a/devtools/client/inspector/flexbox/actions/moz.build +++ b/devtools/client/inspector/flexbox/actions/moz.build @@ -5,6 +5,6 @@ # file, You can obtain one at http://mozilla.org/MPL/2.0/. DevToolsModules( - 'flexbox.js', - 'index.js', + "flexbox.js", + "index.js", ) diff --git a/devtools/client/inspector/flexbox/components/moz.build b/devtools/client/inspector/flexbox/components/moz.build index 8feff61c113ed7..3e077a217fc004 100644 --- a/devtools/client/inspector/flexbox/components/moz.build +++ b/devtools/client/inspector/flexbox/components/moz.build @@ -5,12 +5,12 @@ # file, You can obtain one at http://mozilla.org/MPL/2.0/. 
DevToolsModules( - 'Flexbox.js', - 'FlexContainer.js', - 'FlexItem.js', - 'FlexItemList.js', - 'FlexItemSelector.js', - 'FlexItemSizingOutline.js', - 'FlexItemSizingProperties.js', - 'Header.js', + "Flexbox.js", + "FlexContainer.js", + "FlexItem.js", + "FlexItemList.js", + "FlexItemSelector.js", + "FlexItemSizingOutline.js", + "FlexItemSizingProperties.js", + "Header.js", ) diff --git a/devtools/client/inspector/flexbox/moz.build b/devtools/client/inspector/flexbox/moz.build index 1a92f264d56f80..60ec69609984eb 100644 --- a/devtools/client/inspector/flexbox/moz.build +++ b/devtools/client/inspector/flexbox/moz.build @@ -5,14 +5,14 @@ # file, You can obtain one at http://mozilla.org/MPL/2.0/. DIRS += [ - 'actions', - 'components', - 'reducers', + "actions", + "components", + "reducers", ] DevToolsModules( - 'flexbox.js', - 'types.js', + "flexbox.js", + "types.js", ) -BROWSER_CHROME_MANIFESTS += ['test/browser.ini'] +BROWSER_CHROME_MANIFESTS += ["test/browser.ini"] diff --git a/devtools/client/inspector/flexbox/reducers/moz.build b/devtools/client/inspector/flexbox/reducers/moz.build index 969b8365b96542..4327940888a357 100644 --- a/devtools/client/inspector/flexbox/reducers/moz.build +++ b/devtools/client/inspector/flexbox/reducers/moz.build @@ -5,6 +5,6 @@ # file, You can obtain one at http://mozilla.org/MPL/2.0/. DevToolsModules( - 'flexbox.js', - 'index.js', + "flexbox.js", + "index.js", ) diff --git a/devtools/client/inspector/fonts/actions/moz.build b/devtools/client/inspector/fonts/actions/moz.build index 334bf80fb6a6e6..31452af5807d92 100644 --- a/devtools/client/inspector/fonts/actions/moz.build +++ b/devtools/client/inspector/fonts/actions/moz.build @@ -5,8 +5,8 @@ # file, You can obtain one at http://mozilla.org/MPL/2.0/. DevToolsModules( - 'font-editor.js', - 'font-options.js', - 'fonts.js', - 'index.js', + "font-editor.js", + "font-options.js", + "fonts.js", + "index.js", ) diff --git a/devtools/client/inspector/fonts/components/moz.build b/devtools/client/inspector/fonts/components/moz.build index 45dfe042b538de..8838777f6503e2 100644 --- a/devtools/client/inspector/fonts/components/moz.build +++ b/devtools/client/inspector/fonts/components/moz.build @@ -5,20 +5,20 @@ # file, You can obtain one at http://mozilla.org/MPL/2.0/. DevToolsModules( - 'Font.js', - 'FontAxis.js', - 'FontEditor.js', - 'FontList.js', - 'FontName.js', - 'FontOrigin.js', - 'FontOverview.js', - 'FontPreview.js', - 'FontPreviewInput.js', - 'FontPropertyValue.js', - 'FontsApp.js', - 'FontSize.js', - 'FontStyle.js', - 'FontWeight.js', - 'LetterSpacing.js', - 'LineHeight.js', + "Font.js", + "FontAxis.js", + "FontEditor.js", + "FontList.js", + "FontName.js", + "FontOrigin.js", + "FontOverview.js", + "FontPreview.js", + "FontPreviewInput.js", + "FontPropertyValue.js", + "FontsApp.js", + "FontSize.js", + "FontStyle.js", + "FontWeight.js", + "LetterSpacing.js", + "LineHeight.js", ) diff --git a/devtools/client/inspector/fonts/moz.build b/devtools/client/inspector/fonts/moz.build index d978ed83b18f50..319273244ac061 100644 --- a/devtools/client/inspector/fonts/moz.build +++ b/devtools/client/inspector/fonts/moz.build @@ -5,18 +5,18 @@ # file, You can obtain one at http://mozilla.org/MPL/2.0/. 
DIRS += [ - 'actions', - 'components', - 'reducers', - 'utils', + "actions", + "components", + "reducers", + "utils", ] DevToolsModules( - 'fonts.js', - 'types.js', + "fonts.js", + "types.js", ) -BROWSER_CHROME_MANIFESTS += ['test/browser.ini'] +BROWSER_CHROME_MANIFESTS += ["test/browser.ini"] -with Files('**'): - BUG_COMPONENT = ('DevTools', 'Inspector: Fonts') +with Files("**"): + BUG_COMPONENT = ("DevTools", "Inspector: Fonts") diff --git a/devtools/client/inspector/fonts/reducers/moz.build b/devtools/client/inspector/fonts/reducers/moz.build index 0512407bf402ae..13d1c7cf34c890 100644 --- a/devtools/client/inspector/fonts/reducers/moz.build +++ b/devtools/client/inspector/fonts/reducers/moz.build @@ -5,7 +5,7 @@ # file, You can obtain one at http://mozilla.org/MPL/2.0/. DevToolsModules( - 'font-editor.js', - 'font-options.js', - 'fonts.js', + "font-editor.js", + "font-options.js", + "fonts.js", ) diff --git a/devtools/client/inspector/fonts/utils/moz.build b/devtools/client/inspector/fonts/utils/moz.build index cbd481359e87dd..ddd06560a07417 100644 --- a/devtools/client/inspector/fonts/utils/moz.build +++ b/devtools/client/inspector/fonts/utils/moz.build @@ -5,6 +5,6 @@ # file, You can obtain one at http://mozilla.org/MPL/2.0/. DevToolsModules( - 'font-utils.js', - 'l10n.js', + "font-utils.js", + "l10n.js", ) diff --git a/devtools/client/inspector/grids/actions/moz.build b/devtools/client/inspector/grids/actions/moz.build index 09b7039d19ded6..c2ca4f109472c7 100644 --- a/devtools/client/inspector/grids/actions/moz.build +++ b/devtools/client/inspector/grids/actions/moz.build @@ -5,7 +5,7 @@ # file, You can obtain one at http://mozilla.org/MPL/2.0/. DevToolsModules( - 'grids.js', - 'highlighter-settings.js', - 'index.js', + "grids.js", + "highlighter-settings.js", + "index.js", ) diff --git a/devtools/client/inspector/grids/components/moz.build b/devtools/client/inspector/grids/components/moz.build index 9ded404213e80e..e938e51ad1bdd9 100644 --- a/devtools/client/inspector/grids/components/moz.build +++ b/devtools/client/inspector/grids/components/moz.build @@ -5,9 +5,9 @@ # file, You can obtain one at http://mozilla.org/MPL/2.0/. DevToolsModules( - 'Grid.js', - 'GridDisplaySettings.js', - 'GridItem.js', - 'GridList.js', - 'GridOutline.js', + "Grid.js", + "GridDisplaySettings.js", + "GridItem.js", + "GridList.js", + "GridOutline.js", ) diff --git a/devtools/client/inspector/grids/moz.build b/devtools/client/inspector/grids/moz.build index a788399edaf509..4c85701c643038 100644 --- a/devtools/client/inspector/grids/moz.build +++ b/devtools/client/inspector/grids/moz.build @@ -5,16 +5,16 @@ # file, You can obtain one at http://mozilla.org/MPL/2.0/. DIRS += [ - 'actions', - 'components', - 'reducers', - 'utils', + "actions", + "components", + "reducers", + "utils", ] DevToolsModules( - 'grid-inspector.js', - 'types.js', + "grid-inspector.js", + "types.js", ) -BROWSER_CHROME_MANIFESTS += ['test/browser.ini'] -XPCSHELL_TESTS_MANIFESTS += ['test/xpcshell/xpcshell.ini'] +BROWSER_CHROME_MANIFESTS += ["test/browser.ini"] +XPCSHELL_TESTS_MANIFESTS += ["test/xpcshell/xpcshell.ini"] diff --git a/devtools/client/inspector/grids/reducers/moz.build b/devtools/client/inspector/grids/reducers/moz.build index 34867f4e837fb2..768e29b542e55b 100644 --- a/devtools/client/inspector/grids/reducers/moz.build +++ b/devtools/client/inspector/grids/reducers/moz.build @@ -5,6 +5,6 @@ # file, You can obtain one at http://mozilla.org/MPL/2.0/. 
DevToolsModules( - 'grids.js', - 'highlighter-settings.js', + "grids.js", + "highlighter-settings.js", ) diff --git a/devtools/client/inspector/grids/utils/moz.build b/devtools/client/inspector/grids/utils/moz.build index aaa9fdff75f4e1..c74e63e61762a6 100644 --- a/devtools/client/inspector/grids/utils/moz.build +++ b/devtools/client/inspector/grids/utils/moz.build @@ -5,5 +5,5 @@ # file, You can obtain one at http://mozilla.org/MPL/2.0/. DevToolsModules( - 'utils.js', + "utils.js", ) diff --git a/devtools/client/inspector/layout/components/moz.build b/devtools/client/inspector/layout/components/moz.build index 621f0943dfd22f..bb3e2624caf2bc 100644 --- a/devtools/client/inspector/layout/components/moz.build +++ b/devtools/client/inspector/layout/components/moz.build @@ -5,5 +5,5 @@ # file, You can obtain one at http://mozilla.org/MPL/2.0/. DevToolsModules( - 'LayoutApp.js', + "LayoutApp.js", ) diff --git a/devtools/client/inspector/layout/moz.build b/devtools/client/inspector/layout/moz.build index 1cb5826c30b432..42d0bf4c7bcaf6 100644 --- a/devtools/client/inspector/layout/moz.build +++ b/devtools/client/inspector/layout/moz.build @@ -5,13 +5,13 @@ # file, You can obtain one at http://mozilla.org/MPL/2.0/. DIRS += [ - 'components', - 'utils', + "components", + "utils", ] DevToolsModules( - 'layout.js', + "layout.js", ) -with Files('**'): - BUG_COMPONENT = ('DevTools', 'Inspector: Layout') +with Files("**"): + BUG_COMPONENT = ("DevTools", "Inspector: Layout") diff --git a/devtools/client/inspector/layout/utils/moz.build b/devtools/client/inspector/layout/utils/moz.build index e3053b63fab6cf..ddee85b5f7c264 100644 --- a/devtools/client/inspector/layout/utils/moz.build +++ b/devtools/client/inspector/layout/utils/moz.build @@ -5,5 +5,5 @@ # file, You can obtain one at http://mozilla.org/MPL/2.0/. DevToolsModules( - 'l10n.js', + "l10n.js", ) diff --git a/devtools/client/inspector/markup/components/moz.build b/devtools/client/inspector/markup/components/moz.build index c63de06d6f01b0..13e4ee411dbe6d 100644 --- a/devtools/client/inspector/markup/components/moz.build +++ b/devtools/client/inspector/markup/components/moz.build @@ -5,5 +5,5 @@ # file, You can obtain one at http://mozilla.org/MPL/2.0/. DevToolsModules( - 'TextNode.js', + "TextNode.js", ) diff --git a/devtools/client/inspector/markup/moz.build b/devtools/client/inspector/markup/moz.build index cc616e83cbbcc4..1fcba80d9b3b03 100644 --- a/devtools/client/inspector/markup/moz.build +++ b/devtools/client/inspector/markup/moz.build @@ -5,15 +5,15 @@ # file, You can obtain one at http://mozilla.org/MPL/2.0/. DIRS += [ - 'components', - 'utils', - 'views', + "components", + "utils", + "views", ] DevToolsModules( - 'markup-context-menu.js', - 'markup.js', - 'utils.js', + "markup-context-menu.js", + "markup.js", + "utils.js", ) -BROWSER_CHROME_MANIFESTS += ['test/browser.ini'] +BROWSER_CHROME_MANIFESTS += ["test/browser.ini"] diff --git a/devtools/client/inspector/markup/utils/moz.build b/devtools/client/inspector/markup/utils/moz.build index e3053b63fab6cf..ddee85b5f7c264 100644 --- a/devtools/client/inspector/markup/utils/moz.build +++ b/devtools/client/inspector/markup/utils/moz.build @@ -5,5 +5,5 @@ # file, You can obtain one at http://mozilla.org/MPL/2.0/. 
DevToolsModules( - 'l10n.js', + "l10n.js", ) diff --git a/devtools/client/inspector/markup/views/moz.build b/devtools/client/inspector/markup/views/moz.build index 751088f6265bb8..9be0f159ee9210 100644 --- a/devtools/client/inspector/markup/views/moz.build +++ b/devtools/client/inspector/markup/views/moz.build @@ -5,15 +5,15 @@ # file, You can obtain one at http://mozilla.org/MPL/2.0/. DevToolsModules( - 'element-container.js', - 'element-editor.js', - 'html-editor.js', - 'markup-container.js', - 'read-only-container.js', - 'read-only-editor.js', - 'root-container.js', - 'slotted-node-container.js', - 'slotted-node-editor.js', - 'text-container.js', - 'text-editor.js', + "element-container.js", + "element-editor.js", + "html-editor.js", + "markup-container.js", + "read-only-container.js", + "read-only-editor.js", + "root-container.js", + "slotted-node-container.js", + "slotted-node-editor.js", + "text-container.js", + "text-editor.js", ) diff --git a/devtools/client/inspector/moz.build b/devtools/client/inspector/moz.build index bba3e5114b0bd3..b6f09b0e4a9db0 100644 --- a/devtools/client/inspector/moz.build +++ b/devtools/client/inspector/moz.build @@ -3,33 +3,33 @@ # file, You can obtain one at http://mozilla.org/MPL/2.0/. DIRS += [ - 'animation', - 'boxmodel', - 'changes', - 'compatibility', - 'components', - 'computed', - 'extensions', - 'flexbox', - 'fonts', - 'grids', - 'layout', - 'markup', - 'rules', - 'shared' + "animation", + "boxmodel", + "changes", + "compatibility", + "components", + "computed", + "extensions", + "flexbox", + "fonts", + "grids", + "layout", + "markup", + "rules", + "shared", ] DevToolsModules( - 'breadcrumbs.js', - 'inspector-search.js', - 'inspector.js', - 'node-picker.js', - 'panel.js', - 'store.js', - 'toolsidebar.js', + "breadcrumbs.js", + "inspector-search.js", + "inspector.js", + "node-picker.js", + "panel.js", + "store.js", + "toolsidebar.js", ) -BROWSER_CHROME_MANIFESTS += ['test/browser.ini'] +BROWSER_CHROME_MANIFESTS += ["test/browser.ini"] -with Files('**'): - BUG_COMPONENT = ('DevTools', 'Inspector') +with Files("**"): + BUG_COMPONENT = ("DevTools", "Inspector") diff --git a/devtools/client/inspector/rules/actions/moz.build b/devtools/client/inspector/rules/actions/moz.build index 7f703c61d2fdfd..e42457d2d378d3 100644 --- a/devtools/client/inspector/rules/actions/moz.build +++ b/devtools/client/inspector/rules/actions/moz.build @@ -3,8 +3,8 @@ # file, You can obtain one at http://mozilla.org/MPL/2.0/. DevToolsModules( - 'class-list.js', - 'index.js', - 'pseudo-classes.js', - 'rules.js', + "class-list.js", + "index.js", + "pseudo-classes.js", + "rules.js", ) diff --git a/devtools/client/inspector/rules/components/moz.build b/devtools/client/inspector/rules/components/moz.build index 848b5366310586..1bba69564f21ec 100644 --- a/devtools/client/inspector/rules/components/moz.build +++ b/devtools/client/inspector/rules/components/moz.build @@ -3,16 +3,16 @@ # file, You can obtain one at http://mozilla.org/MPL/2.0/. 
DevToolsModules( - 'ClassListPanel.js', - 'Declaration.js', - 'Declarations.js', - 'PseudoClassPanel.js', - 'Rule.js', - 'Rules.js', - 'RulesApp.js', - 'SearchBox.js', - 'Selector.js', - 'SelectorHighlighter.js', - 'SourceLink.js', - 'Toolbar.js', + "ClassListPanel.js", + "Declaration.js", + "Declarations.js", + "PseudoClassPanel.js", + "Rule.js", + "Rules.js", + "RulesApp.js", + "SearchBox.js", + "Selector.js", + "SelectorHighlighter.js", + "SourceLink.js", + "Toolbar.js", ) diff --git a/devtools/client/inspector/rules/models/moz.build b/devtools/client/inspector/rules/models/moz.build index 16e1a1df751039..7a5561e2139dcf 100644 --- a/devtools/client/inspector/rules/models/moz.build +++ b/devtools/client/inspector/rules/models/moz.build @@ -5,9 +5,9 @@ # file, You can obtain one at http://mozilla.org/MPL/2.0/. DevToolsModules( - 'class-list.js', - 'element-style.js', - 'rule.js', - 'text-property.js', - 'user-properties.js', + "class-list.js", + "element-style.js", + "rule.js", + "text-property.js", + "user-properties.js", ) diff --git a/devtools/client/inspector/rules/moz.build b/devtools/client/inspector/rules/moz.build index ea995ead08ced7..cc0dc3448be617 100644 --- a/devtools/client/inspector/rules/moz.build +++ b/devtools/client/inspector/rules/moz.build @@ -5,25 +5,25 @@ # file, You can obtain one at http://mozilla.org/MPL/2.0/. DIRS += [ - 'actions', - 'components', - 'models', - 'reducers', - 'utils', - 'views', + "actions", + "components", + "models", + "reducers", + "utils", + "views", ] DevToolsModules( - 'constants.js', - 'new-rules.js', - 'rules.js', - 'types.js', + "constants.js", + "new-rules.js", + "rules.js", + "types.js", ) BROWSER_CHROME_MANIFESTS += [ - 'test/browser_part1.ini', - 'test/browser_part2.ini', + "test/browser_part1.ini", + "test/browser_part2.ini", ] -with Files('**'): - BUG_COMPONENT = ('DevTools', 'Inspector: Rules') +with Files("**"): + BUG_COMPONENT = ("DevTools", "Inspector: Rules") diff --git a/devtools/client/inspector/rules/reducers/moz.build b/devtools/client/inspector/rules/reducers/moz.build index d210e9f8019c71..6b99b660a9344f 100644 --- a/devtools/client/inspector/rules/reducers/moz.build +++ b/devtools/client/inspector/rules/reducers/moz.build @@ -3,7 +3,7 @@ # file, You can obtain one at http://mozilla.org/MPL/2.0/. DevToolsModules( - 'class-list.js', - 'pseudo-classes.js', - 'rules.js', + "class-list.js", + "pseudo-classes.js", + "rules.js", ) diff --git a/devtools/client/inspector/rules/utils/moz.build b/devtools/client/inspector/rules/utils/moz.build index f6a6af241dddab..cc3fa4dfbdb381 100644 --- a/devtools/client/inspector/rules/utils/moz.build +++ b/devtools/client/inspector/rules/utils/moz.build @@ -5,6 +5,6 @@ # file, You can obtain one at http://mozilla.org/MPL/2.0/. DevToolsModules( - 'l10n.js', - 'utils.js', + "l10n.js", + "utils.js", ) diff --git a/devtools/client/inspector/rules/views/moz.build b/devtools/client/inspector/rules/views/moz.build index af97777faf4ea0..d5058bdbc2cca2 100644 --- a/devtools/client/inspector/rules/views/moz.build +++ b/devtools/client/inspector/rules/views/moz.build @@ -3,7 +3,7 @@ # file, You can obtain one at http://mozilla.org/MPL/2.0/. 
DevToolsModules( - 'class-list-previewer.js', - 'rule-editor.js', - 'text-property-editor.js', + "class-list-previewer.js", + "rule-editor.js", + "text-property-editor.js", ) diff --git a/devtools/client/inspector/shared/moz.build b/devtools/client/inspector/shared/moz.build index 5daeb60d340d31..3249b3918dba34 100644 --- a/devtools/client/inspector/shared/moz.build +++ b/devtools/client/inspector/shared/moz.build @@ -5,15 +5,15 @@ # file, You can obtain one at http://mozilla.org/MPL/2.0/. DevToolsModules( - 'compatibility-user-settings.js', - 'highlighters-overlay.js', - 'node-reps.js', - 'node-types.js', - 'style-change-tracker.js', - 'style-inspector-menu.js', - 'tooltips-overlay.js', - 'utils.js', - 'walker-event-listener.js' + "compatibility-user-settings.js", + "highlighters-overlay.js", + "node-reps.js", + "node-types.js", + "style-change-tracker.js", + "style-inspector-menu.js", + "tooltips-overlay.js", + "utils.js", + "walker-event-listener.js", ) -BROWSER_CHROME_MANIFESTS += ['test/browser.ini'] +BROWSER_CHROME_MANIFESTS += ["test/browser.ini"] diff --git a/devtools/client/jsonview/components/moz.build b/devtools/client/jsonview/components/moz.build index 55f3bbe8619e44..8cb9dc8e87cc93 100644 --- a/devtools/client/jsonview/components/moz.build +++ b/devtools/client/jsonview/components/moz.build @@ -4,19 +4,17 @@ # License, v. 2.0. If a copy of the MPL was not distributed with this # file, You can obtain one at http://mozilla.org/MPL/2.0/. -DIRS += [ - 'reps' -] +DIRS += ["reps"] DevToolsModules( - 'Headers.js', - 'HeadersPanel.js', - 'HeadersToolbar.js', - 'JsonPanel.js', - 'JsonToolbar.js', - 'LiveText.js', - 'MainTabbedArea.js', - 'SearchBox.js', - 'TextPanel.js', - 'TextToolbar.js' + "Headers.js", + "HeadersPanel.js", + "HeadersToolbar.js", + "JsonPanel.js", + "JsonToolbar.js", + "LiveText.js", + "MainTabbedArea.js", + "SearchBox.js", + "TextPanel.js", + "TextToolbar.js", ) diff --git a/devtools/client/jsonview/components/reps/moz.build b/devtools/client/jsonview/components/reps/moz.build index f1982025325563..ba39d7767be2ad 100644 --- a/devtools/client/jsonview/components/reps/moz.build +++ b/devtools/client/jsonview/components/reps/moz.build @@ -5,5 +5,5 @@ # file, You can obtain one at http://mozilla.org/MPL/2.0/. DevToolsModules( - 'Toolbar.js', + "Toolbar.js", ) diff --git a/devtools/client/jsonview/css/moz.build b/devtools/client/jsonview/css/moz.build index 84ff655db9e724..53ebea91b0c63f 100644 --- a/devtools/client/jsonview/css/moz.build +++ b/devtools/client/jsonview/css/moz.build @@ -5,11 +5,11 @@ # file, You can obtain one at http://mozilla.org/MPL/2.0/. DevToolsModules( - 'general.css', - 'headers-panel.css', - 'json-panel.css', - 'main.css', - 'search-box.css', - 'text-panel.css', - 'toolbar.css' + "general.css", + "headers-panel.css", + "json-panel.css", + "main.css", + "search-box.css", + "text-panel.css", + "toolbar.css", ) diff --git a/devtools/client/jsonview/lib/moz.build b/devtools/client/jsonview/lib/moz.build index fff9a99f90034a..461ef2e44506cd 100644 --- a/devtools/client/jsonview/lib/moz.build +++ b/devtools/client/jsonview/lib/moz.build @@ -4,6 +4,4 @@ # License, v. 2.0. If a copy of the MPL was not distributed with this # file, You can obtain one at http://mozilla.org/MPL/2.0/. 
-DevToolsModules( - 'require.js' -) +DevToolsModules("require.js") diff --git a/devtools/client/jsonview/moz.build b/devtools/client/jsonview/moz.build index a338923e47de48..29988e1f7bb4ef 100644 --- a/devtools/client/jsonview/moz.build +++ b/devtools/client/jsonview/moz.build @@ -4,25 +4,21 @@ # License, v. 2.0. If a copy of the MPL was not distributed with this # file, You can obtain one at http://mozilla.org/MPL/2.0/. -DIRS += [ - 'components', - 'css', - 'lib' -] +DIRS += ["components", "css", "lib"] DevToolsModules( - 'converter-child.js', - 'Converter.jsm', - 'json-viewer.js', - 'Sniffer.jsm', - 'viewer-config.js' + "converter-child.js", + "Converter.jsm", + "json-viewer.js", + "Sniffer.jsm", + "viewer-config.js", ) XPCOM_MANIFESTS += [ - 'components.conf', + "components.conf", ] -BROWSER_CHROME_MANIFESTS += ['test/browser.ini'] +BROWSER_CHROME_MANIFESTS += ["test/browser.ini"] -with Files('**'): - BUG_COMPONENT = ('DevTools', 'JSON Viewer') +with Files("**"): + BUG_COMPONENT = ("DevTools", "JSON Viewer") diff --git a/devtools/client/locales/moz.build b/devtools/client/locales/moz.build index aac3a838c4c239..d988c0ff9b162c 100644 --- a/devtools/client/locales/moz.build +++ b/devtools/client/locales/moz.build @@ -4,4 +4,4 @@ # License, v. 2.0. If a copy of the MPL was not distributed with this # file, You can obtain one at http://mozilla.org/MPL/2.0/. -JAR_MANIFESTS += ['jar.mn'] +JAR_MANIFESTS += ["jar.mn"] diff --git a/devtools/client/memory/actions/moz.build b/devtools/client/memory/actions/moz.build index 628c2488fc984a..939712ca32b6c6 100644 --- a/devtools/client/memory/actions/moz.build +++ b/devtools/client/memory/actions/moz.build @@ -4,17 +4,17 @@ # file, You can obtain one at http://mozilla.org/MPL/2.0/. DevToolsModules( - 'allocations.js', - 'census-display.js', - 'diffing.js', - 'filter.js', - 'front.js', - 'io.js', - 'label-display.js', - 'refresh.js', - 'sizes.js', - 'snapshot.js', - 'task-cache.js', - 'tree-map-display.js', - 'view.js', + "allocations.js", + "census-display.js", + "diffing.js", + "filter.js", + "front.js", + "io.js", + "label-display.js", + "refresh.js", + "sizes.js", + "snapshot.js", + "task-cache.js", + "tree-map-display.js", + "view.js", ) diff --git a/devtools/client/memory/components/moz.build b/devtools/client/memory/components/moz.build index 5172333c08d732..82739bf97f8d58 100644 --- a/devtools/client/memory/components/moz.build +++ b/devtools/client/memory/components/moz.build @@ -4,22 +4,22 @@ # file, You can obtain one at http://mozilla.org/MPL/2.0/. DIRS += [ - 'tree-map', + "tree-map", ] DevToolsModules( - 'Census.js', - 'CensusHeader.js', - 'CensusTreeItem.js', - 'DominatorTree.js', - 'DominatorTreeHeader.js', - 'DominatorTreeItem.js', - 'Heap.js', - 'Individuals.js', - 'IndividualsHeader.js', - 'List.js', - 'ShortestPaths.js', - 'SnapshotListItem.js', - 'Toolbar.js', - 'TreeMap.js', + "Census.js", + "CensusHeader.js", + "CensusTreeItem.js", + "DominatorTree.js", + "DominatorTreeHeader.js", + "DominatorTreeItem.js", + "Heap.js", + "Individuals.js", + "IndividualsHeader.js", + "List.js", + "ShortestPaths.js", + "SnapshotListItem.js", + "Toolbar.js", + "TreeMap.js", ) diff --git a/devtools/client/memory/components/tree-map/moz.build b/devtools/client/memory/components/tree-map/moz.build index aab19319179281..a9e590033924fb 100644 --- a/devtools/client/memory/components/tree-map/moz.build +++ b/devtools/client/memory/components/tree-map/moz.build @@ -4,9 +4,9 @@ # file, You can obtain one at http://mozilla.org/MPL/2.0/. 
DevToolsModules( - 'canvas-utils.js', - 'color-coarse-type.js', - 'drag-zoom.js', - 'draw.js', - 'start.js', + "canvas-utils.js", + "color-coarse-type.js", + "drag-zoom.js", + "draw.js", + "start.js", ) diff --git a/devtools/client/memory/moz.build b/devtools/client/memory/moz.build index e53ca900ad6ef8..3e3282bd2e991b 100644 --- a/devtools/client/memory/moz.build +++ b/devtools/client/memory/moz.build @@ -3,27 +3,27 @@ # License, v. 2.0. If a copy of the MPL was not distributed with this # file, You can obtain one at http://mozilla.org/MPL/2.0/. -with Files('**'): - BUG_COMPONENT = ('DevTools', 'Memory') +with Files("**"): + BUG_COMPONENT = ("DevTools", "Memory") DIRS += [ - 'actions', - 'components', - 'reducers', + "actions", + "components", + "reducers", ] DevToolsModules( - 'app.js', - 'constants.js', - 'dominator-tree-lazy-children.js', - 'initializer.js', - 'models.js', - 'panel.js', - 'reducers.js', - 'store.js', - 'utils.js', + "app.js", + "constants.js", + "dominator-tree-lazy-children.js", + "initializer.js", + "models.js", + "panel.js", + "reducers.js", + "store.js", + "utils.js", ) -BROWSER_CHROME_MANIFESTS += ['test/browser/browser.ini'] -XPCSHELL_TESTS_MANIFESTS += ['test/xpcshell/xpcshell.ini'] -MOCHITEST_CHROME_MANIFESTS += ['test/chrome/chrome.ini'] +BROWSER_CHROME_MANIFESTS += ["test/browser/browser.ini"] +XPCSHELL_TESTS_MANIFESTS += ["test/xpcshell/xpcshell.ini"] +MOCHITEST_CHROME_MANIFESTS += ["test/chrome/chrome.ini"] diff --git a/devtools/client/memory/reducers/moz.build b/devtools/client/memory/reducers/moz.build index af3e3c72a2998b..53677d1d8e0314 100644 --- a/devtools/client/memory/reducers/moz.build +++ b/devtools/client/memory/reducers/moz.build @@ -4,16 +4,16 @@ # file, You can obtain one at http://mozilla.org/MPL/2.0/. DevToolsModules( - 'allocations.js', - 'census-display.js', - 'diffing.js', - 'errors.js', - 'filter.js', - 'front.js', - 'individuals.js', - 'label-display.js', - 'sizes.js', - 'snapshots.js', - 'tree-map-display.js', - 'view.js', + "allocations.js", + "census-display.js", + "diffing.js", + "errors.js", + "filter.js", + "front.js", + "individuals.js", + "label-display.js", + "sizes.js", + "snapshots.js", + "tree-map-display.js", + "view.js", ) diff --git a/devtools/client/moz.build b/devtools/client/moz.build index b589c7ac4d2f03..58e1e68c912184 100644 --- a/devtools/client/moz.build +++ b/devtools/client/moz.build @@ -4,45 +4,45 @@ # License, v. 2.0. If a copy of the MPL was not distributed with this # file, You can obtain one at http://mozilla.org/MPL/2.0/. 
-include('../templates.mozbuild') +include("../templates.mozbuild") DIRS += [ - 'aboutdebugging', - 'accessibility', - 'application', - 'debugger', - 'dom', - 'framework', - 'fronts', - 'inspector', - 'jsonview', - 'locales', - 'memory', - 'netmonitor', - 'performance', - 'performance-new', - 'preferences', - 'responsive', - 'shared', - 'storage', - 'styleeditor', - 'themes', - 'webconsole', + "aboutdebugging", + "accessibility", + "application", + "debugger", + "dom", + "framework", + "fronts", + "inspector", + "jsonview", + "locales", + "memory", + "netmonitor", + "performance", + "performance-new", + "preferences", + "responsive", + "shared", + "storage", + "styleeditor", + "themes", + "webconsole", ] -JAR_MANIFESTS += ['jar.mn'] +JAR_MANIFESTS += ["jar.mn"] DevToolsModules( - 'constants.js', - 'definitions.js', - 'devtools-client.js', - 'menus.js', + "constants.js", + "definitions.js", + "devtools-client.js", + "menus.js", ) -if not CONFIG['MOZILLA_OFFICIAL']: +if not CONFIG["MOZILLA_OFFICIAL"]: DevToolsModules( - 'devtools-fission-prefs.js', + "devtools-fission-prefs.js", ) -with Files('**'): - BUG_COMPONENT = ('DevTools', 'General') +with Files("**"): + BUG_COMPONENT = ("DevTools", "General") diff --git a/devtools/client/netmonitor/moz.build b/devtools/client/netmonitor/moz.build index 1c47d76b357c42..d2176e4767c586 100644 --- a/devtools/client/netmonitor/moz.build +++ b/devtools/client/netmonitor/moz.build @@ -2,20 +2,15 @@ # License, v. 2.0. If a copy of the MPL was not distributed with this # file, You can obtain one at http://mozilla.org/MPL/2.0/. -DIRS += [ - 'src' -] +DIRS += ["src"] -DevToolsModules( - 'initializer.js', - 'panel.js' -) +DevToolsModules("initializer.js", "panel.js") -XPCSHELL_TESTS_MANIFESTS += ['test/xpcshell/xpcshell.ini'] +XPCSHELL_TESTS_MANIFESTS += ["test/xpcshell/xpcshell.ini"] -BROWSER_CHROME_MANIFESTS += ['test/browser.ini'] +BROWSER_CHROME_MANIFESTS += ["test/browser.ini"] -with Files('**'): - BUG_COMPONENT = ('DevTools', 'Netmonitor') +with Files("**"): + BUG_COMPONENT = ("DevTools", "Netmonitor") -SPHINX_TREES['/devtools/netmonitor'] = 'docs' +SPHINX_TREES["/devtools/netmonitor"] = "docs" diff --git a/devtools/client/netmonitor/src/actions/moz.build b/devtools/client/netmonitor/src/actions/moz.build index b47b8d8ab6497f..6644607d4bbdaa 100644 --- a/devtools/client/netmonitor/src/actions/moz.build +++ b/devtools/client/netmonitor/src/actions/moz.build @@ -3,15 +3,15 @@ # file, You can obtain one at http://mozilla.org/MPL/2.0/. DevToolsModules( - 'batching.js', - 'filters.js', - 'index.js', - 'messages.js', - 'request-blocking.js', - 'requests.js', - 'search.js', - 'selection.js', - 'sort.js', - 'timing-markers.js', - 'ui.js', + "batching.js", + "filters.js", + "index.js", + "messages.js", + "request-blocking.js", + "requests.js", + "search.js", + "selection.js", + "sort.js", + "timing-markers.js", + "ui.js", ) diff --git a/devtools/client/netmonitor/src/components/messages/moz.build b/devtools/client/netmonitor/src/components/messages/moz.build index c40fccfa2c2e74..b5b8751d30abbd 100644 --- a/devtools/client/netmonitor/src/components/messages/moz.build +++ b/devtools/client/netmonitor/src/components/messages/moz.build @@ -3,30 +3,30 @@ # file, You can obtain one at http://mozilla.org/MPL/2.0/. 
DIRS += [ - 'parsers', + "parsers", ] DevToolsModules( - 'cbor.js', - 'ColumnData.js', - 'ColumnEventName.js', - 'ColumnFinBit.js', - 'ColumnLastEventId.js', - 'ColumnMaskBit.js', - 'ColumnOpCode.js', - 'ColumnRetry.js', - 'ColumnSize.js', - 'ColumnTime.js', - 'MessageFilterMenu.js', - 'MessageListContent.js', - 'MessageListContextMenu.js', - 'MessageListHeader.js', - 'MessageListHeaderContextMenu.js', - 'MessageListItem.js', - 'MessagePayload.js', - 'MessagesView.js', - 'msgpack.js', - 'RawData.js', - 'StatusBar.js', - 'Toolbar.js', + "cbor.js", + "ColumnData.js", + "ColumnEventName.js", + "ColumnFinBit.js", + "ColumnLastEventId.js", + "ColumnMaskBit.js", + "ColumnOpCode.js", + "ColumnRetry.js", + "ColumnSize.js", + "ColumnTime.js", + "MessageFilterMenu.js", + "MessageListContent.js", + "MessageListContextMenu.js", + "MessageListHeader.js", + "MessageListHeaderContextMenu.js", + "MessageListItem.js", + "MessagePayload.js", + "MessagesView.js", + "msgpack.js", + "RawData.js", + "StatusBar.js", + "Toolbar.js", ) diff --git a/devtools/client/netmonitor/src/components/messages/parsers/moz.build b/devtools/client/netmonitor/src/components/messages/parsers/moz.build index 07fcfb62783bcc..6b1947e4a1896b 100644 --- a/devtools/client/netmonitor/src/components/messages/parsers/moz.build +++ b/devtools/client/netmonitor/src/components/messages/parsers/moz.build @@ -3,9 +3,9 @@ # file, You can obtain one at http://mozilla.org/MPL/2.0/. DIRS += [ - 'socket-io', - 'sockjs', - 'stomp', - 'signalr', - 'wamp', + "socket-io", + "sockjs", + "stomp", + "signalr", + "wamp", ] diff --git a/devtools/client/netmonitor/src/components/messages/parsers/signalr/moz.build b/devtools/client/netmonitor/src/components/messages/parsers/signalr/moz.build index b77840a3161c93..48329513f24f48 100644 --- a/devtools/client/netmonitor/src/components/messages/parsers/signalr/moz.build +++ b/devtools/client/netmonitor/src/components/messages/parsers/signalr/moz.build @@ -3,10 +3,10 @@ # file, You can obtain one at http://mozilla.org/MPL/2.0/. DevToolsModules( - 'HandshakeProtocol.js', - 'IHubProtocol.js', - 'index.js', - 'JSONHubProtocol.js', - 'TextMessageFormat.js', - 'Utils.js', + "HandshakeProtocol.js", + "IHubProtocol.js", + "index.js", + "JSONHubProtocol.js", + "TextMessageFormat.js", + "Utils.js", ) diff --git a/devtools/client/netmonitor/src/components/messages/parsers/socket-io/moz.build b/devtools/client/netmonitor/src/components/messages/parsers/socket-io/moz.build index f80fc49ebcb7ab..d38ca19dd65790 100644 --- a/devtools/client/netmonitor/src/components/messages/parsers/socket-io/moz.build +++ b/devtools/client/netmonitor/src/components/messages/parsers/socket-io/moz.build @@ -3,8 +3,8 @@ # file, You can obtain one at http://mozilla.org/MPL/2.0/. DevToolsModules( - 'binary.js', - 'component-emitter.js', - 'index.js', - 'is-buffer.js', + "binary.js", + "component-emitter.js", + "index.js", + "is-buffer.js", ) diff --git a/devtools/client/netmonitor/src/components/messages/parsers/sockjs/moz.build b/devtools/client/netmonitor/src/components/messages/parsers/sockjs/moz.build index d92bd460b95d3e..e1ca52aa96dc6a 100644 --- a/devtools/client/netmonitor/src/components/messages/parsers/sockjs/moz.build +++ b/devtools/client/netmonitor/src/components/messages/parsers/sockjs/moz.build @@ -3,5 +3,5 @@ # file, You can obtain one at http://mozilla.org/MPL/2.0/. 
DevToolsModules( - 'index.js', + "index.js", ) diff --git a/devtools/client/netmonitor/src/components/messages/parsers/stomp/moz.build b/devtools/client/netmonitor/src/components/messages/parsers/stomp/moz.build index 0d41ea8a97db49..420c7479db671f 100644 --- a/devtools/client/netmonitor/src/components/messages/parsers/stomp/moz.build +++ b/devtools/client/netmonitor/src/components/messages/parsers/stomp/moz.build @@ -3,8 +3,8 @@ # file, You can obtain one at http://mozilla.org/MPL/2.0/. DevToolsModules( - 'byte.js', - 'frame.js', - 'index.js', - 'parser.js', + "byte.js", + "frame.js", + "index.js", + "parser.js", ) diff --git a/devtools/client/netmonitor/src/components/messages/parsers/wamp/moz.build b/devtools/client/netmonitor/src/components/messages/parsers/wamp/moz.build index 86806b00dcf2a0..b6b7c910362f27 100644 --- a/devtools/client/netmonitor/src/components/messages/parsers/wamp/moz.build +++ b/devtools/client/netmonitor/src/components/messages/parsers/wamp/moz.build @@ -3,6 +3,6 @@ # file, You can obtain one at http://mozilla.org/MPL/2.0/. DevToolsModules( - 'arrayParser.js', - 'serializers.js', + "arrayParser.js", + "serializers.js", ) diff --git a/devtools/client/netmonitor/src/components/moz.build b/devtools/client/netmonitor/src/components/moz.build index 5ab16fd921ae80..e769a5b4ab1795 100644 --- a/devtools/client/netmonitor/src/components/moz.build +++ b/devtools/client/netmonitor/src/components/moz.build @@ -3,25 +3,25 @@ # file, You can obtain one at http://mozilla.org/MPL/2.0/. DIRS += [ - 'messages', - 'previews', - 'request-blocking', - 'request-details', - 'request-list', - 'search', + "messages", + "previews", + "request-blocking", + "request-details", + "request-list", + "search", ] DevToolsModules( - 'App.js', - 'CustomRequestPanel.js', - 'DropHarHandler.js', - 'MonitorPanel.js', - 'NetworkActionBar.js', - 'SecurityState.js', - 'SourceEditor.js', - 'StatisticsPanel.js', - 'StatusBar.js', - 'StatusCode.js', - 'TabboxPanel.js', - 'Toolbar.js', + "App.js", + "CustomRequestPanel.js", + "DropHarHandler.js", + "MonitorPanel.js", + "NetworkActionBar.js", + "SecurityState.js", + "SourceEditor.js", + "StatisticsPanel.js", + "StatusBar.js", + "StatusCode.js", + "TabboxPanel.js", + "Toolbar.js", ) diff --git a/devtools/client/netmonitor/src/components/previews/moz.build b/devtools/client/netmonitor/src/components/previews/moz.build index 9ee4d025228e2d..50c829e16df199 100644 --- a/devtools/client/netmonitor/src/components/previews/moz.build +++ b/devtools/client/netmonitor/src/components/previews/moz.build @@ -3,8 +3,5 @@ # file, You can obtain one at http://mozilla.org/MPL/2.0/. DevToolsModules( - 'HtmlPreview.js', - 'ImagePreview.js', - 'SourcePreview.js', - 'UrlPreview.js' + "HtmlPreview.js", "ImagePreview.js", "SourcePreview.js", "UrlPreview.js" ) diff --git a/devtools/client/netmonitor/src/components/request-blocking/moz.build b/devtools/client/netmonitor/src/components/request-blocking/moz.build index 2331241201c103..7ce0f7ecc6a4ee 100644 --- a/devtools/client/netmonitor/src/components/request-blocking/moz.build +++ b/devtools/client/netmonitor/src/components/request-blocking/moz.build @@ -3,5 +3,5 @@ # file, You can obtain one at http://mozilla.org/MPL/2.0/. 
DevToolsModules( - 'RequestBlockingPanel.js', + "RequestBlockingPanel.js", ) diff --git a/devtools/client/netmonitor/src/components/request-details/moz.build b/devtools/client/netmonitor/src/components/request-details/moz.build index 6dc4f1b6cd4755..09896f2de3fd56 100644 --- a/devtools/client/netmonitor/src/components/request-details/moz.build +++ b/devtools/client/netmonitor/src/components/request-details/moz.build @@ -3,15 +3,14 @@ # file, You can obtain one at http://mozilla.org/MPL/2.0/. DevToolsModules( - 'CachePanel.js', - 'CookiesPanel.js', - 'HeadersPanel.js', - 'NetworkDetailsBar.js', - 'PropertiesView.js', - 'RequestPanel.js', - 'ResponsePanel.js', - 'SecurityPanel.js', - 'StackTracePanel.js', - 'TimingsPanel.js' - + "CachePanel.js", + "CookiesPanel.js", + "HeadersPanel.js", + "NetworkDetailsBar.js", + "PropertiesView.js", + "RequestPanel.js", + "ResponsePanel.js", + "SecurityPanel.js", + "StackTracePanel.js", + "TimingsPanel.js", ) diff --git a/devtools/client/netmonitor/src/components/request-list/moz.build b/devtools/client/netmonitor/src/components/request-list/moz.build index 09cc98e5bedc89..91bc88048714d7 100644 --- a/devtools/client/netmonitor/src/components/request-list/moz.build +++ b/devtools/client/netmonitor/src/components/request-list/moz.build @@ -3,26 +3,26 @@ # file, You can obtain one at http://mozilla.org/MPL/2.0/. DevToolsModules( - 'RequestList.js', - 'RequestListColumnContentSize.js', - 'RequestListColumnCookies.js', - 'RequestListColumnDomain.js', - 'RequestListColumnFile.js', - 'RequestListColumnInitiator.js', - 'RequestListColumnMethod.js', - 'RequestListColumnProtocol.js', - 'RequestListColumnRemoteIP.js', - 'RequestListColumnResponseHeader.js', - 'RequestListColumnScheme.js', - 'RequestListColumnSetCookies.js', - 'RequestListColumnStatus.js', - 'RequestListColumnTime.js', - 'RequestListColumnTransferredSize.js', - 'RequestListColumnType.js', - 'RequestListColumnUrl.js', - 'RequestListColumnWaterfall.js', - 'RequestListContent.js', - 'RequestListEmptyNotice.js', - 'RequestListHeader.js', - 'RequestListItem.js', + "RequestList.js", + "RequestListColumnContentSize.js", + "RequestListColumnCookies.js", + "RequestListColumnDomain.js", + "RequestListColumnFile.js", + "RequestListColumnInitiator.js", + "RequestListColumnMethod.js", + "RequestListColumnProtocol.js", + "RequestListColumnRemoteIP.js", + "RequestListColumnResponseHeader.js", + "RequestListColumnScheme.js", + "RequestListColumnSetCookies.js", + "RequestListColumnStatus.js", + "RequestListColumnTime.js", + "RequestListColumnTransferredSize.js", + "RequestListColumnType.js", + "RequestListColumnUrl.js", + "RequestListColumnWaterfall.js", + "RequestListContent.js", + "RequestListEmptyNotice.js", + "RequestListHeader.js", + "RequestListItem.js", ) diff --git a/devtools/client/netmonitor/src/components/search/moz.build b/devtools/client/netmonitor/src/components/search/moz.build index 99ba9604bed61c..7c48392d10e2c9 100644 --- a/devtools/client/netmonitor/src/components/search/moz.build +++ b/devtools/client/netmonitor/src/components/search/moz.build @@ -3,8 +3,8 @@ # file, You can obtain one at http://mozilla.org/MPL/2.0/. 
DevToolsModules( - 'search-provider.js', - 'SearchPanel.js', - 'StatusBar.js', - 'Toolbar.js', + "search-provider.js", + "SearchPanel.js", + "StatusBar.js", + "Toolbar.js", ) diff --git a/devtools/client/netmonitor/src/connector/moz.build b/devtools/client/netmonitor/src/connector/moz.build index 02f30b745c92ef..1ab144ad587781 100644 --- a/devtools/client/netmonitor/src/connector/moz.build +++ b/devtools/client/netmonitor/src/connector/moz.build @@ -3,7 +3,7 @@ # file, You can obtain one at http://mozilla.org/MPL/2.0/. DevToolsModules( - 'firefox-connector.js', - 'firefox-data-provider.js', - 'index.js', + "firefox-connector.js", + "firefox-data-provider.js", + "index.js", ) diff --git a/devtools/client/netmonitor/src/har/moz.build b/devtools/client/netmonitor/src/har/moz.build index 0c236948813be0..84a7497990b789 100644 --- a/devtools/client/netmonitor/src/har/moz.build +++ b/devtools/client/netmonitor/src/har/moz.build @@ -3,18 +3,18 @@ # file, You can obtain one at http://mozilla.org/MPL/2.0/. DevToolsModules( - 'har-automation.js', - 'har-builder-utils.js', - 'har-builder.js', - 'har-collector.js', - 'har-exporter.js', - 'har-importer.js', - 'har-menu-utils.js', - 'har-utils.js', - 'toolbox-overlay.js', + "har-automation.js", + "har-builder-utils.js", + "har-builder.js", + "har-collector.js", + "har-exporter.js", + "har-importer.js", + "har-menu-utils.js", + "har-utils.js", + "toolbox-overlay.js", ) BROWSER_CHROME_MANIFESTS += [ - 'test/browser-harautomation.ini', - 'test/browser.ini', + "test/browser-harautomation.ini", + "test/browser.ini", ] diff --git a/devtools/client/netmonitor/src/middleware/moz.build b/devtools/client/netmonitor/src/middleware/moz.build index 4f70d1b5baa3ac..2020fae1b2f210 100644 --- a/devtools/client/netmonitor/src/middleware/moz.build +++ b/devtools/client/netmonitor/src/middleware/moz.build @@ -3,10 +3,10 @@ # file, You can obtain one at http://mozilla.org/MPL/2.0/. DevToolsModules( - 'batching.js', - 'event-telemetry.js', - 'prefs.js', - 'recording.js', - 'request-blocking.js', - 'throttling.js', + "batching.js", + "event-telemetry.js", + "prefs.js", + "recording.js", + "request-blocking.js", + "throttling.js", ) diff --git a/devtools/client/netmonitor/src/moz.build b/devtools/client/netmonitor/src/moz.build index fa7c16945b5976..b3ba8c50603380 100644 --- a/devtools/client/netmonitor/src/moz.build +++ b/devtools/client/netmonitor/src/moz.build @@ -3,21 +3,21 @@ # file, You can obtain one at http://mozilla.org/MPL/2.0/. DIRS += [ - 'actions', - 'components', - 'connector', - 'har', - 'middleware', - 'reducers', - 'selectors', - 'utils', - 'widgets', - 'workers', + "actions", + "components", + "connector", + "har", + "middleware", + "reducers", + "selectors", + "utils", + "widgets", + "workers", ] DevToolsModules( - 'api.js', - 'app.js', - 'constants.js', - 'create-store.js', + "api.js", + "app.js", + "constants.js", + "create-store.js", ) diff --git a/devtools/client/netmonitor/src/reducers/moz.build b/devtools/client/netmonitor/src/reducers/moz.build index 2b4cb712badbbe..bd9d2387612526 100644 --- a/devtools/client/netmonitor/src/reducers/moz.build +++ b/devtools/client/netmonitor/src/reducers/moz.build @@ -3,14 +3,14 @@ # file, You can obtain one at http://mozilla.org/MPL/2.0/. 
DevToolsModules( - 'batching.js', - 'filters.js', - 'index.js', - 'messages.js', - 'request-blocking.js', - 'requests.js', - 'search.js', - 'sort.js', - 'timing-markers.js', - 'ui.js', + "batching.js", + "filters.js", + "index.js", + "messages.js", + "request-blocking.js", + "requests.js", + "search.js", + "sort.js", + "timing-markers.js", + "ui.js", ) diff --git a/devtools/client/netmonitor/src/selectors/moz.build b/devtools/client/netmonitor/src/selectors/moz.build index b8c99dd44a6aea..dcc4eac91607ec 100644 --- a/devtools/client/netmonitor/src/selectors/moz.build +++ b/devtools/client/netmonitor/src/selectors/moz.build @@ -3,10 +3,10 @@ # file, You can obtain one at http://mozilla.org/MPL/2.0/. DevToolsModules( - 'index.js', - 'messages.js', - 'requests.js', - 'search.js', - 'timing-markers.js', - 'ui.js', + "index.js", + "messages.js", + "requests.js", + "search.js", + "timing-markers.js", + "ui.js", ) diff --git a/devtools/client/netmonitor/src/utils/firefox/moz.build b/devtools/client/netmonitor/src/utils/firefox/moz.build index 56ad27a435e75d..bb0acafdf77483 100644 --- a/devtools/client/netmonitor/src/utils/firefox/moz.build +++ b/devtools/client/netmonitor/src/utils/firefox/moz.build @@ -4,5 +4,5 @@ # file, You can obtain one at http://mozilla.org/MPL/2.0/. DevToolsModules( - 'open-request-in-tab.js', + "open-request-in-tab.js", ) diff --git a/devtools/client/netmonitor/src/utils/moz.build b/devtools/client/netmonitor/src/utils/moz.build index d602890c824c51..284980f37bd5b9 100644 --- a/devtools/client/netmonitor/src/utils/moz.build +++ b/devtools/client/netmonitor/src/utils/moz.build @@ -4,23 +4,23 @@ # file, You can obtain one at http://mozilla.org/MPL/2.0/. DIRS += [ - 'firefox', + "firefox", ] DevToolsModules( - 'context-menu-utils.js', - 'filter-autocomplete-provider.js', - 'filter-predicates.js', - 'filter-text-utils.js', - 'format-utils.js', - 'headers-provider.js', - 'l10n.js', - 'mdn-utils.js', - 'open-request-in-tab.js', - 'prefs.js', - 'request-blocking.js', - 'request-utils.js', - 'sort-predicates.js', - 'sort-utils.js', - 'tooltips.js' + "context-menu-utils.js", + "filter-autocomplete-provider.js", + "filter-predicates.js", + "filter-text-utils.js", + "format-utils.js", + "headers-provider.js", + "l10n.js", + "mdn-utils.js", + "open-request-in-tab.js", + "prefs.js", + "request-blocking.js", + "request-utils.js", + "sort-predicates.js", + "sort-utils.js", + "tooltips.js", ) diff --git a/devtools/client/netmonitor/src/widgets/moz.build b/devtools/client/netmonitor/src/widgets/moz.build index 4481702be9a852..106deca9357624 100644 --- a/devtools/client/netmonitor/src/widgets/moz.build +++ b/devtools/client/netmonitor/src/widgets/moz.build @@ -3,10 +3,10 @@ # file, You can obtain one at http://mozilla.org/MPL/2.0/. DevToolsModules( - 'HeadersPanelContextMenu.js', - 'PropertiesViewContextMenu.js', - 'RequestBlockingContextMenu.js', - 'RequestListContextMenu.js', - 'RequestListHeaderContextMenu.js', - 'WaterfallBackground.js', + "HeadersPanelContextMenu.js", + "PropertiesViewContextMenu.js", + "RequestBlockingContextMenu.js", + "RequestListContextMenu.js", + "RequestListHeaderContextMenu.js", + "WaterfallBackground.js", ) diff --git a/devtools/client/netmonitor/src/workers/moz.build b/devtools/client/netmonitor/src/workers/moz.build index e0b70c2e9acde4..618799caacc8d0 100644 --- a/devtools/client/netmonitor/src/workers/moz.build +++ b/devtools/client/netmonitor/src/workers/moz.build @@ -4,9 +4,9 @@ # file, You can obtain one at http://mozilla.org/MPL/2.0/. 
DIRS += [ - 'search', + "search", ] DevToolsModules( - 'worker-utils.js', + "worker-utils.js", ) diff --git a/devtools/client/netmonitor/src/workers/search/moz.build b/devtools/client/netmonitor/src/workers/search/moz.build index e1f13947bd2fc2..ee5a5dd9507d11 100644 --- a/devtools/client/netmonitor/src/workers/search/moz.build +++ b/devtools/client/netmonitor/src/workers/search/moz.build @@ -4,7 +4,7 @@ # file, You can obtain one at http://mozilla.org/MPL/2.0/. DevToolsModules( - 'index.js', - 'search.js', - 'worker.js', + "index.js", + "search.js", + "worker.js", ) diff --git a/devtools/client/performance-new/aboutprofiling/moz.build b/devtools/client/performance-new/aboutprofiling/moz.build index bcd0c15a303c85..958a9222362ee4 100644 --- a/devtools/client/performance-new/aboutprofiling/moz.build +++ b/devtools/client/performance-new/aboutprofiling/moz.build @@ -3,5 +3,5 @@ # License, v. 2.0. If a copy of the MPL was not distributed with this # file, You can obtain one at http://mozilla.org/MPL/2.0/. -with Files('**'): - BUG_COMPONENT = ('DevTools', 'Performance Tools (Profiler/Timeline)') +with Files("**"): + BUG_COMPONENT = ("DevTools", "Performance Tools (Profiler/Timeline)") diff --git a/devtools/client/performance-new/components/moz.build b/devtools/client/performance-new/components/moz.build index f56e1607a95c52..1215eb4ac05747 100644 --- a/devtools/client/performance-new/components/moz.build +++ b/devtools/client/performance-new/components/moz.build @@ -4,14 +4,14 @@ # file, You can obtain one at http://mozilla.org/MPL/2.0/. DevToolsModules( - 'AboutProfiling.js', - 'Description.js', - 'DevToolsPanel.js', - 'DevToolsPresetSelection.js', - 'DirectoryPicker.js', - 'Presets.js', - 'ProfilerEventHandling.js', - 'Range.js', - 'RecordingButton.js', - 'Settings.js', + "AboutProfiling.js", + "Description.js", + "DevToolsPanel.js", + "DevToolsPresetSelection.js", + "DirectoryPicker.js", + "Presets.js", + "ProfilerEventHandling.js", + "Range.js", + "RecordingButton.js", + "Settings.js", ) diff --git a/devtools/client/performance-new/moz.build b/devtools/client/performance-new/moz.build index 310ae5cb8bb09c..a0538297e1ca85 100644 --- a/devtools/client/performance-new/moz.build +++ b/devtools/client/performance-new/moz.build @@ -4,24 +4,24 @@ # file, You can obtain one at http://mozilla.org/MPL/2.0/. DIRS += [ - 'aboutprofiling', - 'components', - 'store', - 'popup', + "aboutprofiling", + "components", + "store", + "popup", ] DevToolsModules( - 'browser.js', - 'initializer.js', - 'panel.js', - 'preference-management.js', - 'symbolication.jsm.js', - 'typescript-lazy-load.jsm.js', - 'utils.js', + "browser.js", + "initializer.js", + "panel.js", + "preference-management.js", + "symbolication.jsm.js", + "typescript-lazy-load.jsm.js", + "utils.js", ) -BROWSER_CHROME_MANIFESTS += ['test/browser/browser.ini'] -XPCSHELL_TESTS_MANIFESTS += ['test/xpcshell/xpcshell.ini'] +BROWSER_CHROME_MANIFESTS += ["test/browser/browser.ini"] +XPCSHELL_TESTS_MANIFESTS += ["test/xpcshell/xpcshell.ini"] -with Files('**'): - BUG_COMPONENT = ('DevTools', 'Performance Tools (Profiler/Timeline)') +with Files("**"): + BUG_COMPONENT = ("DevTools", "Performance Tools (Profiler/Timeline)") diff --git a/devtools/client/performance-new/popup/moz.build b/devtools/client/performance-new/popup/moz.build index 57fd35f56c2ae3..857cc3a3c7c4b1 100644 --- a/devtools/client/performance-new/popup/moz.build +++ b/devtools/client/performance-new/popup/moz.build @@ -4,10 +4,10 @@ # file, You can obtain one at http://mozilla.org/MPL/2.0/. 
DevToolsModules( - 'background.jsm.js', - 'menu-button.jsm.js', - 'panel.jsm.js', + "background.jsm.js", + "menu-button.jsm.js", + "panel.jsm.js", ) -with Files('**'): - BUG_COMPONENT = ('DevTools', 'Performance Tools (Profiler/Timeline)') +with Files("**"): + BUG_COMPONENT = ("DevTools", "Performance Tools (Profiler/Timeline)") diff --git a/devtools/client/performance-new/store/moz.build b/devtools/client/performance-new/store/moz.build index b6d6252c829e47..16c3f8c65a2164 100644 --- a/devtools/client/performance-new/store/moz.build +++ b/devtools/client/performance-new/store/moz.build @@ -4,7 +4,7 @@ # file, You can obtain one at http://mozilla.org/MPL/2.0/. DevToolsModules( - 'actions.js', - 'reducers.js', - 'selectors.js', + "actions.js", + "reducers.js", + "selectors.js", ) diff --git a/devtools/client/performance/components/moz.build b/devtools/client/performance/components/moz.build index 0a930c622ef233..dc9ccff784f21c 100644 --- a/devtools/client/performance/components/moz.build +++ b/devtools/client/performance/components/moz.build @@ -4,16 +4,16 @@ # file, You can obtain one at http://mozilla.org/MPL/2.0/. DevToolsModules( - 'JITOptimizations.js', - 'JITOptimizationsItem.js', - 'RecordingButton.js', - 'RecordingControls.js', - 'RecordingList.js', - 'RecordingListItem.js', - 'Waterfall.js', - 'WaterfallHeader.js', - 'WaterfallTree.js', - 'WaterfallTreeRow.js', + "JITOptimizations.js", + "JITOptimizationsItem.js", + "RecordingButton.js", + "RecordingControls.js", + "RecordingList.js", + "RecordingListItem.js", + "Waterfall.js", + "WaterfallHeader.js", + "WaterfallTree.js", + "WaterfallTreeRow.js", ) -MOCHITEST_CHROME_MANIFESTS += ['chrome/chrome.ini'] +MOCHITEST_CHROME_MANIFESTS += ["chrome/chrome.ini"] diff --git a/devtools/client/performance/modules/logic/moz.build b/devtools/client/performance/modules/logic/moz.build index 179cd71b3c508a..01f77231d757fa 100644 --- a/devtools/client/performance/modules/logic/moz.build +++ b/devtools/client/performance/modules/logic/moz.build @@ -4,9 +4,9 @@ # file, You can obtain one at http://mozilla.org/MPL/2.0/. DevToolsModules( - 'frame-utils.js', - 'jit.js', - 'telemetry.js', - 'tree-model.js', - 'waterfall-utils.js', + "frame-utils.js", + "jit.js", + "telemetry.js", + "tree-model.js", + "waterfall-utils.js", ) diff --git a/devtools/client/performance/modules/moz.build b/devtools/client/performance/modules/moz.build index 45d2ae0d23f025..c05466902283a8 100644 --- a/devtools/client/performance/modules/moz.build +++ b/devtools/client/performance/modules/moz.build @@ -4,19 +4,19 @@ # file, You can obtain one at http://mozilla.org/MPL/2.0/. DIRS += [ - 'logic', - 'widgets', + "logic", + "widgets", ] DevToolsModules( - 'categories.js', - 'constants.js', - 'global.js', - 'io.js', - 'marker-blueprint-utils.js', - 'marker-dom-utils.js', - 'marker-formatters.js', - 'markers.js', - 'utils.js', - 'waterfall-ticks.js', + "categories.js", + "constants.js", + "global.js", + "io.js", + "marker-blueprint-utils.js", + "marker-dom-utils.js", + "marker-formatters.js", + "markers.js", + "utils.js", + "waterfall-ticks.js", ) diff --git a/devtools/client/performance/modules/widgets/moz.build b/devtools/client/performance/modules/widgets/moz.build index 9f733838a53b1d..d04890425c84d8 100644 --- a/devtools/client/performance/modules/widgets/moz.build +++ b/devtools/client/performance/modules/widgets/moz.build @@ -4,8 +4,8 @@ # file, You can obtain one at http://mozilla.org/MPL/2.0/. 
DevToolsModules( - 'graphs.js', - 'marker-details.js', - 'markers-overview.js', - 'tree-view.js', + "graphs.js", + "marker-details.js", + "markers-overview.js", + "tree-view.js", ) diff --git a/devtools/client/performance/moz.build b/devtools/client/performance/moz.build index 109e73d642ae47..bb8d3ac342a377 100644 --- a/devtools/client/performance/moz.build +++ b/devtools/client/performance/moz.build @@ -4,22 +4,22 @@ # file, You can obtain one at http://mozilla.org/MPL/2.0/. DIRS += [ - 'components', - 'modules', - 'test', - 'views', + "components", + "modules", + "test", + "views", ] DevToolsModules( - 'events.js', - 'initializer.js', - 'panel.js', - 'performance-controller.js', - 'performance-view.js', + "events.js", + "initializer.js", + "panel.js", + "performance-controller.js", + "performance-view.js", ) -BROWSER_CHROME_MANIFESTS += ['test/browser.ini'] -XPCSHELL_TESTS_MANIFESTS += ['test/xpcshell/xpcshell.ini'] +BROWSER_CHROME_MANIFESTS += ["test/browser.ini"] +XPCSHELL_TESTS_MANIFESTS += ["test/xpcshell/xpcshell.ini"] -with Files('**'): - BUG_COMPONENT = ('DevTools', 'Performance Tools (Profiler/Timeline)') +with Files("**"): + BUG_COMPONENT = ("DevTools", "Performance Tools (Profiler/Timeline)") diff --git a/devtools/client/performance/test/helpers/moz.build b/devtools/client/performance/test/helpers/moz.build index b858530d69c528..d1b1b65472e52b 100644 --- a/devtools/client/performance/test/helpers/moz.build +++ b/devtools/client/performance/test/helpers/moz.build @@ -5,16 +5,16 @@ # file, You can obtain one at http://mozilla.org/MPL/2.0/. DevToolsModules( - 'actions.js', - 'dom-utils.js', - 'event-utils.js', - 'input-utils.js', - 'panel-utils.js', - 'prefs.js', - 'profiler-mm-utils.js', - 'recording-utils.js', - 'synth-utils.js', - 'tab-utils.js', - 'urls.js', - 'wait-utils.js', + "actions.js", + "dom-utils.js", + "event-utils.js", + "input-utils.js", + "panel-utils.js", + "prefs.js", + "profiler-mm-utils.js", + "recording-utils.js", + "synth-utils.js", + "tab-utils.js", + "urls.js", + "wait-utils.js", ) diff --git a/devtools/client/performance/test/moz.build b/devtools/client/performance/test/moz.build index 6bdf1a01812ed7..728f6a155bde11 100644 --- a/devtools/client/performance/test/moz.build +++ b/devtools/client/performance/test/moz.build @@ -4,5 +4,5 @@ # file, You can obtain one at http://mozilla.org/MPL/2.0/. DIRS += [ - 'helpers', + "helpers", ] diff --git a/devtools/client/performance/views/moz.build b/devtools/client/performance/views/moz.build index a792fd6792ab77..d10fbf3dace13f 100644 --- a/devtools/client/performance/views/moz.build +++ b/devtools/client/performance/views/moz.build @@ -4,14 +4,14 @@ # file, You can obtain one at http://mozilla.org/MPL/2.0/. DevToolsModules( - 'details-abstract-subview.js', - 'details-js-call-tree.js', - 'details-js-flamegraph.js', - 'details-memory-call-tree.js', - 'details-memory-flamegraph.js', - 'details-waterfall.js', - 'details.js', - 'overview.js', - 'recordings.js', - 'toolbar.js', + "details-abstract-subview.js", + "details-js-call-tree.js", + "details-js-flamegraph.js", + "details-memory-call-tree.js", + "details-memory-flamegraph.js", + "details-waterfall.js", + "details.js", + "overview.js", + "recordings.js", + "toolbar.js", ) diff --git a/devtools/client/preferences/moz.build b/devtools/client/preferences/moz.build index 1af1809dd0a1ea..fa8847b64dbe33 100644 --- a/devtools/client/preferences/moz.build +++ b/devtools/client/preferences/moz.build @@ -5,5 +5,5 @@ # file, You can obtain one at http://mozilla.org/MPL/2.0/. 
JS_PREFERENCE_FILES += [ - 'debugger.js', + "debugger.js", ] diff --git a/devtools/client/responsive/actions/moz.build b/devtools/client/responsive/actions/moz.build index b87898ce98793d..d4fa0d243fbdcb 100644 --- a/devtools/client/responsive/actions/moz.build +++ b/devtools/client/responsive/actions/moz.build @@ -5,9 +5,9 @@ # file, You can obtain one at http://mozilla.org/MPL/2.0/. DevToolsModules( - 'devices.js', - 'index.js', - 'screenshot.js', - 'ui.js', - 'viewports.js', + "devices.js", + "index.js", + "screenshot.js", + "ui.js", + "viewports.js", ) diff --git a/devtools/client/responsive/components/moz.build b/devtools/client/responsive/components/moz.build index 4999ab8e58a907..04ef642afc500a 100644 --- a/devtools/client/responsive/components/moz.build +++ b/devtools/client/responsive/components/moz.build @@ -5,16 +5,16 @@ # file, You can obtain one at http://mozilla.org/MPL/2.0/. DevToolsModules( - 'App.js', - 'Device.js', - 'DeviceForm.js', - 'DeviceInfo.js', - 'DeviceList.js', - 'DeviceModal.js', - 'DevicePixelRatioMenu.js', - 'DeviceSelector.js', - 'SettingsMenu.js', - 'Toolbar.js', - 'UserAgentInput.js', - 'ViewportDimension.js', + "App.js", + "Device.js", + "DeviceForm.js", + "DeviceInfo.js", + "DeviceList.js", + "DeviceModal.js", + "DevicePixelRatioMenu.js", + "DeviceSelector.js", + "SettingsMenu.js", + "Toolbar.js", + "UserAgentInput.js", + "ViewportDimension.js", ) diff --git a/devtools/client/responsive/moz.build b/devtools/client/responsive/moz.build index 1f9956812211d2..5816bea1cd25e6 100644 --- a/devtools/client/responsive/moz.build +++ b/devtools/client/responsive/moz.build @@ -5,24 +5,24 @@ # file, You can obtain one at http://mozilla.org/MPL/2.0/. DIRS += [ - 'actions', - 'components', - 'reducers', - 'utils', + "actions", + "components", + "reducers", + "utils", ] DevToolsModules( - 'constants.js', - 'index.js', - 'manager.js', - 'reducers.js', - 'store.js', - 'types.js', - 'ui.js', + "constants.js", + "index.js", + "manager.js", + "reducers.js", + "store.js", + "types.js", + "ui.js", ) -XPCSHELL_TESTS_MANIFESTS += ['test/xpcshell/xpcshell.ini'] -BROWSER_CHROME_MANIFESTS += ['test/browser/browser.ini'] +XPCSHELL_TESTS_MANIFESTS += ["test/xpcshell/xpcshell.ini"] +BROWSER_CHROME_MANIFESTS += ["test/browser/browser.ini"] -with Files('**'): - BUG_COMPONENT = ('DevTools', 'Responsive Design Mode') +with Files("**"): + BUG_COMPONENT = ("DevTools", "Responsive Design Mode") diff --git a/devtools/client/responsive/reducers/moz.build b/devtools/client/responsive/reducers/moz.build index 1c246637e46df1..9f2fb54e161324 100644 --- a/devtools/client/responsive/reducers/moz.build +++ b/devtools/client/responsive/reducers/moz.build @@ -5,8 +5,8 @@ # file, You can obtain one at http://mozilla.org/MPL/2.0/. DevToolsModules( - 'devices.js', - 'screenshot.js', - 'ui.js', - 'viewports.js', + "devices.js", + "screenshot.js", + "ui.js", + "viewports.js", ) diff --git a/devtools/client/responsive/utils/moz.build b/devtools/client/responsive/utils/moz.build index 26f7b60589923a..503da8932a45dc 100644 --- a/devtools/client/responsive/utils/moz.build +++ b/devtools/client/responsive/utils/moz.build @@ -5,12 +5,12 @@ # file, You can obtain one at http://mozilla.org/MPL/2.0/. 
DevToolsModules( - 'e10s.js', - 'key.js', - 'l10n.js', - 'message.js', - 'notification.js', - 'orientation.js', - 'ua.js', - 'window.js', + "e10s.js", + "key.js", + "l10n.js", + "message.js", + "notification.js", + "orientation.js", + "ua.js", + "window.js", ) diff --git a/devtools/client/shared/components/menu/moz.build b/devtools/client/shared/components/menu/moz.build index ef538be286bf4a..08046199e5910c 100644 --- a/devtools/client/shared/components/menu/moz.build +++ b/devtools/client/shared/components/menu/moz.build @@ -5,8 +5,8 @@ # file, You can obtain one at http://mozilla.org/MPL/2.0/. DevToolsModules( - 'MenuButton.js', - 'MenuItem.js', - 'MenuList.js', - 'utils.js', + "MenuButton.js", + "MenuItem.js", + "MenuList.js", + "utils.js", ) diff --git a/devtools/client/shared/components/moz.build b/devtools/client/shared/components/moz.build index a366690b2ef03c..647d871d5e12ec 100644 --- a/devtools/client/shared/components/moz.build +++ b/devtools/client/shared/components/moz.build @@ -5,32 +5,32 @@ # file, You can obtain one at http://mozilla.org/MPL/2.0/. DIRS += [ - 'object-inspector', - 'menu', - 'reps', - 'splitter', - 'tabs', - 'throttling', - 'tree', + "object-inspector", + "menu", + "reps", + "splitter", + "tabs", + "throttling", + "tree", ] DevToolsModules( - 'Accordion.js', - 'Frame.js', - 'HSplitBox.js', - 'List.js', - 'MdnLink.js', - 'NotificationBox.js', - 'SearchBox.js', - 'SearchBoxAutocompletePopup.js', - 'Sidebar.js', - 'SidebarToggle.js', - 'SmartTrace.js', - 'StackTrace.js', - 'Tree.js', - 'VirtualizedTree.js', - 'VisibilityHandler.js', + "Accordion.js", + "Frame.js", + "HSplitBox.js", + "List.js", + "MdnLink.js", + "NotificationBox.js", + "SearchBox.js", + "SearchBoxAutocompletePopup.js", + "Sidebar.js", + "SidebarToggle.js", + "SmartTrace.js", + "StackTrace.js", + "Tree.js", + "VirtualizedTree.js", + "VisibilityHandler.js", ) -MOCHITEST_CHROME_MANIFESTS += ['test/chrome/chrome.ini'] -BROWSER_CHROME_MANIFESTS += ['test/browser/browser.ini'] +MOCHITEST_CHROME_MANIFESTS += ["test/chrome/chrome.ini"] +BROWSER_CHROME_MANIFESTS += ["test/browser/browser.ini"] diff --git a/devtools/client/shared/components/object-inspector/components/moz.build b/devtools/client/shared/components/object-inspector/components/moz.build index c27a13e15b5d90..a1744891f24a7d 100644 --- a/devtools/client/shared/components/object-inspector/components/moz.build +++ b/devtools/client/shared/components/object-inspector/components/moz.build @@ -5,6 +5,6 @@ # file, You can obtain one at http://mozilla.org/MPL/2.0/. DevToolsModules( - 'ObjectInspector.js', - 'ObjectInspectorItem.js', + "ObjectInspector.js", + "ObjectInspectorItem.js", ) diff --git a/devtools/client/shared/components/object-inspector/moz.build b/devtools/client/shared/components/object-inspector/moz.build index 33de4c98dddd20..14f9c285ba5976 100644 --- a/devtools/client/shared/components/object-inspector/moz.build +++ b/devtools/client/shared/components/object-inspector/moz.build @@ -5,12 +5,12 @@ # file, You can obtain one at http://mozilla.org/MPL/2.0/. 
DIRS += [ - 'components', - 'utils', + "components", + "utils", ] DevToolsModules( - 'actions.js', - 'index.js', - 'reducer.js', + "actions.js", + "index.js", + "reducer.js", ) diff --git a/devtools/client/shared/components/object-inspector/object-inspector/components/moz.build b/devtools/client/shared/components/object-inspector/object-inspector/components/moz.build index c27a13e15b5d90..a1744891f24a7d 100644 --- a/devtools/client/shared/components/object-inspector/object-inspector/components/moz.build +++ b/devtools/client/shared/components/object-inspector/object-inspector/components/moz.build @@ -5,6 +5,6 @@ # file, You can obtain one at http://mozilla.org/MPL/2.0/. DevToolsModules( - 'ObjectInspector.js', - 'ObjectInspectorItem.js', + "ObjectInspector.js", + "ObjectInspectorItem.js", ) diff --git a/devtools/client/shared/components/object-inspector/object-inspector/moz.build b/devtools/client/shared/components/object-inspector/object-inspector/moz.build index 33de4c98dddd20..14f9c285ba5976 100644 --- a/devtools/client/shared/components/object-inspector/object-inspector/moz.build +++ b/devtools/client/shared/components/object-inspector/object-inspector/moz.build @@ -5,12 +5,12 @@ # file, You can obtain one at http://mozilla.org/MPL/2.0/. DIRS += [ - 'components', - 'utils', + "components", + "utils", ] DevToolsModules( - 'actions.js', - 'index.js', - 'reducer.js', + "actions.js", + "index.js", + "reducer.js", ) diff --git a/devtools/client/shared/components/object-inspector/object-inspector/utils/moz.build b/devtools/client/shared/components/object-inspector/object-inspector/utils/moz.build index ef06fba364deb6..1301b2aca6ffa9 100644 --- a/devtools/client/shared/components/object-inspector/object-inspector/utils/moz.build +++ b/devtools/client/shared/components/object-inspector/object-inspector/utils/moz.build @@ -5,9 +5,9 @@ # file, You can obtain one at http://mozilla.org/MPL/2.0/. DevToolsModules( - 'client.js', - 'index.js', - 'load-properties.js', - 'node.js', - 'selection.js', + "client.js", + "index.js", + "load-properties.js", + "node.js", + "selection.js", ) diff --git a/devtools/client/shared/components/object-inspector/utils/moz.build b/devtools/client/shared/components/object-inspector/utils/moz.build index ef06fba364deb6..1301b2aca6ffa9 100644 --- a/devtools/client/shared/components/object-inspector/utils/moz.build +++ b/devtools/client/shared/components/object-inspector/utils/moz.build @@ -5,9 +5,9 @@ # file, You can obtain one at http://mozilla.org/MPL/2.0/. DevToolsModules( - 'client.js', - 'index.js', - 'load-properties.js', - 'node.js', - 'selection.js', + "client.js", + "index.js", + "load-properties.js", + "node.js", + "selection.js", ) diff --git a/devtools/client/shared/components/reps/moz.build b/devtools/client/shared/components/reps/moz.build index dceb52cb605dff..058e8046a7b43b 100644 --- a/devtools/client/shared/components/reps/moz.build +++ b/devtools/client/shared/components/reps/moz.build @@ -5,10 +5,10 @@ # file, You can obtain one at http://mozilla.org/MPL/2.0/. DIRS += [ - 'reps', - 'shared', + "reps", + "shared", ] DevToolsModules( - 'index.js', + "index.js", ) diff --git a/devtools/client/shared/components/reps/reps/moz.build b/devtools/client/shared/components/reps/reps/moz.build index 1e6e955f81e1eb..be4cdd50307b23 100644 --- a/devtools/client/shared/components/reps/reps/moz.build +++ b/devtools/client/shared/components/reps/reps/moz.build @@ -5,40 +5,40 @@ # file, You can obtain one at http://mozilla.org/MPL/2.0/. 
DevToolsModules( - 'accessible.js', - 'accessor.js', - 'array.js', - 'attribute.js', - 'big-int.js', - 'comment-node.js', - 'constants.js', - 'date-time.js', - 'document-type.js', - 'document.js', - 'element-node.js', - 'error.js', - 'event.js', - 'function.js', - 'grip-array.js', - 'grip-map-entry.js', - 'grip-map.js', - 'grip.js', - 'infinity.js', - 'nan.js', - 'null.js', - 'number.js', - 'object-with-text.js', - 'object-with-url.js', - 'object.js', - 'promise.js', - 'prop-rep.js', - 'regexp.js', - 'rep-utils.js', - 'rep.js', - 'string.js', - 'stylesheet.js', - 'symbol.js', - 'text-node.js', - 'undefined.js', - 'window.js' + "accessible.js", + "accessor.js", + "array.js", + "attribute.js", + "big-int.js", + "comment-node.js", + "constants.js", + "date-time.js", + "document-type.js", + "document.js", + "element-node.js", + "error.js", + "event.js", + "function.js", + "grip-array.js", + "grip-map-entry.js", + "grip-map.js", + "grip.js", + "infinity.js", + "nan.js", + "null.js", + "number.js", + "object-with-text.js", + "object-with-url.js", + "object.js", + "promise.js", + "prop-rep.js", + "regexp.js", + "rep-utils.js", + "rep.js", + "string.js", + "stylesheet.js", + "symbol.js", + "text-node.js", + "undefined.js", + "window.js", ) diff --git a/devtools/client/shared/components/reps/shared/moz.build b/devtools/client/shared/components/reps/shared/moz.build index 9dd20492df8e36..6704491b97617e 100644 --- a/devtools/client/shared/components/reps/shared/moz.build +++ b/devtools/client/shared/components/reps/shared/moz.build @@ -5,6 +5,6 @@ # file, You can obtain one at http://mozilla.org/MPL/2.0/. DevToolsModules( - 'dom-node-constants.js', - 'grip-length-bubble.js', + "dom-node-constants.js", + "grip-length-bubble.js", ) diff --git a/devtools/client/shared/components/splitter/moz.build b/devtools/client/shared/components/splitter/moz.build index 41efbece0263d7..4abe762b34af4d 100644 --- a/devtools/client/shared/components/splitter/moz.build +++ b/devtools/client/shared/components/splitter/moz.build @@ -5,7 +5,7 @@ # file, You can obtain one at http://mozilla.org/MPL/2.0/. DevToolsModules( - 'Draggable.js', - 'GridElementWidthResizer.js', - 'SplitBox.js', + "Draggable.js", + "GridElementWidthResizer.js", + "SplitBox.js", ) diff --git a/devtools/client/shared/components/tabs/moz.build b/devtools/client/shared/components/tabs/moz.build index d28fc33adfdf88..15ede75b9db593 100644 --- a/devtools/client/shared/components/tabs/moz.build +++ b/devtools/client/shared/components/tabs/moz.build @@ -5,6 +5,6 @@ # file, You can obtain one at http://mozilla.org/MPL/2.0/. DevToolsModules( - 'TabBar.js', - 'Tabs.js', + "TabBar.js", + "Tabs.js", ) diff --git a/devtools/client/shared/components/throttling/moz.build b/devtools/client/shared/components/throttling/moz.build index 4547cae1f6bfd8..2c178219fcfa3b 100644 --- a/devtools/client/shared/components/throttling/moz.build +++ b/devtools/client/shared/components/throttling/moz.build @@ -5,9 +5,9 @@ # file, You can obtain one at http://mozilla.org/MPL/2.0/. 
DevToolsModules( - 'actions.js', - 'NetworkThrottlingMenu.js', - 'profiles.js', - 'reducer.js', - 'types.js', + "actions.js", + "NetworkThrottlingMenu.js", + "profiles.js", + "reducer.js", + "types.js", ) diff --git a/devtools/client/shared/components/tree/moz.build b/devtools/client/shared/components/tree/moz.build index ba5fb1c1671ded..0700575f17c308 100644 --- a/devtools/client/shared/components/tree/moz.build +++ b/devtools/client/shared/components/tree/moz.build @@ -4,10 +4,10 @@ # file, You can obtain one at http://mozilla.org/MPL/2.0/. DevToolsModules( - 'LabelCell.js', - 'ObjectProvider.js', - 'TreeCell.js', - 'TreeHeader.js', - 'TreeRow.js', - 'TreeView.js', + "LabelCell.js", + "ObjectProvider.js", + "TreeCell.js", + "TreeHeader.js", + "TreeRow.js", + "TreeView.js", ) diff --git a/devtools/client/shared/fluent-l10n/moz.build b/devtools/client/shared/fluent-l10n/moz.build index 0928a438fd65d8..654348ea26697a 100644 --- a/devtools/client/shared/fluent-l10n/moz.build +++ b/devtools/client/shared/fluent-l10n/moz.build @@ -5,5 +5,5 @@ # file, You can obtain one at http://mozilla.org/MPL/2.0/. DevToolsModules( - 'fluent-l10n.js', + "fluent-l10n.js", ) diff --git a/devtools/client/shared/moz.build b/devtools/client/shared/moz.build index a75abfe05d40d2..c2557a52533774 100644 --- a/devtools/client/shared/moz.build +++ b/devtools/client/shared/moz.build @@ -4,65 +4,65 @@ # License, v. 2.0. If a copy of the MPL was not distributed with this # file, You can obtain one at http://mozilla.org/MPL/2.0/. -BROWSER_CHROME_MANIFESTS += ['test/browser.ini'] -XPCSHELL_TESTS_MANIFESTS += ['test/xpcshell/xpcshell.ini'] +BROWSER_CHROME_MANIFESTS += ["test/browser.ini"] +XPCSHELL_TESTS_MANIFESTS += ["test/xpcshell/xpcshell.ini"] TEST_HARNESS_FILES.xpcshell.devtools.client.shared.test += [ - 'test/shared-redux-head.js', + "test/shared-redux-head.js", ] DIRS += [ - 'components', - 'fluent-l10n', - 'redux', - 'remote-debugging', - 'source-map', - 'sourceeditor', - 'vendor', - 'widgets', + "components", + "fluent-l10n", + "redux", + "remote-debugging", + "source-map", + "sourceeditor", + "vendor", + "widgets", ] DevToolsModules( - 'async-store-helper.js', - 'autocomplete-popup.js', - 'browser-loader-mocks.js', - 'browser-loader.js', - 'css-angle.js', - 'curl.js', - 'demangle.js', - 'devices.js', - 'enum.js', - 'events.js', - 'focus.js', - 'getjson.js', - 'inplace-editor.js', - 'key-shortcuts.js', - 'keycodes.js', - 'link.js', - 'node-attribute-parser.js', - 'options-view.js', - 'output-parser.js', - 'poller.js', - 'prefs.js', - 'react-utils.js', - 'scroll.js', - 'source-utils.js', - 'SplitView.jsm', - 'stylesheet-utils.js', - 'suggestion-picker.js', - 'telemetry.js', - 'theme.js', - 'thread-utils.js', - 'undo.js', - 'unicode-url.js', - 'view-source.js', - 'WeakMapMap.js', - 'webgl-utils.js', - 'workers-listener.js', - 'zoom-keys.js', + "async-store-helper.js", + "autocomplete-popup.js", + "browser-loader-mocks.js", + "browser-loader.js", + "css-angle.js", + "curl.js", + "demangle.js", + "devices.js", + "enum.js", + "events.js", + "focus.js", + "getjson.js", + "inplace-editor.js", + "key-shortcuts.js", + "keycodes.js", + "link.js", + "node-attribute-parser.js", + "options-view.js", + "output-parser.js", + "poller.js", + "prefs.js", + "react-utils.js", + "scroll.js", + "source-utils.js", + "SplitView.jsm", + "stylesheet-utils.js", + "suggestion-picker.js", + "telemetry.js", + "theme.js", + "thread-utils.js", + "undo.js", + "unicode-url.js", + "view-source.js", + "WeakMapMap.js", + "webgl-utils.js", + 
"workers-listener.js", + "zoom-keys.js", ) -with Files('**'): - BUG_COMPONENT = ('DevTools', 'General') +with Files("**"): + BUG_COMPONENT = ("DevTools", "General") -with Files('components/**'): - BUG_COMPONENT = ('DevTools', 'Shared Components') +with Files("components/**"): + BUG_COMPONENT = ("DevTools", "Shared Components") diff --git a/devtools/client/shared/redux/middleware/moz.build b/devtools/client/shared/redux/middleware/moz.build index 613b6190f30aeb..0c8f14c59ad747 100644 --- a/devtools/client/shared/redux/middleware/moz.build +++ b/devtools/client/shared/redux/middleware/moz.build @@ -5,15 +5,15 @@ # file, You can obtain one at http://mozilla.org/MPL/2.0/. DevToolsModules( - 'debounce.js', - 'history.js', - 'ignore.js', - 'log.js', - 'performance-marker.js', - 'promise.js', - 'task.js', - 'thunk.js', - 'wait-service.js', + "debounce.js", + "history.js", + "ignore.js", + "log.js", + "performance-marker.js", + "promise.js", + "task.js", + "thunk.js", + "wait-service.js", ) -XPCSHELL_TESTS_MANIFESTS += ['xpcshell/xpcshell.ini'] +XPCSHELL_TESTS_MANIFESTS += ["xpcshell/xpcshell.ini"] diff --git a/devtools/client/shared/redux/moz.build b/devtools/client/shared/redux/moz.build index 328d1ffa77e26f..e34cd2645ef425 100644 --- a/devtools/client/shared/redux/moz.build +++ b/devtools/client/shared/redux/moz.build @@ -5,12 +5,12 @@ # file, You can obtain one at http://mozilla.org/MPL/2.0/. DIRS += [ - 'middleware', + "middleware", ] DevToolsModules( - 'create-store.js', - 'non-react-subscriber.js', - 'subscriber.js', - 'visibility-handler-connect.js', + "create-store.js", + "non-react-subscriber.js", + "subscriber.js", + "visibility-handler-connect.js", ) diff --git a/devtools/client/shared/remote-debugging/adb/commands/moz.build b/devtools/client/shared/remote-debugging/adb/commands/moz.build index 6f2b0f13afc7dd..ecd3428a1bb748 100644 --- a/devtools/client/shared/remote-debugging/adb/commands/moz.build +++ b/devtools/client/shared/remote-debugging/adb/commands/moz.build @@ -3,10 +3,10 @@ # file, You can obtain one at http://mozilla.org/MPL/2.0/. DevToolsModules( - 'index.js', - 'list-devices.js', - 'prepare-tcp-connection.js', - 'run-command.js', - 'shell.js', - 'track-devices.js', + "index.js", + "list-devices.js", + "prepare-tcp-connection.js", + "run-command.js", + "shell.js", + "track-devices.js", ) diff --git a/devtools/client/shared/remote-debugging/adb/moz.build b/devtools/client/shared/remote-debugging/adb/moz.build index b8ac4ed3aba955..9210dec08ee7f1 100644 --- a/devtools/client/shared/remote-debugging/adb/moz.build +++ b/devtools/client/shared/remote-debugging/adb/moz.build @@ -3,22 +3,22 @@ # file, You can obtain one at http://mozilla.org/MPL/2.0/. 
DIRS += [ - 'commands', + "commands", ] DevToolsModules( - 'adb-addon.js', - 'adb-binary.js', - 'adb-client.js', - 'adb-device.js', - 'adb-process.js', - 'adb-running-checker.js', - 'adb-runtime.js', - 'adb-socket.js', - 'adb.js', + "adb-addon.js", + "adb-binary.js", + "adb-client.js", + "adb-device.js", + "adb-process.js", + "adb-running-checker.js", + "adb-runtime.js", + "adb-socket.js", + "adb.js", ) -with Files('**'): - BUG_COMPONENT = ('DevTools', 'about:debugging') +with Files("**"): + BUG_COMPONENT = ("DevTools", "about:debugging") -XPCSHELL_TESTS_MANIFESTS += ['xpcshell/xpcshell.ini'] +XPCSHELL_TESTS_MANIFESTS += ["xpcshell/xpcshell.ini"] diff --git a/devtools/client/shared/remote-debugging/adb/xpcshell/adb.py b/devtools/client/shared/remote-debugging/adb/xpcshell/adb.py index 8fe7af7472ebf9..d4a515c6e26302 100644 --- a/devtools/client/shared/remote-debugging/adb/xpcshell/adb.py +++ b/devtools/client/shared/remote-debugging/adb/xpcshell/adb.py @@ -13,13 +13,13 @@ import SocketServer import sys -HOST = '127.0.0.1' +HOST = "127.0.0.1" PORT = 5037 class ADBRequestHandler(SocketServer.BaseRequestHandler): def sendData(self, data): - header = 'OKAY%04x' % len(data) + header = "OKAY%04x" % len(data) all_data = header + data total_length = len(all_data) sent_length = 0 @@ -34,19 +34,19 @@ def sendData(self, data): def handle(self): while True: data = self.request.recv(4096) - if 'host:kill' in data: - self.sendData('') + if "host:kill" in data: + self.sendData("") # Implicitly close all open sockets by exiting the program. # This should be done ASAP, because upon receiving the OKAY, # the client expects adb to have released the server's port. os._exit(0) break - elif 'host:version' in data: - self.sendData('001F') + elif "host:version" in data: + self.sendData("001F") self.request.close() break - elif 'host:track-devices' in data: - self.sendData('1234567890\tdevice') + elif "host:track-devices" in data: + self.sendData("1234567890\tdevice") break @@ -54,13 +54,12 @@ class ADBServer(SocketServer.TCPServer): def __init__(self, server_address): # Create a SocketServer with bind_and_activate 'False' to set # allow_reuse_address before binding. - SocketServer.TCPServer.__init__(self, - server_address, - ADBRequestHandler, - bind_and_activate=False) + SocketServer.TCPServer.__init__( + self, server_address, ADBRequestHandler, bind_and_activate=False + ) -if len(sys.argv) == 2 and sys.argv[1] == 'start-server': +if len(sys.argv) == 2 and sys.argv[1] == "start-server": # daemonize if os.fork() > 0: sys.exit(0) diff --git a/devtools/client/shared/remote-debugging/moz.build b/devtools/client/shared/remote-debugging/moz.build index 7c8c8e445f8f97..da4494b1de3831 100644 --- a/devtools/client/shared/remote-debugging/moz.build +++ b/devtools/client/shared/remote-debugging/moz.build @@ -5,18 +5,16 @@ # file, You can obtain one at http://mozilla.org/MPL/2.0/. 
DIRS += [ - 'adb', + "adb", ] DevToolsModules( - 'constants.js', - 'remote-client-manager.js', - 'version-checker.js', + "constants.js", + "remote-client-manager.js", + "version-checker.js", ) -XPCSHELL_TESTS_MANIFESTS += [ - 'test/xpcshell/xpcshell.ini' -] +XPCSHELL_TESTS_MANIFESTS += ["test/xpcshell/xpcshell.ini"] -with Files('**'): - BUG_COMPONENT = ('DevTools', 'about:debugging') \ No newline at end of file +with Files("**"): + BUG_COMPONENT = ("DevTools", "about:debugging") diff --git a/devtools/client/shared/sourceeditor/moz.build b/devtools/client/shared/sourceeditor/moz.build index 080999f1dcfd80..4a356f86d69842 100644 --- a/devtools/client/shared/sourceeditor/moz.build +++ b/devtools/client/shared/sourceeditor/moz.build @@ -5,14 +5,14 @@ # file, You can obtain one at http://mozilla.org/MPL/2.0/. DevToolsModules( - 'autocomplete.js', - 'css-autocompleter.js', - 'editor-commands-controller.js', - 'editor.js', - 'wasm.js' + "autocomplete.js", + "css-autocompleter.js", + "editor-commands-controller.js", + "editor.js", + "wasm.js", ) -BROWSER_CHROME_MANIFESTS += ['test/browser.ini'] +BROWSER_CHROME_MANIFESTS += ["test/browser.ini"] -with Files('**'): - BUG_COMPONENT = ('DevTools', 'Source Editor') +with Files("**"): + BUG_COMPONENT = ("DevTools", "Source Editor") diff --git a/devtools/client/shared/widgets/moz.build b/devtools/client/shared/widgets/moz.build index 5110009a2eada9..7c1a23d4dbae7e 100644 --- a/devtools/client/shared/widgets/moz.build +++ b/devtools/client/shared/widgets/moz.build @@ -5,23 +5,23 @@ # file, You can obtain one at http://mozilla.org/MPL/2.0/. DIRS += [ - 'tooltip', + "tooltip", ] DevToolsModules( - 'AbstractTreeItem.jsm', - 'Chart.js', - 'CubicBezierPresets.js', - 'CubicBezierWidget.js', - 'FilterWidget.js', - 'FlameGraph.js', - 'Graphs.js', - 'GraphsWorker.js', - 'LineGraphWidget.js', - 'MountainGraphWidget.js', - 'ShapesInContextEditor.js', - 'Spectrum.js', - 'TableWidget.js', - 'TreeWidget.js', - 'view-helpers.js', + "AbstractTreeItem.jsm", + "Chart.js", + "CubicBezierPresets.js", + "CubicBezierWidget.js", + "FilterWidget.js", + "FlameGraph.js", + "Graphs.js", + "GraphsWorker.js", + "LineGraphWidget.js", + "MountainGraphWidget.js", + "ShapesInContextEditor.js", + "Spectrum.js", + "TableWidget.js", + "TreeWidget.js", + "view-helpers.js", ) diff --git a/devtools/client/shared/widgets/tooltip/moz.build b/devtools/client/shared/widgets/tooltip/moz.build index 83a145b7aa5848..b441e61a8884d0 100644 --- a/devtools/client/shared/widgets/tooltip/moz.build +++ b/devtools/client/shared/widgets/tooltip/moz.build @@ -5,17 +5,17 @@ # file, You can obtain one at http://mozilla.org/MPL/2.0/. 
DevToolsModules( - 'css-compatibility-tooltip-helper.js', - 'EventTooltipHelper.js', - 'HTMLTooltip.js', - 'ImageTooltipHelper.js', - 'inactive-css-tooltip-helper.js', - 'InlineTooltip.js', - 'RulePreviewTooltip.js', - 'SwatchBasedEditorTooltip.js', - 'SwatchColorPickerTooltip.js', - 'SwatchCubicBezierTooltip.js', - 'SwatchFilterTooltip.js', - 'TooltipToggle.js', - 'VariableTooltipHelper.js' + "css-compatibility-tooltip-helper.js", + "EventTooltipHelper.js", + "HTMLTooltip.js", + "ImageTooltipHelper.js", + "inactive-css-tooltip-helper.js", + "InlineTooltip.js", + "RulePreviewTooltip.js", + "SwatchBasedEditorTooltip.js", + "SwatchColorPickerTooltip.js", + "SwatchCubicBezierTooltip.js", + "SwatchFilterTooltip.js", + "TooltipToggle.js", + "VariableTooltipHelper.js", ) diff --git a/devtools/client/storage/moz.build b/devtools/client/storage/moz.build index 50abafe3d853de..3d421e3c309b4c 100644 --- a/devtools/client/storage/moz.build +++ b/devtools/client/storage/moz.build @@ -4,17 +4,17 @@ # License, v. 2.0. If a copy of the MPL was not distributed with this # file, You can obtain one at http://mozilla.org/MPL/2.0/. -BROWSER_CHROME_MANIFESTS += ['test/browser.ini'] +BROWSER_CHROME_MANIFESTS += ["test/browser.ini"] DIRS += [ - 'utils', + "utils", ] DevToolsModules( - 'panel.js', - 'ui.js', - 'VariablesView.jsm', + "panel.js", + "ui.js", + "VariablesView.jsm", ) -with Files('**'): - BUG_COMPONENT = ('DevTools', 'Storage Inspector') +with Files("**"): + BUG_COMPONENT = ("DevTools", "Storage Inspector") diff --git a/devtools/client/storage/utils/moz.build b/devtools/client/storage/utils/moz.build index fbb7f5b37d4645..c9c54a9cf48a90 100644 --- a/devtools/client/storage/utils/moz.build +++ b/devtools/client/storage/utils/moz.build @@ -3,6 +3,6 @@ # file, You can obtain one at http://mozilla.org/MPL/2.0/. DevToolsModules( - 'l10n.js', - 'mdn-utils.js', + "l10n.js", + "mdn-utils.js", ) diff --git a/devtools/client/styleeditor/moz.build b/devtools/client/styleeditor/moz.build index ac314a710fae44..e02ef7a49ba53b 100644 --- a/devtools/client/styleeditor/moz.build +++ b/devtools/client/styleeditor/moz.build @@ -4,15 +4,15 @@ # License, v. 2.0. If a copy of the MPL was not distributed with this # file, You can obtain one at http://mozilla.org/MPL/2.0/. -BROWSER_CHROME_MANIFESTS += ['test/browser.ini'] +BROWSER_CHROME_MANIFESTS += ["test/browser.ini"] DevToolsModules( - 'original-source.js', - 'panel.js', - 'StyleEditorUI.jsm', - 'StyleEditorUtil.jsm', - 'StyleSheetEditor.jsm', + "original-source.js", + "panel.js", + "StyleEditorUI.jsm", + "StyleEditorUtil.jsm", + "StyleSheetEditor.jsm", ) -with Files('**'): - BUG_COMPONENT = ('DevTools', 'Style Editor') +with Files("**"): + BUG_COMPONENT = ("DevTools", "Style Editor") diff --git a/devtools/client/themes/audio/moz.build b/devtools/client/themes/audio/moz.build index b68b29b8d55584..b5c0e9b75448a7 100644 --- a/devtools/client/themes/audio/moz.build +++ b/devtools/client/themes/audio/moz.build @@ -5,5 +5,5 @@ # file, You can obtain one at http://mozilla.org/MPL/2.0/. DevToolsModules( - 'shutter.wav', + "shutter.wav", ) diff --git a/devtools/client/themes/moz.build b/devtools/client/themes/moz.build index 99665d5b4bc821..a8e889cb3d9b03 100644 --- a/devtools/client/themes/moz.build +++ b/devtools/client/themes/moz.build @@ -5,5 +5,5 @@ # file, You can obtain one at http://mozilla.org/MPL/2.0/. 
DIRS += [ - 'audio', + "audio", ] diff --git a/devtools/client/webconsole/actions/moz.build b/devtools/client/webconsole/actions/moz.build index ea9f6a1bfa81ae..5b06311064430d 100644 --- a/devtools/client/webconsole/actions/moz.build +++ b/devtools/client/webconsole/actions/moz.build @@ -4,14 +4,14 @@ # file, You can obtain one at http://mozilla.org/MPL/2.0/. DevToolsModules( - 'autocomplete.js', - 'filters.js', - 'history.js', - 'index.js', - 'input.js', - 'messages.js', - 'notifications.js', - 'object.js', - 'toolbox.js', - 'ui.js', + "autocomplete.js", + "filters.js", + "history.js", + "index.js", + "input.js", + "messages.js", + "notifications.js", + "object.js", + "toolbox.js", + "ui.js", ) diff --git a/devtools/client/webconsole/components/FilterBar/moz.build b/devtools/client/webconsole/components/FilterBar/moz.build index 02f63bf8e5033a..46ef681317b69c 100644 --- a/devtools/client/webconsole/components/FilterBar/moz.build +++ b/devtools/client/webconsole/components/FilterBar/moz.build @@ -4,8 +4,8 @@ # file, You can obtain one at http://mozilla.org/MPL/2.0/. DevToolsModules( - 'ConsoleSettings.js', - 'FilterBar.js', - 'FilterButton.js', - 'FilterCheckbox.js', -) \ No newline at end of file + "ConsoleSettings.js", + "FilterBar.js", + "FilterButton.js", + "FilterCheckbox.js", +) diff --git a/devtools/client/webconsole/components/Input/moz.build b/devtools/client/webconsole/components/Input/moz.build index 696cded907c81b..ae435b3495f1f2 100644 --- a/devtools/client/webconsole/components/Input/moz.build +++ b/devtools/client/webconsole/components/Input/moz.build @@ -4,10 +4,10 @@ # file, You can obtain one at http://mozilla.org/MPL/2.0/. DevToolsModules( - 'ConfirmDialog.js', - 'EagerEvaluation.js', - 'EditorToolbar.js', - 'EvaluationContextSelector.js', - 'JSTerm.js', - 'ReverseSearchInput.js', + "ConfirmDialog.js", + "EagerEvaluation.js", + "EditorToolbar.js", + "EvaluationContextSelector.js", + "JSTerm.js", + "ReverseSearchInput.js", ) diff --git a/devtools/client/webconsole/components/Output/message-types/moz.build b/devtools/client/webconsole/components/Output/message-types/moz.build index 844e19731310a2..5b24c72b7d3f3b 100644 --- a/devtools/client/webconsole/components/Output/message-types/moz.build +++ b/devtools/client/webconsole/components/Output/message-types/moz.build @@ -4,12 +4,12 @@ # file, You can obtain one at http://mozilla.org/MPL/2.0/. DevToolsModules( - 'ConsoleApiCall.js', - 'ConsoleCommand.js', - 'CSSWarning.js', - 'DefaultRenderer.js', - 'EvaluationResult.js', - 'NetworkEventMessage.js', - 'PageError.js', - 'WarningGroup.js', + "ConsoleApiCall.js", + "ConsoleCommand.js", + "CSSWarning.js", + "DefaultRenderer.js", + "EvaluationResult.js", + "NetworkEventMessage.js", + "PageError.js", + "WarningGroup.js", ) diff --git a/devtools/client/webconsole/components/Output/moz.build b/devtools/client/webconsole/components/Output/moz.build index ccd97023a3720f..5721ca20148af3 100644 --- a/devtools/client/webconsole/components/Output/moz.build +++ b/devtools/client/webconsole/components/Output/moz.build @@ -4,17 +4,17 @@ # file, You can obtain one at http://mozilla.org/MPL/2.0/. 
DIRS += [ - 'message-types', + "message-types", ] DevToolsModules( - 'CollapseButton.js', - 'ConsoleOutput.js', - 'ConsoleTable.js', - 'GripMessageBody.js', - 'Message.js', - 'MessageContainer.js', - 'MessageIcon.js', - 'MessageIndent.js', - 'MessageRepeat.js', -) \ No newline at end of file + "CollapseButton.js", + "ConsoleOutput.js", + "ConsoleTable.js", + "GripMessageBody.js", + "Message.js", + "MessageContainer.js", + "MessageIcon.js", + "MessageIndent.js", + "MessageRepeat.js", +) diff --git a/devtools/client/webconsole/components/moz.build b/devtools/client/webconsole/components/moz.build index 37c741b7505465..0b9eac77a53b97 100644 --- a/devtools/client/webconsole/components/moz.build +++ b/devtools/client/webconsole/components/moz.build @@ -4,12 +4,12 @@ # file, You can obtain one at http://mozilla.org/MPL/2.0/. DIRS += [ - 'FilterBar', - 'Input', - 'Output', + "FilterBar", + "Input", + "Output", ] DevToolsModules( - 'App.js', - 'SideBar.js', + "App.js", + "SideBar.js", ) diff --git a/devtools/client/webconsole/enhancers/moz.build b/devtools/client/webconsole/enhancers/moz.build index 7574a183f0d499..b503b63b496bff 100644 --- a/devtools/client/webconsole/enhancers/moz.build +++ b/devtools/client/webconsole/enhancers/moz.build @@ -4,9 +4,9 @@ # file, You can obtain one at http://mozilla.org/MPL/2.0/. DevToolsModules( - 'actor-releaser.js', - 'batching.js', - 'css-error-reporting.js', - 'message-cache-clearing.js', - 'net-provider.js', + "actor-releaser.js", + "batching.js", + "css-error-reporting.js", + "message-cache-clearing.js", + "net-provider.js", ) diff --git a/devtools/client/webconsole/middleware/moz.build b/devtools/client/webconsole/middleware/moz.build index 647d120464e5a9..ecb2088ca47179 100644 --- a/devtools/client/webconsole/middleware/moz.build +++ b/devtools/client/webconsole/middleware/moz.build @@ -4,7 +4,7 @@ # file, You can obtain one at http://mozilla.org/MPL/2.0/. DevToolsModules( - 'event-telemetry.js', - 'history-persistence.js', - 'performance-marker.js', + "event-telemetry.js", + "history-persistence.js", + "performance-marker.js", ) diff --git a/devtools/client/webconsole/moz.build b/devtools/client/webconsole/moz.build index fa8f8075a59cfd..89bc5b340f49a5 100644 --- a/devtools/client/webconsole/moz.build +++ b/devtools/client/webconsole/moz.build @@ -5,44 +5,42 @@ # file, You can obtain one at http://mozilla.org/MPL/2.0/. 
DIRS += [ - 'actions', - 'components', - 'enhancers', - 'middleware', - 'reducers', - 'selectors', - 'utils', + "actions", + "components", + "enhancers", + "middleware", + "reducers", + "selectors", + "utils", ] DevToolsModules( - 'browser-console-manager.js', - 'browser-console.js', - 'commands.js', - 'constants.js', - 'panel.js', - 'service-container.js', - 'store.js', - 'types.js', - 'utils.js', - 'webconsole-connection-proxy.js', - 'webconsole-ui.js', - 'webconsole-wrapper.js', - 'webconsole.js', + "browser-console-manager.js", + "browser-console.js", + "commands.js", + "constants.js", + "panel.js", + "service-container.js", + "store.js", + "types.js", + "utils.js", + "webconsole-connection-proxy.js", + "webconsole-ui.js", + "webconsole-wrapper.js", + "webconsole.js", ) BROWSER_CHROME_MANIFESTS += [ - 'test/browser/_browser_console.ini', - 'test/browser/_jsterm.ini', - 'test/browser/_webconsole.ini', - 'test/node/fixtures/stubs/stubs.ini', + "test/browser/_browser_console.ini", + "test/browser/_jsterm.ini", + "test/browser/_webconsole.ini", + "test/node/fixtures/stubs/stubs.ini", ] MOCHITEST_CHROME_MANIFESTS += [ - 'test/chrome/chrome.ini', + "test/chrome/chrome.ini", ] -XPCSHELL_TESTS_MANIFESTS += [ - 'test/xpcshell/xpcshell.ini' -] +XPCSHELL_TESTS_MANIFESTS += ["test/xpcshell/xpcshell.ini"] -with Files('**'): - BUG_COMPONENT = ('DevTools', 'Console') \ No newline at end of file +with Files("**"): + BUG_COMPONENT = ("DevTools", "Console") diff --git a/devtools/client/webconsole/reducers/moz.build b/devtools/client/webconsole/reducers/moz.build index 0acc5e7c667d59..b6380ba3be6ab8 100644 --- a/devtools/client/webconsole/reducers/moz.build +++ b/devtools/client/webconsole/reducers/moz.build @@ -4,12 +4,12 @@ # file, You can obtain one at http://mozilla.org/MPL/2.0/. DevToolsModules( - 'autocomplete.js', - 'filters.js', - 'history.js', - 'index.js', - 'messages.js', - 'notifications.js', - 'prefs.js', - 'ui.js', + "autocomplete.js", + "filters.js", + "history.js", + "index.js", + "messages.js", + "notifications.js", + "prefs.js", + "ui.js", ) diff --git a/devtools/client/webconsole/selectors/moz.build b/devtools/client/webconsole/selectors/moz.build index cfa60949447b4a..61f0f5243aec5a 100644 --- a/devtools/client/webconsole/selectors/moz.build +++ b/devtools/client/webconsole/selectors/moz.build @@ -4,11 +4,11 @@ # file, You can obtain one at http://mozilla.org/MPL/2.0/. DevToolsModules( - 'autocomplete.js', - 'filters.js', - 'history.js', - 'messages.js', - 'notifications.js', - 'prefs.js', - 'ui.js', + "autocomplete.js", + "filters.js", + "history.js", + "messages.js", + "notifications.js", + "prefs.js", + "ui.js", ) diff --git a/devtools/client/webconsole/utils/moz.build b/devtools/client/webconsole/utils/moz.build index 02c05e883ac3d8..8367b8c195e737 100644 --- a/devtools/client/webconsole/utils/moz.build +++ b/devtools/client/webconsole/utils/moz.build @@ -4,11 +4,11 @@ # file, You can obtain one at http://mozilla.org/MPL/2.0/. DevToolsModules( - 'clipboard.js', - 'context-menu.js', - 'id-generator.js', - 'l10n.js', - 'messages.js', - 'object-inspector.js', - 'prefs.js', + "clipboard.js", + "context-menu.js", + "id-generator.js", + "l10n.js", + "messages.js", + "object-inspector.js", + "prefs.js", ) diff --git a/devtools/moz.build b/devtools/moz.build index df3c59a38081e8..bf0f5798038bd1 100644 --- a/devtools/moz.build +++ b/devtools/moz.build @@ -4,49 +4,49 @@ # License, v. 2.0. 
If a copy of the MPL was not distributed with this # file, You can obtain one at http://mozilla.org/MPL/2.0/. -if CONFIG['MOZ_DEVTOOLS'] and CONFIG['MOZ_DEVTOOLS'] not in ('all', 'server'): - error('Unsupported MOZ_DEVTOOLS value: %s' % (CONFIG['MOZ_DEVTOOLS'])) +if CONFIG["MOZ_DEVTOOLS"] and CONFIG["MOZ_DEVTOOLS"] not in ("all", "server"): + error("Unsupported MOZ_DEVTOOLS value: %s" % (CONFIG["MOZ_DEVTOOLS"])) -if CONFIG['MOZ_DEVTOOLS'] == 'all': +if CONFIG["MOZ_DEVTOOLS"] == "all": DIRS += [ - 'client', + "client", ] # `platform` contains all native components DIRS += [ - 'platform', - 'server', - 'shared', - 'startup', + "platform", + "server", + "shared", + "startup", ] # /browser uses DIST_SUBDIR. We opt-in to this treatment when building # DevTools for the browser to keep the root omni.ja slim for use by external XUL # apps. -if CONFIG['MOZ_BUILD_APP'] == 'browser': - DIST_SUBDIR = 'browser' - export('DIST_SUBDIR') +if CONFIG["MOZ_BUILD_APP"] == "browser": + DIST_SUBDIR = "browser" + export("DIST_SUBDIR") -with Files('**'): - BUG_COMPONENT = ('DevTools', 'General') - SCHEDULES.exclusive = ['linux', 'macosx', 'windows'] +with Files("**"): + BUG_COMPONENT = ("DevTools", "General") + SCHEDULES.exclusive = ["linux", "macosx", "windows"] -with Files('docs/**'): - BUG_COMPONENT = ('DevTools', 'General') +with Files("docs/**"): + BUG_COMPONENT = ("DevTools", "General") -with Files('docs/tools/memory-panel.md'): - BUG_COMPONENT = ('DevTools', 'Memory') +with Files("docs/tools/memory-panel.md"): + BUG_COMPONENT = ("DevTools", "Memory") -with Files('docs/tools/debugger-panel.md'): - BUG_COMPONENT = ('DevTools', 'Debugger') +with Files("docs/tools/debugger-panel.md"): + BUG_COMPONENT = ("DevTools", "Debugger") -with Files('docs/backend/debugger-api.md'): - BUG_COMPONENT = ('DevTools', 'Debugger') +with Files("docs/backend/debugger-api.md"): + BUG_COMPONENT = ("DevTools", "Debugger") -with Files('docs/tools/console-panel.md'): - BUG_COMPONENT = ('DevTools', 'Console') +with Files("docs/tools/console-panel.md"): + BUG_COMPONENT = ("DevTools", "Console") -with Files('docs/tools/inspector-panel.md'): - BUG_COMPONENT = ('DevTools', 'Inspector') +with Files("docs/tools/inspector-panel.md"): + BUG_COMPONENT = ("DevTools", "Inspector") -SPHINX_TREES['/devtools'] = 'docs' +SPHINX_TREES["/devtools"] = "docs" diff --git a/devtools/platform/moz.build b/devtools/platform/moz.build index 8f54ec62b5ad8d..2a2cba6f0ac97b 100644 --- a/devtools/platform/moz.build +++ b/devtools/platform/moz.build @@ -4,20 +4,20 @@ # License, v. 2.0. If a copy of the MPL was not distributed with this # file, You can obtain one at http://mozilla.org/MPL/2.0/. -XPCSHELL_TESTS_MANIFESTS += ['tests/xpcshell/xpcshell.ini'] +XPCSHELL_TESTS_MANIFESTS += ["tests/xpcshell/xpcshell.ini"] XPIDL_SOURCES += [ - 'nsIJSInspector.idl', + "nsIJSInspector.idl", ] -XPIDL_MODULE = 'jsinspector' +XPIDL_MODULE = "jsinspector" SOURCES += [ - 'nsJSInspector.cpp', + "nsJSInspector.cpp", ] XPCOM_MANIFESTS += [ - 'components.conf', + "components.conf", ] -FINAL_LIBRARY = 'xul' +FINAL_LIBRARY = "xul" diff --git a/devtools/server/actors/accessibility/audit/moz.build b/devtools/server/actors/accessibility/audit/moz.build index 0600b8a8e29f64..01bd0af849881d 100644 --- a/devtools/server/actors/accessibility/audit/moz.build +++ b/devtools/server/actors/accessibility/audit/moz.build @@ -3,10 +3,10 @@ # file, You can obtain one at http://mozilla.org/MPL/2.0/. 
DevToolsModules( - 'contrast.js', - 'keyboard.js', - 'text-label.js', + "contrast.js", + "keyboard.js", + "text-label.js", ) -with Files('**'): - BUG_COMPONENT = ('DevTools', 'Accessibility Tools') +with Files("**"): + BUG_COMPONENT = ("DevTools", "Accessibility Tools") diff --git a/devtools/server/actors/accessibility/moz.build b/devtools/server/actors/accessibility/moz.build index c686118c3aa715..4da1cd0b244d88 100644 --- a/devtools/server/actors/accessibility/moz.build +++ b/devtools/server/actors/accessibility/moz.build @@ -3,18 +3,18 @@ # file, You can obtain one at http://mozilla.org/MPL/2.0/. DIRS += [ - 'audit', + "audit", ] DevToolsModules( - 'accessibility.js', - 'accessible.js', - 'constants.js', - 'parent-accessibility.js', - 'simulator.js', - 'walker.js', - 'worker.js', + "accessibility.js", + "accessible.js", + "constants.js", + "parent-accessibility.js", + "simulator.js", + "walker.js", + "worker.js", ) -with Files('**'): - BUG_COMPONENT = ('DevTools', 'Accessibility Tools') +with Files("**"): + BUG_COMPONENT = ("DevTools", "Accessibility Tools") diff --git a/devtools/server/actors/addon/moz.build b/devtools/server/actors/addon/moz.build index 210f7b9d3e2336..e3821736415376 100644 --- a/devtools/server/actors/addon/moz.build +++ b/devtools/server/actors/addon/moz.build @@ -5,6 +5,6 @@ # file, You can obtain one at http://mozilla.org/MPL/2.0/. DevToolsModules( - 'addons.js', - 'webextension-inspected-window.js', + "addons.js", + "webextension-inspected-window.js", ) diff --git a/devtools/server/actors/compatibility/lib/moz.build b/devtools/server/actors/compatibility/lib/moz.build index 8fd8f4b47e7ed7..f28d8fe48235d1 100644 --- a/devtools/server/actors/compatibility/lib/moz.build +++ b/devtools/server/actors/compatibility/lib/moz.build @@ -4,8 +4,8 @@ # License, v. 2.0. If a copy of the MPL was not distributed with this # file, You can obtain one at http://mozilla.org/MPL/2.0/. -XPCSHELL_TESTS_MANIFESTS += ['test/xpcshell/xpcshell.ini'] +XPCSHELL_TESTS_MANIFESTS += ["test/xpcshell/xpcshell.ini"] DevToolsModules( - 'MDNCompatibility.js', + "MDNCompatibility.js", ) diff --git a/devtools/server/actors/compatibility/moz.build b/devtools/server/actors/compatibility/moz.build index fb664afabb835c..010b027d37236e 100644 --- a/devtools/server/actors/compatibility/moz.build +++ b/devtools/server/actors/compatibility/moz.build @@ -5,12 +5,12 @@ # file, You can obtain one at http://mozilla.org/MPL/2.0/. DIRS += [ - 'lib', + "lib", ] DevToolsModules( - 'compatibility.js', + "compatibility.js", ) -with Files('**'): - BUG_COMPONENT = ('DevTools', 'Inspector: Compatibility') +with Files("**"): + BUG_COMPONENT = ("DevTools", "Inspector: Compatibility") diff --git a/devtools/server/actors/descriptors/moz.build b/devtools/server/actors/descriptors/moz.build index 913990bc406bf3..bf297b3dcb1e4b 100644 --- a/devtools/server/actors/descriptors/moz.build +++ b/devtools/server/actors/descriptors/moz.build @@ -5,9 +5,8 @@ # file, You can obtain one at http://mozilla.org/MPL/2.0/. DevToolsModules( - 'process.js', - 'tab.js', - 'webextension.js', - 'worker.js', + "process.js", + "tab.js", + "webextension.js", + "worker.js", ) - diff --git a/devtools/server/actors/emulation/moz.build b/devtools/server/actors/emulation/moz.build index 0c7430b2d79268..2ab4b0f6764834 100644 --- a/devtools/server/actors/emulation/moz.build +++ b/devtools/server/actors/emulation/moz.build @@ -5,7 +5,7 @@ # file, You can obtain one at http://mozilla.org/MPL/2.0/. 
DevToolsModules( - 'content-viewer.js', - 'responsive.js', - 'touch-simulator.js', + "content-viewer.js", + "responsive.js", + "touch-simulator.js", ) diff --git a/devtools/server/actors/highlighters/moz.build b/devtools/server/actors/highlighters/moz.build index 77f1cb694c1459..54886a2fe91577 100644 --- a/devtools/server/actors/highlighters/moz.build +++ b/devtools/server/actors/highlighters/moz.build @@ -5,22 +5,22 @@ # file, You can obtain one at http://mozilla.org/MPL/2.0/. DIRS += [ - 'utils', + "utils", ] DevToolsModules( - 'accessible.js', - 'auto-refresh.js', - 'box-model.js', - 'css-grid.js', - 'css-transform.js', - 'eye-dropper.js', - 'flexbox.js', - 'fonts.js', - 'geometry-editor.js', - 'measuring-tool.js', - 'paused-debugger.js', - 'rulers.js', - 'selector.js', - 'shapes.js', + "accessible.js", + "auto-refresh.js", + "box-model.js", + "css-grid.js", + "css-transform.js", + "eye-dropper.js", + "flexbox.js", + "fonts.js", + "geometry-editor.js", + "measuring-tool.js", + "paused-debugger.js", + "rulers.js", + "selector.js", + "shapes.js", ) diff --git a/devtools/server/actors/highlighters/utils/moz.build b/devtools/server/actors/highlighters/utils/moz.build index 41c3d48ed99b43..ab4f96912d8b4e 100644 --- a/devtools/server/actors/highlighters/utils/moz.build +++ b/devtools/server/actors/highlighters/utils/moz.build @@ -4,8 +4,4 @@ # License, v. 2.0. If a copy of the MPL was not distributed with this # file, You can obtain one at http://mozilla.org/MPL/2.0/. -DevToolsModules( - 'accessibility.js', - 'canvas.js', - 'markup.js' -) +DevToolsModules("accessibility.js", "canvas.js", "markup.js") diff --git a/devtools/server/actors/inspector/moz.build b/devtools/server/actors/inspector/moz.build index d8195d1e7e4a8c..03c69dc9fec620 100644 --- a/devtools/server/actors/inspector/moz.build +++ b/devtools/server/actors/inspector/moz.build @@ -5,17 +5,17 @@ # file, You can obtain one at http://mozilla.org/MPL/2.0/. DevToolsModules( - 'constants.js', - 'css-logic.js', - 'custom-element-watcher.js', - 'document-walker.js', - 'event-collector.js', - 'inspector.js', - 'node-picker.js', - 'node.js', - 'utils.js', - 'walker.js', + "constants.js", + "css-logic.js", + "custom-element-watcher.js", + "document-walker.js", + "event-collector.js", + "inspector.js", + "node-picker.js", + "node.js", + "utils.js", + "walker.js", ) -with Files('**'): - BUG_COMPONENT = ('DevTools', 'Inspector') +with Files("**"): + BUG_COMPONENT = ("DevTools", "Inspector") diff --git a/devtools/server/actors/moz.build b/devtools/server/actors/moz.build index 389cc42468e7a4..ea2c9c6069d601 100644 --- a/devtools/server/actors/moz.build +++ b/devtools/server/actors/moz.build @@ -5,86 +5,86 @@ # file, You can obtain one at http://mozilla.org/MPL/2.0/. 
DIRS += [ - 'accessibility', - 'addon', - 'compatibility', - 'descriptors', - 'emulation', - 'highlighters', - 'inspector', - 'network-monitor', - 'object', - 'resources', - 'targets', - 'utils', - 'watcher', - 'webconsole', - 'worker', + "accessibility", + "addon", + "compatibility", + "descriptors", + "emulation", + "highlighters", + "inspector", + "network-monitor", + "object", + "resources", + "targets", + "utils", + "watcher", + "webconsole", + "worker", ] DevToolsModules( - 'animation-type-longhand.js', - 'animation.js', - 'array-buffer.js', - 'breakpoint.js', - 'changes.js', - 'common.js', - 'css-properties.js', - 'device.js', - 'environment.js', - 'errordocs.js', - 'frame.js', - 'framerate.js', - 'heap-snapshot-file.js', - 'highlighters.css', - 'highlighters.js', - 'layout.js', - 'manifest.js', - 'memory.js', - 'object.js', - 'pause-scoped.js', - 'perf.js', - 'performance-recording.js', - 'performance.js', - 'preference.js', - 'process.js', - 'reflow.js', - 'root.js', - 'screenshot.js', - 'source.js', - 'storage.js', - 'string.js', - 'styles.js', - 'stylesheets.js', - 'thread.js', - 'watcher.js', - 'webbrowser.js', - 'webconsole.js', + "animation-type-longhand.js", + "animation.js", + "array-buffer.js", + "breakpoint.js", + "changes.js", + "common.js", + "css-properties.js", + "device.js", + "environment.js", + "errordocs.js", + "frame.js", + "framerate.js", + "heap-snapshot-file.js", + "highlighters.css", + "highlighters.js", + "layout.js", + "manifest.js", + "memory.js", + "object.js", + "pause-scoped.js", + "perf.js", + "performance-recording.js", + "performance.js", + "preference.js", + "process.js", + "reflow.js", + "root.js", + "screenshot.js", + "source.js", + "storage.js", + "string.js", + "styles.js", + "stylesheets.js", + "thread.js", + "watcher.js", + "webbrowser.js", + "webconsole.js", ) -with Files('animation.js'): - BUG_COMPONENT = ('DevTools', 'Inspector: Animations') +with Files("animation.js"): + BUG_COMPONENT = ("DevTools", "Inspector: Animations") -with Files('breakpoint.js'): - BUG_COMPONENT = ('DevTools', 'Debugger') +with Files("breakpoint.js"): + BUG_COMPONENT = ("DevTools", "Debugger") -with Files('css-properties.js'): - BUG_COMPONENT = ('DevTools', 'Inspector: Rules') +with Files("css-properties.js"): + BUG_COMPONENT = ("DevTools", "Inspector: Rules") -with Files('memory.js'): - BUG_COMPONENT = ('DevTools', 'Memory') +with Files("memory.js"): + BUG_COMPONENT = ("DevTools", "Memory") -with Files('performance*'): - BUG_COMPONENT = ('DevTools', 'Performance Tools (Profiler/Timeline)') +with Files("performance*"): + BUG_COMPONENT = ("DevTools", "Performance Tools (Profiler/Timeline)") -with Files('source.js'): - BUG_COMPONENT = ('DevTools', 'Debugger') +with Files("source.js"): + BUG_COMPONENT = ("DevTools", "Debugger") -with Files('storage.js'): - BUG_COMPONENT = ('DevTools', 'Storage Inspector') +with Files("storage.js"): + BUG_COMPONENT = ("DevTools", "Storage Inspector") -with Files('stylesheets.js'): - BUG_COMPONENT = ('DevTools', 'Style Editor') +with Files("stylesheets.js"): + BUG_COMPONENT = ("DevTools", "Style Editor") -with Files('webconsole.js'): - BUG_COMPONENT = ('DevTools', 'Console') +with Files("webconsole.js"): + BUG_COMPONENT = ("DevTools", "Console") diff --git a/devtools/server/actors/network-monitor/moz.build b/devtools/server/actors/network-monitor/moz.build index 27ea1ffcb09fd0..3ed343daa0275f 100644 --- a/devtools/server/actors/network-monitor/moz.build +++ b/devtools/server/actors/network-monitor/moz.build @@ -5,18 +5,18 @@ # file, You 
can obtain one at http://mozilla.org/MPL/2.0/. DIRS += [ - 'utils', + "utils", ] DevToolsModules( - 'channel-event-sink.js', - 'eventsource-actor.js', - 'network-event-actor.js', - 'network-event.js', - 'network-monitor.js', - 'network-observer.js', - 'network-response-listener.js', - 'stack-trace-collector.js', - 'stack-traces-actor.js', - 'websocket-actor.js', + "channel-event-sink.js", + "eventsource-actor.js", + "network-event-actor.js", + "network-event.js", + "network-monitor.js", + "network-observer.js", + "network-response-listener.js", + "stack-trace-collector.js", + "stack-traces-actor.js", + "websocket-actor.js", ) diff --git a/devtools/server/actors/network-monitor/utils/moz.build b/devtools/server/actors/network-monitor/utils/moz.build index 538e6f98e97160..4293b5cbf80a79 100644 --- a/devtools/server/actors/network-monitor/utils/moz.build +++ b/devtools/server/actors/network-monitor/utils/moz.build @@ -4,8 +4,4 @@ # License, v. 2.0. If a copy of the MPL was not distributed with this # file, You can obtain one at http://mozilla.org/MPL/2.0/. -DevToolsModules( - 'channel-map.js', - 'error-codes.js', - 'wildcard-to-regexp.js' -) +DevToolsModules("channel-map.js", "error-codes.js", "wildcard-to-regexp.js") diff --git a/devtools/server/actors/object/moz.build b/devtools/server/actors/object/moz.build index 056d4c3a319ec4..6f51d9abd7cc1f 100644 --- a/devtools/server/actors/object/moz.build +++ b/devtools/server/actors/object/moz.build @@ -3,10 +3,10 @@ # file, You can obtain one at http://mozilla.org/MPL/2.0/. DevToolsModules( - 'previewers.js', - 'property-iterator.js', - 'stringifiers.js', - 'symbol-iterator.js', - 'symbol.js', - 'utils.js', + "previewers.js", + "property-iterator.js", + "stringifiers.js", + "symbol-iterator.js", + "symbol.js", + "utils.js", ) diff --git a/devtools/server/actors/resources/moz.build b/devtools/server/actors/resources/moz.build index d042e784461fd0..cb072b4aab11d2 100644 --- a/devtools/server/actors/resources/moz.build +++ b/devtools/server/actors/resources/moz.build @@ -5,22 +5,22 @@ # file, You can obtain one at http://mozilla.org/MPL/2.0/. DIRS += [ - 'utils', + "utils", ] DevToolsModules( - 'console-messages.js', - 'css-changes.js', - 'css-messages.js', - 'document-event.js', - 'error-messages.js', - 'index.js', - 'local-storage.js', - 'network-events-stacktraces.js', - 'network-events.js', - 'platform-messages.js', - 'stylesheets.js', + "console-messages.js", + "css-changes.js", + "css-messages.js", + "document-event.js", + "error-messages.js", + "index.js", + "local-storage.js", + "network-events-stacktraces.js", + "network-events.js", + "platform-messages.js", + "stylesheets.js", ) -with Files('*-messages.js'): - BUG_COMPONENT = ('DevTools', 'Console') +with Files("*-messages.js"): + BUG_COMPONENT = ("DevTools", "Console") diff --git a/devtools/server/actors/resources/utils/moz.build b/devtools/server/actors/resources/utils/moz.build index c51080d2b4f1a7..b57360d2189d3e 100644 --- a/devtools/server/actors/resources/utils/moz.build +++ b/devtools/server/actors/resources/utils/moz.build @@ -5,9 +5,9 @@ # file, You can obtain one at http://mozilla.org/MPL/2.0/. 
DevToolsModules( - 'content-process-storage.js', - 'nsi-console-listener-watcher.js', + "content-process-storage.js", + "nsi-console-listener-watcher.js", ) -with Files('nsi-console-listener-watcher.js'): - BUG_COMPONENT = ('DevTools', 'Console') +with Files("nsi-console-listener-watcher.js"): + BUG_COMPONENT = ("DevTools", "Console") diff --git a/devtools/server/actors/targets/moz.build b/devtools/server/actors/targets/moz.build index 4e4e10c9e93e41..3e51edd5ad539e 100644 --- a/devtools/server/actors/targets/moz.build +++ b/devtools/server/actors/targets/moz.build @@ -5,13 +5,13 @@ # file, You can obtain one at http://mozilla.org/MPL/2.0/. DevToolsModules( - 'browsing-context.js', - 'chrome-window.js', - 'content-process.js', - 'frame.js', - 'index.js', - 'parent-process.js', - 'target-actor-registry.jsm', - 'webextension.js', - 'worker.js', + "browsing-context.js", + "chrome-window.js", + "content-process.js", + "frame.js", + "index.js", + "parent-process.js", + "target-actor-registry.jsm", + "webextension.js", + "worker.js", ) diff --git a/devtools/server/actors/utils/moz.build b/devtools/server/actors/utils/moz.build index a5adf11562448b..243af425be9afb 100644 --- a/devtools/server/actors/utils/moz.build +++ b/devtools/server/actors/utils/moz.build @@ -5,22 +5,22 @@ # file, You can obtain one at http://mozilla.org/MPL/2.0/. DevToolsModules( - 'accessibility.js', - 'actor-registry.js', - 'breakpoint-actor-map.js', - 'css-grid-utils.js', - 'dbg-source.js', - 'event-breakpoints.js', - 'event-loop.js', - 'inactive-property-helper.js', - 'logEvent.js', - 'make-debugger.js', - 'shapes-utils.js', - 'source-map-utils.js', - 'source-url.js', - 'stack.js', - 'TabSources.js', - 'track-change-emitter.js', - 'walker-search.js', - 'watchpoint-map.js', + "accessibility.js", + "actor-registry.js", + "breakpoint-actor-map.js", + "css-grid-utils.js", + "dbg-source.js", + "event-breakpoints.js", + "event-loop.js", + "inactive-property-helper.js", + "logEvent.js", + "make-debugger.js", + "shapes-utils.js", + "source-map-utils.js", + "source-url.js", + "stack.js", + "TabSources.js", + "track-change-emitter.js", + "walker-search.js", + "watchpoint-map.js", ) diff --git a/devtools/server/actors/watcher/moz.build b/devtools/server/actors/watcher/moz.build index c6613d473aeb59..3610332f30abf9 100644 --- a/devtools/server/actors/watcher/moz.build +++ b/devtools/server/actors/watcher/moz.build @@ -5,10 +5,9 @@ # file, You can obtain one at http://mozilla.org/MPL/2.0/. DIRS += [ - 'target-helpers', + "target-helpers", ] DevToolsModules( - 'WatcherRegistry.jsm', + "WatcherRegistry.jsm", ) - diff --git a/devtools/server/actors/watcher/target-helpers/moz.build b/devtools/server/actors/watcher/target-helpers/moz.build index c922aac0dcb3b5..92413d1f5279cc 100644 --- a/devtools/server/actors/watcher/target-helpers/moz.build +++ b/devtools/server/actors/watcher/target-helpers/moz.build @@ -5,9 +5,8 @@ # file, You can obtain one at http://mozilla.org/MPL/2.0/. DevToolsModules( - 'frame-helper.js', - 'process-helper.js', - 'utils.js', - 'worker-helper.js', + "frame-helper.js", + "process-helper.js", + "utils.js", + "worker-helper.js", ) - diff --git a/devtools/server/actors/webconsole/listeners/moz.build b/devtools/server/actors/webconsole/listeners/moz.build index 4b8cb22c948ef0..372818f6c9e783 100644 --- a/devtools/server/actors/webconsole/listeners/moz.build +++ b/devtools/server/actors/webconsole/listeners/moz.build @@ -5,10 +5,10 @@ # file, You can obtain one at http://mozilla.org/MPL/2.0/. 
DevToolsModules( - 'console-api.js', - 'console-file-activity.js', - 'console-reflow.js', - 'console-service.js', - 'content-process.js', - 'document-events.js', + "console-api.js", + "console-file-activity.js", + "console-reflow.js", + "console-service.js", + "content-process.js", + "document-events.js", ) diff --git a/devtools/server/actors/webconsole/moz.build b/devtools/server/actors/webconsole/moz.build index 1e76d7418e201e..21f21b9250feae 100644 --- a/devtools/server/actors/webconsole/moz.build +++ b/devtools/server/actors/webconsole/moz.build @@ -5,17 +5,17 @@ # file, You can obtain one at http://mozilla.org/MPL/2.0/. DIRS += [ - 'listeners', + "listeners", ] DevToolsModules( - 'commands.js', - 'content-process-forward.js', - 'eager-ecma-allowlist.js', - 'eager-function-allowlist.js', - 'eval-with-debugger.js', - 'message-manager-mock.js', - 'utils.js', - 'webidl-pure-allowlist.js', - 'worker-listeners.js', + "commands.js", + "content-process-forward.js", + "eager-ecma-allowlist.js", + "eager-function-allowlist.js", + "eval-with-debugger.js", + "message-manager-mock.js", + "utils.js", + "webidl-pure-allowlist.js", + "worker-listeners.js", ) diff --git a/devtools/server/actors/worker/moz.build b/devtools/server/actors/worker/moz.build index a9cae69989117a..4c9023879ba082 100644 --- a/devtools/server/actors/worker/moz.build +++ b/devtools/server/actors/worker/moz.build @@ -5,10 +5,10 @@ # file, You can obtain one at http://mozilla.org/MPL/2.0/. DevToolsModules( - 'push-subscription.js', - 'service-worker-process.js', - 'service-worker-registration-list.js', - 'service-worker-registration.js', - 'service-worker.js', - 'worker-descriptor-actor-list.js', + "push-subscription.js", + "service-worker-process.js", + "service-worker-registration-list.js", + "service-worker-registration.js", + "service-worker.js", + "worker-descriptor-actor-list.js", ) diff --git a/devtools/server/connectors/js-window-actor/moz.build b/devtools/server/connectors/js-window-actor/moz.build index e6aea351eda0bf..9aeb1507cd3225 100644 --- a/devtools/server/connectors/js-window-actor/moz.build +++ b/devtools/server/connectors/js-window-actor/moz.build @@ -5,9 +5,9 @@ # file, You can obtain one at http://mozilla.org/MPL/2.0/. DevToolsModules( - 'DevToolsFrameChild.jsm', - 'DevToolsFrameParent.jsm', - 'DevToolsWorkerChild.jsm', - 'DevToolsWorkerParent.jsm', - 'WindowGlobalLogger.jsm', + "DevToolsFrameChild.jsm", + "DevToolsFrameParent.jsm", + "DevToolsWorkerChild.jsm", + "DevToolsWorkerParent.jsm", + "WindowGlobalLogger.jsm", ) diff --git a/devtools/server/connectors/moz.build b/devtools/server/connectors/moz.build index 63d1b20868b8c7..060f0221314357 100644 --- a/devtools/server/connectors/moz.build +++ b/devtools/server/connectors/moz.build @@ -5,11 +5,11 @@ # file, You can obtain one at http://mozilla.org/MPL/2.0/. DIRS += [ - 'js-window-actor', + "js-window-actor", ] DevToolsModules( - 'content-process-connector.js', - 'frame-connector.js', - 'worker-connector.js', + "content-process-connector.js", + "frame-connector.js", + "worker-connector.js", ) diff --git a/devtools/server/moz.build b/devtools/server/moz.build index a88eeaec4d0d19..b9a59d40dc81a2 100644 --- a/devtools/server/moz.build +++ b/devtools/server/moz.build @@ -4,26 +4,26 @@ # License, v. 2.0. If a copy of the MPL was not distributed with this # file, You can obtain one at http://mozilla.org/MPL/2.0/. 
-include('../templates.mozbuild') +include("../templates.mozbuild") DIRS += [ - 'actors', - 'connectors', - 'performance', - 'socket', - 'startup', + "actors", + "connectors", + "performance", + "socket", + "startup", ] -if CONFIG['MOZ_BUILD_APP'] != 'mobile/android': - BROWSER_CHROME_MANIFESTS += ['tests/browser/browser.ini'] +if CONFIG["MOZ_BUILD_APP"] != "mobile/android": + BROWSER_CHROME_MANIFESTS += ["tests/browser/browser.ini"] -MOCHITEST_CHROME_MANIFESTS += ['tests/chrome/chrome.ini'] -XPCSHELL_TESTS_MANIFESTS += ['tests/xpcshell/xpcshell.ini'] +MOCHITEST_CHROME_MANIFESTS += ["tests/chrome/chrome.ini"] +XPCSHELL_TESTS_MANIFESTS += ["tests/xpcshell/xpcshell.ini"] DevToolsModules( - 'devtools-server-connection.js', - 'devtools-server.js', + "devtools-server-connection.js", + "devtools-server.js", ) -with Files('**'): - BUG_COMPONENT = ('DevTools', 'General') +with Files("**"): + BUG_COMPONENT = ("DevTools", "General") diff --git a/devtools/server/performance/moz.build b/devtools/server/performance/moz.build index b784619a4fe7d2..ff3435b3d4cc4f 100644 --- a/devtools/server/performance/moz.build +++ b/devtools/server/performance/moz.build @@ -5,12 +5,12 @@ # file, You can obtain one at http://mozilla.org/MPL/2.0/. DevToolsModules( - 'framerate.js', - 'memory.js', - 'profiler.js', - 'recorder.js', - 'timeline.js', + "framerate.js", + "memory.js", + "profiler.js", + "recorder.js", + "timeline.js", ) -with Files('**'): - BUG_COMPONENT = ('DevTools', 'Performance Tools (Profiler/Timeline)') +with Files("**"): + BUG_COMPONENT = ("DevTools", "Performance Tools (Profiler/Timeline)") diff --git a/devtools/server/socket/moz.build b/devtools/server/socket/moz.build index 7ee32477984265..1d3766c192d552 100644 --- a/devtools/server/socket/moz.build +++ b/devtools/server/socket/moz.build @@ -4,8 +4,8 @@ # License, v. 2.0. If a copy of the MPL was not distributed with this # file, You can obtain one at http://mozilla.org/MPL/2.0/. -MOCHITEST_CHROME_MANIFESTS += ['tests/chrome/chrome.ini'] +MOCHITEST_CHROME_MANIFESTS += ["tests/chrome/chrome.ini"] DevToolsModules( - 'websocket-server.js', + "websocket-server.js", ) diff --git a/devtools/server/startup/moz.build b/devtools/server/startup/moz.build index e94595aa85b687..9f1389a33d717e 100644 --- a/devtools/server/startup/moz.build +++ b/devtools/server/startup/moz.build @@ -5,9 +5,9 @@ # file, You can obtain one at http://mozilla.org/MPL/2.0/. DevToolsModules( - 'content-process-script.js', - 'content-process.js', - 'content-process.jsm', - 'frame.js', - 'worker.js', + "content-process-script.js", + "content-process.js", + "content-process.jsm", + "frame.js", + "worker.js", ) diff --git a/devtools/shared/compatibility/dataset/moz.build b/devtools/shared/compatibility/dataset/moz.build index 7647bd562cdf33..4731a5b3d7c721 100644 --- a/devtools/shared/compatibility/dataset/moz.build +++ b/devtools/shared/compatibility/dataset/moz.build @@ -5,6 +5,6 @@ # file, You can obtain one at http://mozilla.org/MPL/2.0/. DevToolsModules( - 'browsers.json', - 'css-properties.json', + "browsers.json", + "css-properties.json", ) diff --git a/devtools/shared/compatibility/moz.build b/devtools/shared/compatibility/moz.build index 2fef47895dac72..be5594b6ea682f 100644 --- a/devtools/shared/compatibility/moz.build +++ b/devtools/shared/compatibility/moz.build @@ -5,8 +5,8 @@ # file, You can obtain one at http://mozilla.org/MPL/2.0/. 
DIRS += [ - 'dataset', + "dataset", ] -with Files('**'): - BUG_COMPONENT = ('DevTools', 'Inspector: Compatibility') +with Files("**"): + BUG_COMPONENT = ("DevTools", "Inspector: Compatibility") diff --git a/devtools/shared/css/generated/mach_commands.py b/devtools/shared/css/generated/mach_commands.py index 68007c2c98b456..feeac78b1ae55d 100644 --- a/devtools/shared/css/generated/mach_commands.py +++ b/devtools/shared/css/generated/mach_commands.py @@ -27,19 +27,24 @@ Command, ) + def resolve_path(start, relativePath): """Helper to resolve a path from a start, and a relative path""" return os.path.normpath(os.path.join(start, relativePath)) + def stringify(obj): """Helper to stringify to JSON""" - return json.dumps(obj, sort_keys=True, indent=2, separators=(',', ': ')) + return json.dumps(obj, sort_keys=True, indent=2, separators=(",", ": ")) + @CommandProvider class MachCommands(MachCommandBase): @Command( - 'devtools-css-db', category='post-build', - description='Rebuild the devtool\'s static css properties database.') + "devtools-css-db", + category="post-build", + description="Rebuild the devtool's static css properties database.", + ) def generate_css_db(self): """Generate the static css properties database for devtools and write it to file.""" @@ -48,62 +53,67 @@ def generate_css_db(self): if not db: return 1 - self.output_template({ - 'preferences': stringify(db['preferences']), - 'cssProperties': stringify(db['cssProperties']), - 'pseudoElements': stringify(db['pseudoElements'])}) + self.output_template( + { + "preferences": stringify(db["preferences"]), + "cssProperties": stringify(db["cssProperties"]), + "pseudoElements": stringify(db["pseudoElements"]), + } + ) def get_properties_db_from_xpcshell(self): """Generate the static css properties db for devtools from an xpcshell script.""" build = MozbuildObject.from_environment() # Get the paths - script_path = resolve_path(self.topsrcdir, - 'devtools/shared/css/generated/generate-properties-db.js') - gre_path = resolve_path(self.topobjdir, 'dist/bin') - browser_path = resolve_path(self.topobjdir, 'dist/bin/browser') + script_path = resolve_path( + self.topsrcdir, "devtools/shared/css/generated/generate-properties-db.js" + ) + gre_path = resolve_path(self.topobjdir, "dist/bin") + browser_path = resolve_path(self.topobjdir, "dist/bin/browser") try: - xpcshell_path = build.get_binary_path(what='xpcshell') + xpcshell_path = build.get_binary_path(what="xpcshell") except BinaryNotFoundException as e: - self.log(logging.ERROR, 'devtools-css-db', - {'error': str(e)}, - 'ERROR: {error}') - self.log(logging.INFO, 'devtools-css-db', - {'help': e.help()}, - '{help}') + self.log( + logging.ERROR, "devtools-css-db", {"error": str(e)}, "ERROR: {error}" + ) + self.log(logging.INFO, "devtools-css-db", {"help": e.help()}, "{help}") return None print(browser_path) sub_env = dict(os.environ) - if sys.platform.startswith('linux'): + if sys.platform.startswith("linux"): sub_env["LD_LIBRARY_PATH"] = gre_path # Run the xcpshell script, and set the appdir flag to the browser path so that # we have the proper dependencies for requiring the loader. - contents = subprocess.check_output([xpcshell_path, '-g', gre_path, - '-a', browser_path, script_path], - env = sub_env) + contents = subprocess.check_output( + [xpcshell_path, "-g", gre_path, "-a", browser_path, script_path], + env=sub_env, + ) # Extract just the output between the delimiters as the xpcshell output can # have extra output that we don't want. 
- contents = contents.decode().split('DEVTOOLS_CSS_DB_DELIMITER')[1] + contents = contents.decode().split("DEVTOOLS_CSS_DB_DELIMITER")[1] return json.loads(contents) def output_template(self, substitutions): """Output a the properties-db.js from a template.""" - js_template_path = resolve_path(self.topsrcdir, - 'devtools/shared/css/generated/properties-db.js.in') - destination_path = resolve_path(self.topsrcdir, - 'devtools/shared/css/generated/properties-db.js') - - with open(js_template_path, 'rb') as handle: + js_template_path = resolve_path( + self.topsrcdir, "devtools/shared/css/generated/properties-db.js.in" + ) + destination_path = resolve_path( + self.topsrcdir, "devtools/shared/css/generated/properties-db.js" + ) + + with open(js_template_path, "rb") as handle: js_template = handle.read().decode() - preamble = '/* THIS IS AN AUTOGENERATED FILE. DO NOT EDIT */\n\n' + preamble = "/* THIS IS AN AUTOGENERATED FILE. DO NOT EDIT */\n\n" contents = string.Template(js_template).substitute(substitutions) - with open(destination_path, 'wb') as destination: + with open(destination_path, "wb") as destination: destination.write(preamble.encode() + contents.encode()) - print('The database was successfully generated at ' + destination_path) + print("The database was successfully generated at " + destination_path) diff --git a/devtools/shared/css/generated/moz.build b/devtools/shared/css/generated/moz.build index 7f01281db65e80..eb3b95b784203a 100644 --- a/devtools/shared/css/generated/moz.build +++ b/devtools/shared/css/generated/moz.build @@ -5,5 +5,5 @@ # file, You can obtain one at http://mozilla.org/MPL/2.0/. DevToolsModules( - 'properties-db.js', + "properties-db.js", ) diff --git a/devtools/shared/css/moz.build b/devtools/shared/css/moz.build index e0ac74eb561073..efcd47860219df 100644 --- a/devtools/shared/css/moz.build +++ b/devtools/shared/css/moz.build @@ -5,14 +5,14 @@ # file, You can obtain one at http://mozilla.org/MPL/2.0/. DIRS += [ - 'generated', + "generated", ] DevToolsModules( - 'color-db.js', - 'color.js', - 'constants.js', - 'lexer.js', - 'parsing-utils.js', - 'properties-db.js', + "color-db.js", + "color.js", + "constants.js", + "lexer.js", + "parsing-utils.js", + "properties-db.js", ) diff --git a/devtools/shared/discovery/moz.build b/devtools/shared/discovery/moz.build index 34fb592cc8bb80..7e67717dd8ff9f 100644 --- a/devtools/shared/discovery/moz.build +++ b/devtools/shared/discovery/moz.build @@ -4,8 +4,8 @@ # License, v. 2.0. If a copy of the MPL was not distributed with this # file, You can obtain one at http://mozilla.org/MPL/2.0/. -XPCSHELL_TESTS_MANIFESTS += ['tests/xpcshell/xpcshell.ini'] +XPCSHELL_TESTS_MANIFESTS += ["tests/xpcshell/xpcshell.ini"] DevToolsModules( - 'discovery.js', + "discovery.js", ) diff --git a/devtools/shared/heapsnapshot/moz.build b/devtools/shared/heapsnapshot/moz.build index fe06cabdc4d542..91e80b2391f011 100644 --- a/devtools/shared/heapsnapshot/moz.build +++ b/devtools/shared/heapsnapshot/moz.build @@ -4,59 +4,59 @@ # License, v. 2.0. If a copy of the MPL was not distributed with this # file, You can obtain one at http://mozilla.org/MPL/2.0/. 
-with Files('**'): - BUG_COMPONENT = ('DevTools', 'Memory') +with Files("**"): + BUG_COMPONENT = ("DevTools", "Memory") -if CONFIG['ENABLE_TESTS']: - DIRS += ['tests/gtest'] +if CONFIG["ENABLE_TESTS"]: + DIRS += ["tests/gtest"] -XPCSHELL_TESTS_MANIFESTS += [ 'tests/xpcshell/xpcshell.ini' ] -MOCHITEST_CHROME_MANIFESTS += [ 'tests/chrome/chrome.ini' ] -BROWSER_CHROME_MANIFESTS += ['tests/browser/browser.ini'] +XPCSHELL_TESTS_MANIFESTS += ["tests/xpcshell/xpcshell.ini"] +MOCHITEST_CHROME_MANIFESTS += ["tests/chrome/chrome.ini"] +BROWSER_CHROME_MANIFESTS += ["tests/browser/browser.ini"] EXPORTS.mozilla.devtools += [ - 'AutoMemMap.h', - 'CoreDump.pb.h', - 'DeserializedNode.h', - 'DominatorTree.h', - 'FileDescriptorOutputStream.h', - 'HeapSnapshot.h', - 'HeapSnapshotTempFileHelperChild.h', - 'HeapSnapshotTempFileHelperParent.h', - 'ZeroCopyNSIOutputStream.h', + "AutoMemMap.h", + "CoreDump.pb.h", + "DeserializedNode.h", + "DominatorTree.h", + "FileDescriptorOutputStream.h", + "HeapSnapshot.h", + "HeapSnapshotTempFileHelperChild.h", + "HeapSnapshotTempFileHelperParent.h", + "ZeroCopyNSIOutputStream.h", ] IPDL_SOURCES += [ - 'PHeapSnapshotTempFileHelper.ipdl', + "PHeapSnapshotTempFileHelper.ipdl", ] -include('/ipc/chromium/chromium-config.mozbuild') +include("/ipc/chromium/chromium-config.mozbuild") SOURCES += [ - 'AutoMemMap.cpp', - 'CoreDump.pb.cc', - 'DeserializedNode.cpp', - 'DominatorTree.cpp', - 'FileDescriptorOutputStream.cpp', - 'HeapSnapshot.cpp', - 'HeapSnapshotTempFileHelperParent.cpp', - 'ZeroCopyNSIOutputStream.cpp', + "AutoMemMap.cpp", + "CoreDump.pb.cc", + "DeserializedNode.cpp", + "DominatorTree.cpp", + "FileDescriptorOutputStream.cpp", + "HeapSnapshot.cpp", + "HeapSnapshotTempFileHelperParent.cpp", + "ZeroCopyNSIOutputStream.cpp", ] # Disable RTTI in google protocol buffer -DEFINES['GOOGLE_PROTOBUF_NO_RTTI'] = True +DEFINES["GOOGLE_PROTOBUF_NO_RTTI"] = True -FINAL_LIBRARY = 'xul' +FINAL_LIBRARY = "xul" DevToolsModules( - 'census-tree-node.js', - 'CensusUtils.js', - 'DominatorTreeNode.js', - 'HeapAnalysesClient.js', - 'HeapAnalysesWorker.js', - 'HeapSnapshotFileUtils.js', - 'shortest-paths.js', + "census-tree-node.js", + "CensusUtils.js", + "DominatorTreeNode.js", + "HeapAnalysesClient.js", + "HeapAnalysesWorker.js", + "HeapSnapshotFileUtils.js", + "shortest-paths.js", ) -if CONFIG['CC_TYPE'] in ('clang', 'gcc'): - CXXFLAGS += ['-Wno-error=shadow'] +if CONFIG["CC_TYPE"] in ("clang", "gcc"): + CXXFLAGS += ["-Wno-error=shadow"] diff --git a/devtools/shared/heapsnapshot/tests/gtest/moz.build b/devtools/shared/heapsnapshot/tests/gtest/moz.build index f4752e80d069f9..25fcce6dbcfefc 100644 --- a/devtools/shared/heapsnapshot/tests/gtest/moz.build +++ b/devtools/shared/heapsnapshot/tests/gtest/moz.build @@ -4,33 +4,33 @@ # License, v. 2.0. If a copy of the MPL was not distributed with this # file, you can obtain one at http://mozilla.org/MPL/2.0/. 
-Library('devtoolstests') +Library("devtoolstests") LOCAL_INCLUDES += [ - '../..', + "../..", ] -DEFINES['GOOGLE_PROTOBUF_NO_RTTI'] = True -DEFINES['GOOGLE_PROTOBUF_NO_STATIC_INITIALIZER'] = True +DEFINES["GOOGLE_PROTOBUF_NO_RTTI"] = True +DEFINES["GOOGLE_PROTOBUF_NO_STATIC_INITIALIZER"] = True UNIFIED_SOURCES = [ - 'DeserializedNodeUbiNodes.cpp', - 'DeserializedStackFrameUbiStackFrames.cpp', - 'DoesCrossCompartmentBoundaries.cpp', - 'DoesntCrossCompartmentBoundaries.cpp', - 'SerializesEdgeNames.cpp', - 'SerializesEverythingInHeapGraphOnce.cpp', - 'SerializesTypeNames.cpp', + "DeserializedNodeUbiNodes.cpp", + "DeserializedStackFrameUbiStackFrames.cpp", + "DoesCrossCompartmentBoundaries.cpp", + "DoesntCrossCompartmentBoundaries.cpp", + "SerializesEdgeNames.cpp", + "SerializesEverythingInHeapGraphOnce.cpp", + "SerializesTypeNames.cpp", ] -if CONFIG['CC_TYPE'] in ('clang', 'gcc'): - CXXFLAGS += ['-Wno-error=shadow'] +if CONFIG["CC_TYPE"] in ("clang", "gcc"): + CXXFLAGS += ["-Wno-error=shadow"] # THE MOCK_METHOD2 macro from gtest triggers this clang warning and it's hard # to work around, so we just ignore it. -if CONFIG['CC_TYPE'] == 'clang': - CXXFLAGS += ['-Wno-inconsistent-missing-override'] - # Workaround bug 1142396. Suppress the warning from gmock library for clang. - CXXFLAGS += ['-Wno-null-dereference'] +if CONFIG["CC_TYPE"] == "clang": + CXXFLAGS += ["-Wno-inconsistent-missing-override"] + # Workaround bug 1142396. Suppress the warning from gmock library for clang. + CXXFLAGS += ["-Wno-null-dereference"] -FINAL_LIBRARY = 'xul-gtest' +FINAL_LIBRARY = "xul-gtest" diff --git a/devtools/shared/inspector/moz.build b/devtools/shared/inspector/moz.build index f20d852c0de12a..21a01fae9b5ffd 100644 --- a/devtools/shared/inspector/moz.build +++ b/devtools/shared/inspector/moz.build @@ -4,7 +4,4 @@ # License, v. 2.0. If a copy of the MPL was not distributed with this # file, You can obtain one at http://mozilla.org/MPL/2.0/. -DevToolsModules( - 'css-logic.js', - 'utils.js' -) +DevToolsModules("css-logic.js", "utils.js") diff --git a/devtools/shared/layout/moz.build b/devtools/shared/layout/moz.build index 95903d67d772d7..da30931458949d 100644 --- a/devtools/shared/layout/moz.build +++ b/devtools/shared/layout/moz.build @@ -4,7 +4,4 @@ # License, v. 2.0. If a copy of the MPL was not distributed with this # file, You can obtain one at http://mozilla.org/MPL/2.0/. -DevToolsModules( - 'dom-matrix-2d.js', - 'utils.js' -) +DevToolsModules("dom-matrix-2d.js", "utils.js") diff --git a/devtools/shared/locales/moz.build b/devtools/shared/locales/moz.build index aac3a838c4c239..d988c0ff9b162c 100644 --- a/devtools/shared/locales/moz.build +++ b/devtools/shared/locales/moz.build @@ -4,4 +4,4 @@ # License, v. 2.0. If a copy of the MPL was not distributed with this # file, You can obtain one at http://mozilla.org/MPL/2.0/. -JAR_MANIFESTS += ['jar.mn'] +JAR_MANIFESTS += ["jar.mn"] diff --git a/devtools/shared/moz.build b/devtools/shared/moz.build index 27c3e3bcd9537e..6d4f50f377f486 100644 --- a/devtools/shared/moz.build +++ b/devtools/shared/moz.build @@ -4,78 +4,78 @@ # License, v. 2.0. If a copy of the MPL was not distributed with this # file, You can obtain one at http://mozilla.org/MPL/2.0/. 
-include('../templates.mozbuild') +include("../templates.mozbuild") DIRS += [ - 'acorn', - 'css', - 'compatibility', - 'discovery', - 'heapsnapshot', - 'inspector', - 'jsbeautify', - 'layout', - 'locales', - 'node-properties', - 'performance', - 'performance-new', - 'platform', - 'protocol', - 'qrcode', - 'resources', - 'screenshot', - 'security', - 'sprintfjs', - 'specs', - 'storage', - 'transport', - 'webconsole', - 'worker', + "acorn", + "css", + "compatibility", + "discovery", + "heapsnapshot", + "inspector", + "jsbeautify", + "layout", + "locales", + "node-properties", + "performance", + "performance-new", + "platform", + "protocol", + "qrcode", + "resources", + "screenshot", + "security", + "sprintfjs", + "specs", + "storage", + "transport", + "webconsole", + "worker", ] -if CONFIG['MOZ_BUILD_APP'] != 'mobile/android': - BROWSER_CHROME_MANIFESTS += ['tests/browser/browser.ini'] +if CONFIG["MOZ_BUILD_APP"] != "mobile/android": + BROWSER_CHROME_MANIFESTS += ["tests/browser/browser.ini"] -BROWSER_CHROME_MANIFESTS += ['test-helpers/browser.ini'] +BROWSER_CHROME_MANIFESTS += ["test-helpers/browser.ini"] -MOCHITEST_CHROME_MANIFESTS += ['tests/chrome/chrome.ini'] -XPCSHELL_TESTS_MANIFESTS += ['tests/xpcshell/xpcshell.ini'] +MOCHITEST_CHROME_MANIFESTS += ["tests/chrome/chrome.ini"] +XPCSHELL_TESTS_MANIFESTS += ["tests/xpcshell/xpcshell.ini"] -JAR_MANIFESTS += ['jar.mn'] +JAR_MANIFESTS += ["jar.mn"] DevToolsModules( - 'accessibility.js', - 'async-storage.js', - 'async-utils.js', - 'base-loader.js', - 'builtin-modules.js', - 'constants.js', - 'content-observer.js', - 'debounce.js', - 'defer.js', - 'DevToolsUtils.js', - 'dom-helpers.js', - 'dom-node-constants.js', - 'dom-node-filter-constants.js', - 'event-emitter.js', - 'extend.js', - 'flags.js', - 'generate-uuid.js', - 'indentation.js', - 'indexed-db.js', - 'l10n.js', - 'loader-plugin-raw.jsm', - 'Loader.jsm', - 'natural-sort.js', - 'path.js', - 'picker-constants.js', - 'plural-form.js', - 'protocol.js', - 'system.js', - 'task.js', - 'ThreadSafeDevToolsUtils.js', - 'throttle.js', + "accessibility.js", + "async-storage.js", + "async-utils.js", + "base-loader.js", + "builtin-modules.js", + "constants.js", + "content-observer.js", + "debounce.js", + "defer.js", + "DevToolsUtils.js", + "dom-helpers.js", + "dom-node-constants.js", + "dom-node-filter-constants.js", + "event-emitter.js", + "extend.js", + "flags.js", + "generate-uuid.js", + "indentation.js", + "indexed-db.js", + "l10n.js", + "loader-plugin-raw.jsm", + "Loader.jsm", + "natural-sort.js", + "path.js", + "picker-constants.js", + "plural-form.js", + "protocol.js", + "system.js", + "task.js", + "ThreadSafeDevToolsUtils.js", + "throttle.js", ) -with Files('**'): - BUG_COMPONENT = ('DevTools', 'General') +with Files("**"): + BUG_COMPONENT = ("DevTools", "General") diff --git a/devtools/shared/performance-new/moz.build b/devtools/shared/performance-new/moz.build index 007d2b3b53dc50..455098f4fbcc0d 100644 --- a/devtools/shared/performance-new/moz.build +++ b/devtools/shared/performance-new/moz.build @@ -5,9 +5,9 @@ # file, You can obtain one at http://mozilla.org/MPL/2.0/. 
DevToolsModules( - 'gecko-profiler-interface.js', - 'recording-utils.js', + "gecko-profiler-interface.js", + "recording-utils.js", ) -with Files('**'): - BUG_COMPONENT = ('DevTools', 'Performance Tools (Profiler/Timeline)') +with Files("**"): + BUG_COMPONENT = ("DevTools", "Performance Tools (Profiler/Timeline)") diff --git a/devtools/shared/performance/moz.build b/devtools/shared/performance/moz.build index 96bbbeafb6dfe0..36f0139d6bf6ef 100644 --- a/devtools/shared/performance/moz.build +++ b/devtools/shared/performance/moz.build @@ -4,9 +4,9 @@ # License, v. 2.0. If a copy of the MPL was not distributed with this # file, You can obtain one at http://mozilla.org/MPL/2.0/. -XPCSHELL_TESTS_MANIFESTS += ['xpcshell/xpcshell.ini'] +XPCSHELL_TESTS_MANIFESTS += ["xpcshell/xpcshell.ini"] DevToolsModules( - 'recording-common.js', - 'recording-utils.js', + "recording-common.js", + "recording-utils.js", ) diff --git a/devtools/shared/platform/moz.build b/devtools/shared/platform/moz.build index fa9cdf57d82b66..58d8a077738eda 100644 --- a/devtools/shared/platform/moz.build +++ b/devtools/shared/platform/moz.build @@ -5,7 +5,7 @@ # file, You can obtain one at http://mozilla.org/MPL/2.0/. DevToolsModules( - 'cache-entry.js', - 'clipboard.js', - 'stack.js', + "cache-entry.js", + "clipboard.js", + "stack.js", ) diff --git a/devtools/shared/protocol/Actor/moz.build b/devtools/shared/protocol/Actor/moz.build index d50925a0833c69..3e3c46d49bde61 100644 --- a/devtools/shared/protocol/Actor/moz.build +++ b/devtools/shared/protocol/Actor/moz.build @@ -4,5 +4,5 @@ # file, You can obtain one at http://mozilla.org/MPL/2.0/. DevToolsModules( - 'generateActorSpec.js', + "generateActorSpec.js", ) diff --git a/devtools/shared/protocol/Front/moz.build b/devtools/shared/protocol/Front/moz.build index 11b01d325bc129..75b36c5eab3b5b 100644 --- a/devtools/shared/protocol/Front/moz.build +++ b/devtools/shared/protocol/Front/moz.build @@ -4,5 +4,5 @@ # file, You can obtain one at http://mozilla.org/MPL/2.0/. DevToolsModules( - 'FrontClassWithSpec.js', + "FrontClassWithSpec.js", ) diff --git a/devtools/shared/protocol/moz.build b/devtools/shared/protocol/moz.build index 8e68020556c462..87bf999c25eba7 100644 --- a/devtools/shared/protocol/moz.build +++ b/devtools/shared/protocol/moz.build @@ -4,19 +4,19 @@ # file, You can obtain one at http://mozilla.org/MPL/2.0/. DIRS += [ - 'Actor', - 'Front', + "Actor", + "Front", ] DevToolsModules( - 'Actor.js', - 'Front.js', - 'lazy-pool.js', - 'Pool.js', - 'Request.js', - 'Response.js', - 'types.js', - 'utils.js', + "Actor.js", + "Front.js", + "lazy-pool.js", + "Pool.js", + "Request.js", + "Response.js", + "types.js", + "utils.js", ) -XPCSHELL_TESTS_MANIFESTS += ['tests/xpcshell/xpcshell.ini'] +XPCSHELL_TESTS_MANIFESTS += ["tests/xpcshell/xpcshell.ini"] diff --git a/devtools/shared/qrcode/moz.build b/devtools/shared/qrcode/moz.build index 89ca7801f68397..241793b4017d87 100644 --- a/devtools/shared/qrcode/moz.build +++ b/devtools/shared/qrcode/moz.build @@ -4,19 +4,15 @@ # License, v. 2.0. If a copy of the MPL was not distributed with this # file, You can obtain one at http://mozilla.org/MPL/2.0/. 
-DIRS += [ - 'encoder' -] +DIRS += ["encoder"] # Save file size on Fennec until there are active plans to use the decoder there -if CONFIG['MOZ_BUILD_APP'] != 'mobile/android': - DIRS += [ - 'decoder' - ] +if CONFIG["MOZ_BUILD_APP"] != "mobile/android": + DIRS += ["decoder"] DevToolsModules( - 'index.js', + "index.js", ) -XPCSHELL_TESTS_MANIFESTS += ['tests/xpcshell/xpcshell.ini'] -MOCHITEST_CHROME_MANIFESTS += ['tests/chrome/chrome.ini'] +XPCSHELL_TESTS_MANIFESTS += ["tests/xpcshell/xpcshell.ini"] +MOCHITEST_CHROME_MANIFESTS += ["tests/chrome/chrome.ini"] diff --git a/devtools/shared/resources/legacy-listeners/moz.build b/devtools/shared/resources/legacy-listeners/moz.build index 1dd466a49fc636..ca84256b66bd9b 100644 --- a/devtools/shared/resources/legacy-listeners/moz.build +++ b/devtools/shared/resources/legacy-listeners/moz.build @@ -3,22 +3,22 @@ # file, You can obtain one at http://mozilla.org/MPL/2.0/. DevToolsModules( - 'cache-storage.js', - 'console-messages.js', - 'cookie.js', - 'css-changes.js', - 'css-messages.js', - 'error-messages.js', - 'extension-storage.js', - 'indexed-db.js', - 'local-storage.js', - 'network-event-stacktraces.js', - 'network-events.js', - 'platform-messages.js', - 'root-node.js', - 'session-storage.js', - 'source.js', - 'storage-utils.js', - 'stylesheet.js', - 'websocket.js', + "cache-storage.js", + "console-messages.js", + "cookie.js", + "css-changes.js", + "css-messages.js", + "error-messages.js", + "extension-storage.js", + "indexed-db.js", + "local-storage.js", + "network-event-stacktraces.js", + "network-events.js", + "platform-messages.js", + "root-node.js", + "session-storage.js", + "source.js", + "storage-utils.js", + "stylesheet.js", + "websocket.js", ) diff --git a/devtools/shared/resources/legacy-target-watchers/moz.build b/devtools/shared/resources/legacy-target-watchers/moz.build index 27c0cf515a0aa3..60fdd7ec222dff 100644 --- a/devtools/shared/resources/legacy-target-watchers/moz.build +++ b/devtools/shared/resources/legacy-target-watchers/moz.build @@ -3,8 +3,8 @@ # file, You can obtain one at http://mozilla.org/MPL/2.0/. DevToolsModules( - 'legacy-processes-watcher.js', - 'legacy-serviceworkers-watcher.js', - 'legacy-sharedworkers-watcher.js', - 'legacy-workers-watcher.js', + "legacy-processes-watcher.js", + "legacy-serviceworkers-watcher.js", + "legacy-sharedworkers-watcher.js", + "legacy-workers-watcher.js", ) diff --git a/devtools/shared/resources/moz.build b/devtools/shared/resources/moz.build index 100f60f9cb5e99..5e709be68d836e 100644 --- a/devtools/shared/resources/moz.build +++ b/devtools/shared/resources/moz.build @@ -3,15 +3,15 @@ # file, You can obtain one at http://mozilla.org/MPL/2.0/. DIRS += [ - 'legacy-listeners', - 'legacy-target-watchers', - 'transformers', + "legacy-listeners", + "legacy-target-watchers", + "transformers", ] DevToolsModules( - 'resource-watcher.js', - 'target-list.js', + "resource-watcher.js", + "target-list.js", ) -if CONFIG['MOZ_BUILD_APP'] != 'mobile/android': - BROWSER_CHROME_MANIFESTS += ['tests/browser.ini'] +if CONFIG["MOZ_BUILD_APP"] != "mobile/android": + BROWSER_CHROME_MANIFESTS += ["tests/browser.ini"] diff --git a/devtools/shared/resources/transformers/moz.build b/devtools/shared/resources/transformers/moz.build index e252873f56ecc8..7bf5cdf8833d7b 100644 --- a/devtools/shared/resources/transformers/moz.build +++ b/devtools/shared/resources/transformers/moz.build @@ -3,9 +3,9 @@ # file, You can obtain one at http://mozilla.org/MPL/2.0/. 
DevToolsModules( - 'console-messages.js', - 'error-messages.js', - 'root-node.js', - 'storage-indexed-db.js', - 'storage-local-storage.js', + "console-messages.js", + "error-messages.js", + "root-node.js", + "storage-indexed-db.js", + "storage-local-storage.js", ) diff --git a/devtools/shared/screenshot/moz.build b/devtools/shared/screenshot/moz.build index f8d16a902e6c52..a4462c87055843 100644 --- a/devtools/shared/screenshot/moz.build +++ b/devtools/shared/screenshot/moz.build @@ -5,6 +5,6 @@ # file, You can obtain one at http://mozilla.org/MPL/2.0/. DevToolsModules( - 'capture.js', - 'save.js', + "capture.js", + "save.js", ) diff --git a/devtools/shared/security/moz.build b/devtools/shared/security/moz.build index 9e20d1fe8335c2..5c364d7efae15f 100644 --- a/devtools/shared/security/moz.build +++ b/devtools/shared/security/moz.build @@ -4,12 +4,12 @@ # License, v. 2.0. If a copy of the MPL was not distributed with this # file, You can obtain one at http://mozilla.org/MPL/2.0/. -MOCHITEST_CHROME_MANIFESTS += ['tests/chrome/chrome.ini'] -XPCSHELL_TESTS_MANIFESTS += ['tests/xpcshell/xpcshell.ini'] +MOCHITEST_CHROME_MANIFESTS += ["tests/chrome/chrome.ini"] +XPCSHELL_TESTS_MANIFESTS += ["tests/xpcshell/xpcshell.ini"] DevToolsModules( - 'auth.js', - 'cert.js', - 'prompt.js', - 'socket.js', + "auth.js", + "cert.js", + "prompt.js", + "socket.js", ) diff --git a/devtools/shared/specs/addon/moz.build b/devtools/shared/specs/addon/moz.build index 210f7b9d3e2336..e3821736415376 100644 --- a/devtools/shared/specs/addon/moz.build +++ b/devtools/shared/specs/addon/moz.build @@ -5,6 +5,6 @@ # file, You can obtain one at http://mozilla.org/MPL/2.0/. DevToolsModules( - 'addons.js', - 'webextension-inspected-window.js', + "addons.js", + "webextension-inspected-window.js", ) diff --git a/devtools/shared/specs/descriptors/moz.build b/devtools/shared/specs/descriptors/moz.build index 913990bc406bf3..bf297b3dcb1e4b 100644 --- a/devtools/shared/specs/descriptors/moz.build +++ b/devtools/shared/specs/descriptors/moz.build @@ -5,9 +5,8 @@ # file, You can obtain one at http://mozilla.org/MPL/2.0/. DevToolsModules( - 'process.js', - 'tab.js', - 'webextension.js', - 'worker.js', + "process.js", + "tab.js", + "webextension.js", + "worker.js", ) - diff --git a/devtools/shared/specs/moz.build b/devtools/shared/specs/moz.build index 0e8fe076e7103c..6bd0b25c5f8daf 100644 --- a/devtools/shared/specs/moz.build +++ b/devtools/shared/specs/moz.build @@ -5,57 +5,57 @@ # file, You can obtain one at http://mozilla.org/MPL/2.0/. 
DIRS += [ - 'addon', - 'descriptors', - 'targets', - 'worker', + "addon", + "descriptors", + "targets", + "worker", ] DevToolsModules( - 'accessibility.js', - 'animation.js', - 'array-buffer.js', - 'changes.js', - 'compatibility.js', - 'content-viewer.js', - 'css-properties.js', - 'device.js', - 'environment.js', - 'eventsource.js', - 'frame.js', - 'framerate.js', - 'heap-snapshot-file.js', - 'highlighters.js', - 'index.js', - 'inspector.js', - 'layout.js', - 'manifest.js', - 'memory.js', - 'network-event.js', - 'network-monitor.js', - 'node.js', - 'object.js', - 'perf.js', - 'performance-recording.js', - 'performance.js', - 'preference.js', - 'property-iterator.js', - 'reflow.js', - 'responsive.js', - 'root.js', - 'screenshot.js', - 'source.js', - 'stacktraces.js', - 'storage.js', - 'string.js', - 'styles.js', - 'stylesheets.js', - 'symbol-iterator.js', - 'symbol.js', - 'thread.js', - 'timeline.js', - 'walker.js', - 'watcher.js', - 'webconsole.js', - 'websocket.js', + "accessibility.js", + "animation.js", + "array-buffer.js", + "changes.js", + "compatibility.js", + "content-viewer.js", + "css-properties.js", + "device.js", + "environment.js", + "eventsource.js", + "frame.js", + "framerate.js", + "heap-snapshot-file.js", + "highlighters.js", + "index.js", + "inspector.js", + "layout.js", + "manifest.js", + "memory.js", + "network-event.js", + "network-monitor.js", + "node.js", + "object.js", + "perf.js", + "performance-recording.js", + "performance.js", + "preference.js", + "property-iterator.js", + "reflow.js", + "responsive.js", + "root.js", + "screenshot.js", + "source.js", + "stacktraces.js", + "storage.js", + "string.js", + "styles.js", + "stylesheets.js", + "symbol-iterator.js", + "symbol.js", + "thread.js", + "timeline.js", + "walker.js", + "watcher.js", + "webconsole.js", + "websocket.js", ) diff --git a/devtools/shared/specs/targets/moz.build b/devtools/shared/specs/targets/moz.build index 32c2725a1927ea..0fb59649bde448 100644 --- a/devtools/shared/specs/targets/moz.build +++ b/devtools/shared/specs/targets/moz.build @@ -5,11 +5,11 @@ # file, You can obtain one at http://mozilla.org/MPL/2.0/. DevToolsModules( - 'browsing-context.js', - 'chrome-window.js', - 'content-process.js', - 'frame.js', - 'parent-process.js', - 'webextension.js', - 'worker.js', + "browsing-context.js", + "chrome-window.js", + "content-process.js", + "frame.js", + "parent-process.js", + "webextension.js", + "worker.js", ) diff --git a/devtools/shared/specs/worker/moz.build b/devtools/shared/specs/worker/moz.build index b0d47c32da412a..dae0e2d60693a2 100644 --- a/devtools/shared/specs/worker/moz.build +++ b/devtools/shared/specs/worker/moz.build @@ -5,7 +5,7 @@ # file, You can obtain one at http://mozilla.org/MPL/2.0/. DevToolsModules( - 'push-subscription.js', - 'service-worker-registration.js', - 'service-worker.js', + "push-subscription.js", + "service-worker-registration.js", + "service-worker.js", ) diff --git a/devtools/shared/storage/moz.build b/devtools/shared/storage/moz.build index 35bb70f2fe049f..95cf2857de336c 100644 --- a/devtools/shared/storage/moz.build +++ b/devtools/shared/storage/moz.build @@ -4,10 +4,6 @@ # License, v. 2.0. If a copy of the MPL was not distributed with this # file, You can obtain one at http://mozilla.org/MPL/2.0/. 
-DIRS += [ - 'vendor' -] +DIRS += ["vendor"] -DevToolsModules( - 'utils.js' -) +DevToolsModules("utils.js") diff --git a/devtools/shared/transport/moz.build b/devtools/shared/transport/moz.build index 0d4cdef127e312..1ec5d017774913 100644 --- a/devtools/shared/transport/moz.build +++ b/devtools/shared/transport/moz.build @@ -4,15 +4,15 @@ # License, v. 2.0. If a copy of the MPL was not distributed with this # file, You can obtain one at http://mozilla.org/MPL/2.0/. -XPCSHELL_TESTS_MANIFESTS += ['tests/xpcshell/xpcshell.ini'] +XPCSHELL_TESTS_MANIFESTS += ["tests/xpcshell/xpcshell.ini"] DevToolsModules( - 'child-transport.js', - 'js-window-actor-transport.js', - 'local-transport.js', - 'packets.js', - 'stream-utils.js', - 'transport.js', - 'websocket-transport.js', - 'worker-transport.js', + "child-transport.js", + "js-window-actor-transport.js", + "local-transport.js", + "packets.js", + "stream-utils.js", + "transport.js", + "websocket-transport.js", + "worker-transport.js", ) diff --git a/devtools/shared/webconsole/GeneratePureDOMFunctions.py b/devtools/shared/webconsole/GeneratePureDOMFunctions.py index dc2181776d4775..13416bea2e3b0b 100644 --- a/devtools/shared/webconsole/GeneratePureDOMFunctions.py +++ b/devtools/shared/webconsole/GeneratePureDOMFunctions.py @@ -24,16 +24,18 @@ # This is an explicit list of interfaces to load [Pure] and [Constant] # annotation for. There are a bunch of things that are pure in other interfaces # that we don't care about in the context of the devtools. -INTERFACE_ALLOWLIST = set([ - "Document", - "Node", - "DOMTokenList", - "Element", - "Performance", - "URLSearchParams", - "FormData", - "Headers" -]) +INTERFACE_ALLOWLIST = set( + [ + "Document", + "Node", + "DOMTokenList", + "Element", + "Performance", + "URLSearchParams", + "FormData", + "Headers", + ] +) FILE_TEMPLATE = """\ /* This Source Code Form is subject to the terms of the Mozilla Public @@ -48,46 +50,45 @@ """ output_file = path.join( - buildconfig.topsrcdir, - "devtools/server/actors/webconsole/webidl-pure-allowlist.js" + buildconfig.topsrcdir, "devtools/server/actors/webconsole/webidl-pure-allowlist.js" ) input_file = path.join(buildconfig.topobjdir, "dom/bindings/file-lists.json") if not path.isfile(input_file): - raise Exception( - "Script must be run with a mozconfig referencing a non-artifact OBJDIR") + raise Exception( + "Script must be run with a mozconfig referencing a non-artifact OBJDIR" + ) file_list = json.load(open(input_file)) parser = WebIDL.Parser() for filepath in file_list["webidls"]: - with open(filepath, 'r', encoding="utf8") as f: - parser.parse(f.read(), filepath) + with open(filepath, "r", encoding="utf8") as f: + parser.parse(f.read(), filepath) results = parser.finish() output = {} for result in results: - if isinstance(result, WebIDL.IDLInterface): - iface = result.identifier.name - - for member in result.members: - # We only care about methods because eager evaluation assumes that - # all getter functions are side-effect-free. 
- if member.isMethod() and member.affects == "Nothing": - name = member.identifier.name - - if ((INTERFACE_ALLOWLIST and not iface in INTERFACE_ALLOWLIST) or - name.startswith("_")): - continue - if not iface in output: - output[iface] = [] - if member.isStatic(): - output[iface].append([name]) - else: - output[iface].append(["prototype", name]) + if isinstance(result, WebIDL.IDLInterface): + iface = result.identifier.name + + for member in result.members: + # We only care about methods because eager evaluation assumes that + # all getter functions are side-effect-free. + if member.isMethod() and member.affects == "Nothing": + name = member.identifier.name + + if ( + INTERFACE_ALLOWLIST and not iface in INTERFACE_ALLOWLIST + ) or name.startswith("_"): + continue + if not iface in output: + output[iface] = [] + if member.isStatic(): + output[iface].append([name]) + else: + output[iface].append(["prototype", name]) with open(output_file, "w") as f: - f.write(FILE_TEMPLATE % { - 'pure_data': json.dumps(output, indent=2, sort_keys=True) - }) + f.write(FILE_TEMPLATE % {"pure_data": json.dumps(output, indent=2, sort_keys=True)}) diff --git a/devtools/shared/webconsole/GenerateReservedWordsJS.py b/devtools/shared/webconsole/GenerateReservedWordsJS.py index 35a47b3dad84bf..4ecc35feff8162 100644 --- a/devtools/shared/webconsole/GenerateReservedWordsJS.py +++ b/devtools/shared/webconsole/GenerateReservedWordsJS.py @@ -6,37 +6,36 @@ import re import sys + def read_reserved_word_list(filename): macro_pat = re.compile(r"^\s*MACRO\(([^,]+), *[^,]+, *[^\)]+\)\s*\\?$") reserved_word_list = [] - with open(filename, 'r') as f: + with open(filename, "r") as f: for line in f: m = macro_pat.search(line) if m: reserved_word_list.append(m.group(1)) - assert(len(reserved_word_list) != 0) + assert len(reserved_word_list) != 0 return reserved_word_list def line(opt, s): - opt['output'].write('{}\n'.format(s)) + opt["output"].write("{}\n".format(s)) def main(output, reserved_words_h): reserved_word_list = read_reserved_word_list(reserved_words_h) - opt = { - 'output': output - } + opt = {"output": output} - line(opt, 'const JS_RESERVED_WORDS = ['); + line(opt, "const JS_RESERVED_WORDS = [") for word in reserved_word_list: line(opt, ' "{}",'.format(word)) - line(opt, '];'); - line(opt, 'module.exports = JS_RESERVED_WORDS;'); + line(opt, "];") + line(opt, "module.exports = JS_RESERVED_WORDS;") -if __name__ == '__main__': +if __name__ == "__main__": main(sys.stdout, *sys.argv[1:]) diff --git a/devtools/shared/webconsole/moz.build b/devtools/shared/webconsole/moz.build index b55f9827399b63..75aff117b17353 100644 --- a/devtools/shared/webconsole/moz.build +++ b/devtools/shared/webconsole/moz.build @@ -4,10 +4,10 @@ # License, v. 2.0. If a copy of the MPL was not distributed with this # file, You can obtain one at http://mozilla.org/MPL/2.0/. 
-if CONFIG['OS_TARGET'] != 'Android': - MOCHITEST_CHROME_MANIFESTS += ['test/chrome/chrome.ini'] - XPCSHELL_TESTS_MANIFESTS += ['test/xpcshell/xpcshell.ini'] - BROWSER_CHROME_MANIFESTS += ['test/browser/browser.ini'] +if CONFIG["OS_TARGET"] != "Android": + MOCHITEST_CHROME_MANIFESTS += ["test/chrome/chrome.ini"] + XPCSHELL_TESTS_MANIFESTS += ["test/xpcshell/xpcshell.ini"] + BROWSER_CHROME_MANIFESTS += ["test/browser/browser.ini"] # Compute where to put transpiled files into omni.ja package # All DevTools modules are used via resource://devtools/ URI @@ -15,16 +15,19 @@ if CONFIG['OS_TARGET'] != 'Android': base = FINAL_TARGET_FILES.chrome.devtools.modules # Now, navigate to the right sub-directory into devtools root modules folder -for dir in RELATIVEDIR.split('/'): +for dir in RELATIVEDIR.split("/"): base = base[dir] base += ["!reserved-js-words.js"] -GeneratedFile('reserved-js-words.js', script='GenerateReservedWordsJS.py', - inputs = ['/js/src/frontend/ReservedWords.h']) +GeneratedFile( + "reserved-js-words.js", + script="GenerateReservedWordsJS.py", + inputs=["/js/src/frontend/ReservedWords.h"], +) DevToolsModules( - 'js-property-provider.js', - 'network-helper.js', - 'parser-helper.js', - 'throttle.js', + "js-property-provider.js", + "network-helper.js", + "parser-helper.js", + "throttle.js", ) diff --git a/devtools/shared/worker/moz.build b/devtools/shared/worker/moz.build index 466a52dc638533..dd86df3a6735e3 100644 --- a/devtools/shared/worker/moz.build +++ b/devtools/shared/worker/moz.build @@ -4,11 +4,11 @@ # License, v. 2.0. If a copy of the MPL was not distributed with this # file, You can obtain one at http://mozilla.org/MPL/2.0/. -if CONFIG['MOZ_BUILD_APP'] != 'mobile/android': - BROWSER_CHROME_MANIFESTS += ['tests/browser/browser.ini'] +if CONFIG["MOZ_BUILD_APP"] != "mobile/android": + BROWSER_CHROME_MANIFESTS += ["tests/browser/browser.ini"] DevToolsModules( - 'helper.js', - 'loader.js', - 'worker.js', + "helper.js", + "loader.js", + "worker.js", ) diff --git a/devtools/startup/aboutdevtools/moz.build b/devtools/startup/aboutdevtools/moz.build index 772fc9c1fea97b..2b25782368dbf3 100644 --- a/devtools/startup/aboutdevtools/moz.build +++ b/devtools/startup/aboutdevtools/moz.build @@ -5,11 +5,11 @@ # file, You can obtain one at http://mozilla.org/MPL/2.0/. EXTRA_JS_MODULES += [ - 'AboutDevToolsRegistration.jsm', + "AboutDevToolsRegistration.jsm", ] XPCOM_MANIFESTS += [ - 'components.conf', + "components.conf", ] -BROWSER_CHROME_MANIFESTS += ['test/browser.ini'] +BROWSER_CHROME_MANIFESTS += ["test/browser.ini"] diff --git a/devtools/startup/locales/moz.build b/devtools/startup/locales/moz.build index aac3a838c4c239..d988c0ff9b162c 100644 --- a/devtools/startup/locales/moz.build +++ b/devtools/startup/locales/moz.build @@ -4,4 +4,4 @@ # License, v. 2.0. If a copy of the MPL was not distributed with this # file, You can obtain one at http://mozilla.org/MPL/2.0/. -JAR_MANIFESTS += ['jar.mn'] +JAR_MANIFESTS += ["jar.mn"] diff --git a/devtools/startup/moz.build b/devtools/startup/moz.build index 9eb7821eb276e9..07341e57fb2d5c 100644 --- a/devtools/startup/moz.build +++ b/devtools/startup/moz.build @@ -4,26 +4,26 @@ # License, v. 2.0. If a copy of the MPL was not distributed with this # file, You can obtain one at http://mozilla.org/MPL/2.0/. -JAR_MANIFESTS += ['jar.mn'] +JAR_MANIFESTS += ["jar.mn"] # Register the startup components only for 'all' builds. 
-if CONFIG['MOZ_DEVTOOLS'] == 'all': +if CONFIG["MOZ_DEVTOOLS"] == "all": EXTRA_JS_MODULES += [ - 'AboutDebuggingRegistration.jsm', - 'AboutDevToolsToolboxRegistration.jsm', - 'DevToolsStartup.jsm', + "AboutDebuggingRegistration.jsm", + "AboutDevToolsToolboxRegistration.jsm", + "DevToolsStartup.jsm", ] DIRS += [ - 'aboutdevtools', - 'locales', + "aboutdevtools", + "locales", ] XPCOM_MANIFESTS += [ - 'components.conf', + "components.conf", ] -XPCSHELL_TESTS_MANIFESTS += ['tests/xpcshell/xpcshell.ini'] +XPCSHELL_TESTS_MANIFESTS += ["tests/xpcshell/xpcshell.ini"] -if CONFIG['MOZ_BUILD_APP'] != 'mobile/android': - BROWSER_CHROME_MANIFESTS += ['tests/browser/browser.ini'] +if CONFIG["MOZ_BUILD_APP"] != "mobile/android": + BROWSER_CHROME_MANIFESTS += ["tests/browser/browser.ini"] diff --git a/docs/conf.py b/docs/conf.py index 67e24ed2c502a9..d73e83fcad5cee 100644 --- a/docs/conf.py +++ b/docs/conf.py @@ -61,7 +61,7 @@ "../toolkit/components/featuregates", "../toolkit/mozapps/extensions", "../toolkit/components/prompts/src", - ] +] root_for_relative_js_paths = ".." jsdoc_config_path = "jsdoc.json" @@ -69,7 +69,9 @@ source_suffix = [".rst", ".md"] master_doc = "index" project = "Firefox Source Docs" -html_logo = os.path.join(topsrcdir, "browser/branding/nightly/content/firefox-wordmark.svg") +html_logo = os.path.join( + topsrcdir, "browser/branding/nightly/content/firefox-wordmark.svg" +) html_favicon = os.path.join(topsrcdir, "browser/branding/nightly/firefox.ico") exclude_patterns = ["_build", "_staging", "_venv"] diff --git a/docshell/base/moz.build b/docshell/base/moz.build index 0bfcc16acb01f9..5b84c5ec9ea705 100644 --- a/docshell/base/moz.build +++ b/docshell/base/moz.build @@ -4,129 +4,127 @@ # License, v. 2.0. If a copy of the MPL was not distributed with this # file, You can obtain one at http://mozilla.org/MPL/2.0/. 
-with Files('**'): - BUG_COMPONENT = ('Core', 'DOM: Navigation') +with Files("**"): + BUG_COMPONENT = ("Core", "DOM: Navigation") -with Files('crashtests/430628*'): - BUG_COMPONENT = ('Core', 'DOM: Editor') +with Files("crashtests/430628*"): + BUG_COMPONENT = ("Core", "DOM: Editor") -with Files('crashtests/432114*'): - BUG_COMPONENT = ('Core', 'DOM: Editor') +with Files("crashtests/432114*"): + BUG_COMPONENT = ("Core", "DOM: Editor") -with Files('crashtests/500328*'): - BUG_COMPONENT = ('Firefox', 'Bookmarks & History') +with Files("crashtests/500328*"): + BUG_COMPONENT = ("Firefox", "Bookmarks & History") -with Files('IHistory.h'): - BUG_COMPONENT = ('Toolkit', 'Places') +with Files("IHistory.h"): + BUG_COMPONENT = ("Toolkit", "Places") -with Files('*LoadContext.*'): - BUG_COMPONENT = ('Core', 'Networking') +with Files("*LoadContext.*"): + BUG_COMPONENT = ("Core", "Networking") -with Files('nsAboutRedirector.*'): - BUG_COMPONENT = ('Core', 'General') +with Files("nsAboutRedirector.*"): + BUG_COMPONENT = ("Core", "General") -with Files('nsIScrollObserver.*'): - BUG_COMPONENT = ('Core', 'Panning and Zooming') +with Files("nsIScrollObserver.*"): + BUG_COMPONENT = ("Core", "Panning and Zooming") DIRS += [ - 'timeline', + "timeline", ] XPIDL_SOURCES += [ - 'nsIContentViewer.idl', - 'nsIContentViewerEdit.idl', - 'nsIDocShell.idl', - 'nsIDocShellTreeItem.idl', - 'nsIDocShellTreeOwner.idl', - 'nsIDocumentLoaderFactory.idl', - 'nsILoadContext.idl', - 'nsILoadURIDelegate.idl', - 'nsIPrivacyTransitionObserver.idl', - 'nsIReflowObserver.idl', - 'nsIRefreshURI.idl', - 'nsITooltipListener.idl', - 'nsITooltipTextProvider.idl', - 'nsIURIFixup.idl', - 'nsIWebNavigation.idl', - 'nsIWebNavigationInfo.idl', - 'nsIWebPageDescriptor.idl', + "nsIContentViewer.idl", + "nsIContentViewerEdit.idl", + "nsIDocShell.idl", + "nsIDocShellTreeItem.idl", + "nsIDocShellTreeOwner.idl", + "nsIDocumentLoaderFactory.idl", + "nsILoadContext.idl", + "nsILoadURIDelegate.idl", + "nsIPrivacyTransitionObserver.idl", + "nsIReflowObserver.idl", + "nsIRefreshURI.idl", + "nsITooltipListener.idl", + "nsITooltipTextProvider.idl", + "nsIURIFixup.idl", + "nsIWebNavigation.idl", + "nsIWebNavigationInfo.idl", + "nsIWebPageDescriptor.idl", ] -XPIDL_MODULE = 'docshell' +XPIDL_MODULE = "docshell" EXPORTS += [ - 'nsCTooltipTextProvider.h', - 'nsDocShell.h', - 'nsDocShellLoadState.h', - 'nsDocShellLoadTypes.h', - 'nsDocShellTreeOwner.h', - 'nsIScrollObserver.h', - 'nsWebNavigationInfo.h', - 'SerializedLoadContext.h', + "nsCTooltipTextProvider.h", + "nsDocShell.h", + "nsDocShellLoadState.h", + "nsDocShellLoadTypes.h", + "nsDocShellTreeOwner.h", + "nsIScrollObserver.h", + "nsWebNavigationInfo.h", + "SerializedLoadContext.h", ] EXPORTS.mozilla += [ - 'BaseHistory.h', - 'IHistory.h', - 'LoadContext.h', + "BaseHistory.h", + "IHistory.h", + "LoadContext.h", ] EXPORTS.mozilla.dom += [ - 'BrowsingContext.h', - 'BrowsingContextGroup.h', - 'BrowsingContextWebProgress.h', - 'CanonicalBrowsingContext.h', - 'ChildProcessChannelListener.h', - 'SyncedContext.h', - 'SyncedContextInlines.h', - 'WindowContext.h', + "BrowsingContext.h", + "BrowsingContextGroup.h", + "BrowsingContextWebProgress.h", + "CanonicalBrowsingContext.h", + "ChildProcessChannelListener.h", + "SyncedContext.h", + "SyncedContextInlines.h", + "WindowContext.h", ] UNIFIED_SOURCES += [ - 'BaseHistory.cpp', - 'BrowsingContext.cpp', - 'BrowsingContextGroup.cpp', - 'BrowsingContextWebProgress.cpp', - 'CanonicalBrowsingContext.cpp', - 'ChildProcessChannelListener.cpp', - 'LoadContext.cpp', - 
'nsAboutRedirector.cpp', - 'nsDocShell.cpp', - 'nsDocShellEditorData.cpp', - 'nsDocShellEnumerator.cpp', - 'nsDocShellLoadState.cpp', - 'nsDocShellTelemetryUtils.cpp', - 'nsDocShellTreeOwner.cpp', - 'nsDSURIContentListener.cpp', - 'nsPingListener.cpp', - 'nsRefreshTimer.cpp', - 'nsWebNavigationInfo.cpp', - 'SerializedLoadContext.cpp', - 'WindowContext.cpp', + "BaseHistory.cpp", + "BrowsingContext.cpp", + "BrowsingContextGroup.cpp", + "BrowsingContextWebProgress.cpp", + "CanonicalBrowsingContext.cpp", + "ChildProcessChannelListener.cpp", + "LoadContext.cpp", + "nsAboutRedirector.cpp", + "nsDocShell.cpp", + "nsDocShellEditorData.cpp", + "nsDocShellEnumerator.cpp", + "nsDocShellLoadState.cpp", + "nsDocShellTelemetryUtils.cpp", + "nsDocShellTreeOwner.cpp", + "nsDSURIContentListener.cpp", + "nsPingListener.cpp", + "nsRefreshTimer.cpp", + "nsWebNavigationInfo.cpp", + "SerializedLoadContext.cpp", + "WindowContext.cpp", ] -include('/ipc/chromium/chromium-config.mozbuild') +include("/ipc/chromium/chromium-config.mozbuild") -FINAL_LIBRARY = 'xul' +FINAL_LIBRARY = "xul" LOCAL_INCLUDES += [ - '/docshell/shistory', - '/dom/base', - '/dom/bindings', - '/js/xpconnect/src', - '/layout/base', - '/layout/generic', - '/layout/style', - '/layout/xul', - '/netwerk/base', - '/netwerk/protocol/viewsource', - '/toolkit/components/browser', - '/toolkit/components/find', - '/tools/profiler', + "/docshell/shistory", + "/dom/base", + "/dom/bindings", + "/js/xpconnect/src", + "/layout/base", + "/layout/generic", + "/layout/style", + "/layout/xul", + "/netwerk/base", + "/netwerk/protocol/viewsource", + "/toolkit/components/browser", + "/toolkit/components/find", + "/tools/profiler", ] -if CONFIG['CC_TYPE'] in ('clang', 'gcc'): - CXXFLAGS += ['-Wno-error=shadow'] +if CONFIG["CC_TYPE"] in ("clang", "gcc"): + CXXFLAGS += ["-Wno-error=shadow"] -EXTRA_JS_MODULES += [ - 'URIFixup.jsm' -] +EXTRA_JS_MODULES += ["URIFixup.jsm"] diff --git a/docshell/base/timeline/moz.build b/docshell/base/timeline/moz.build index 51c5dce531b16e..9147e8263c3d82 100644 --- a/docshell/base/timeline/moz.build +++ b/docshell/base/timeline/moz.build @@ -4,44 +4,42 @@ # License, v. 2.0. If a copy of the MPL was not distributed with this # file, You can obtain one at http://mozilla.org/MPL/2.0/. 
-with Files('**'): - BUG_COMPONENT = ('DevTools', 'Performance Tools (Profiler/Timeline)') +with Files("**"): + BUG_COMPONENT = ("DevTools", "Performance Tools (Profiler/Timeline)") EXPORTS.mozilla += [ - 'AbstractTimelineMarker.h', - 'AutoGlobalTimelineMarker.h', - 'AutoRestyleTimelineMarker.h', - 'AutoTimelineMarker.h', - 'CompositeTimelineMarker.h', - 'ConsoleTimelineMarker.h', - 'DocLoadingTimelineMarker.h', - 'EventTimelineMarker.h', - 'JavascriptTimelineMarker.h', - 'LayerTimelineMarker.h', - 'MarkersStorage.h', - 'MessagePortTimelineMarker.h', - 'ObservedDocShell.h', - 'RestyleTimelineMarker.h', - 'TimelineConsumers.h', - 'TimelineMarker.h', - 'TimelineMarkerEnums.h', - 'TimestampTimelineMarker.h', - 'WorkerTimelineMarker.h', + "AbstractTimelineMarker.h", + "AutoGlobalTimelineMarker.h", + "AutoRestyleTimelineMarker.h", + "AutoTimelineMarker.h", + "CompositeTimelineMarker.h", + "ConsoleTimelineMarker.h", + "DocLoadingTimelineMarker.h", + "EventTimelineMarker.h", + "JavascriptTimelineMarker.h", + "LayerTimelineMarker.h", + "MarkersStorage.h", + "MessagePortTimelineMarker.h", + "ObservedDocShell.h", + "RestyleTimelineMarker.h", + "TimelineConsumers.h", + "TimelineMarker.h", + "TimelineMarkerEnums.h", + "TimestampTimelineMarker.h", + "WorkerTimelineMarker.h", ] UNIFIED_SOURCES += [ - 'AbstractTimelineMarker.cpp', - 'AutoGlobalTimelineMarker.cpp', - 'AutoRestyleTimelineMarker.cpp', - 'AutoTimelineMarker.cpp', - 'MarkersStorage.cpp', - 'ObservedDocShell.cpp', - 'TimelineConsumers.cpp', - 'TimelineMarker.cpp', + "AbstractTimelineMarker.cpp", + "AutoGlobalTimelineMarker.cpp", + "AutoRestyleTimelineMarker.cpp", + "AutoTimelineMarker.cpp", + "MarkersStorage.cpp", + "ObservedDocShell.cpp", + "TimelineConsumers.cpp", + "TimelineMarker.cpp", ] -FINAL_LIBRARY = 'xul' +FINAL_LIBRARY = "xul" -LOCAL_INCLUDES += [ - '/docshell/base' -] +LOCAL_INCLUDES += ["/docshell/base"] diff --git a/docshell/build/moz.build b/docshell/build/moz.build index b48cb24fabe441..d9fd81848e668b 100644 --- a/docshell/build/moz.build +++ b/docshell/build/moz.build @@ -5,21 +5,21 @@ # file, You can obtain one at http://mozilla.org/MPL/2.0/. EXPORTS += [ - 'nsDocShellCID.h', + "nsDocShellCID.h", ] SOURCES += [ - 'nsDocShellModule.cpp', + "nsDocShellModule.cpp", ] LOCAL_INCLUDES += [ - '/docshell/shistory', + "/docshell/shistory", ] XPCOM_MANIFESTS += [ - 'components.conf', + "components.conf", ] -include('/ipc/chromium/chromium-config.mozbuild') +include("/ipc/chromium/chromium-config.mozbuild") -FINAL_LIBRARY = 'xul' +FINAL_LIBRARY = "xul" diff --git a/docshell/moz.build b/docshell/moz.build index 02bc598e843e12..57ae14f5177c60 100644 --- a/docshell/moz.build +++ b/docshell/moz.build @@ -4,53 +4,53 @@ # License, v. 2.0. If a copy of the MPL was not distributed with this # file, You can obtain one at http://mozilla.org/MPL/2.0/. 
-with Files('**'): - BUG_COMPONENT = ('Core', 'DOM: Navigation') +with Files("**"): + BUG_COMPONENT = ("Core", "DOM: Navigation") -if CONFIG['MOZ_BUILD_APP'] == 'browser': - DEFINES['MOZ_BUILD_APP_IS_BROWSER'] = True +if CONFIG["MOZ_BUILD_APP"] == "browser": + DEFINES["MOZ_BUILD_APP_IS_BROWSER"] = True DIRS += [ - 'base', - 'shistory', - 'build', - 'resources/content', + "base", + "shistory", + "build", + "resources/content", ] XPCSHELL_TESTS_MANIFESTS += [ - 'test/unit/xpcshell.ini', - 'test/unit_ipc/xpcshell.ini', + "test/unit/xpcshell.ini", + "test/unit_ipc/xpcshell.ini", ] MOCHITEST_MANIFESTS += [ - 'test/iframesandbox/mochitest.ini', - 'test/mochitest/mochitest.ini', - 'test/navigation/mochitest.ini', + "test/iframesandbox/mochitest.ini", + "test/mochitest/mochitest.ini", + "test/navigation/mochitest.ini", ] MOCHITEST_CHROME_MANIFESTS += [ - 'test/chrome/chrome.ini', + "test/chrome/chrome.ini", ] BROWSER_CHROME_MANIFESTS += [ - 'test/browser/browser.ini', - 'test/navigation/browser.ini', + "test/browser/browser.ini", + "test/navigation/browser.ini", ] TEST_HARNESS_FILES.testing.mochitest.tests.docshell.test.chrome += [ - 'test/chrome/112564_nocache.html', - 'test/chrome/112564_nocache.html^headers^', - 'test/chrome/215405_nocache.html', - 'test/chrome/215405_nocache.html^headers^', - 'test/chrome/215405_nostore.html', - 'test/chrome/215405_nostore.html^headers^', - 'test/chrome/582176_dummy.html', - 'test/chrome/582176_xml.xml', - 'test/chrome/582176_xslt.xsl', - 'test/chrome/92598_nostore.html', - 'test/chrome/92598_nostore.html^headers^', - 'test/chrome/allowContentRetargeting.sjs', - 'test/chrome/blue.png', - 'test/chrome/bug89419.sjs', - 'test/chrome/red.png', + "test/chrome/112564_nocache.html", + "test/chrome/112564_nocache.html^headers^", + "test/chrome/215405_nocache.html", + "test/chrome/215405_nocache.html^headers^", + "test/chrome/215405_nostore.html", + "test/chrome/215405_nostore.html^headers^", + "test/chrome/582176_dummy.html", + "test/chrome/582176_xml.xml", + "test/chrome/582176_xslt.xsl", + "test/chrome/92598_nostore.html", + "test/chrome/92598_nostore.html^headers^", + "test/chrome/allowContentRetargeting.sjs", + "test/chrome/blue.png", + "test/chrome/bug89419.sjs", + "test/chrome/red.png", ] diff --git a/docshell/resources/content/moz.build b/docshell/resources/content/moz.build index eb4454d28f88bf..d988c0ff9b162c 100644 --- a/docshell/resources/content/moz.build +++ b/docshell/resources/content/moz.build @@ -4,4 +4,4 @@ # License, v. 2.0. If a copy of the MPL was not distributed with this # file, You can obtain one at http://mozilla.org/MPL/2.0/. -JAR_MANIFESTS += ['jar.mn'] \ No newline at end of file +JAR_MANIFESTS += ["jar.mn"] diff --git a/docshell/shistory/moz.build b/docshell/shistory/moz.build index eab1a1dc31934d..2c4187d0704b2a 100644 --- a/docshell/shistory/moz.build +++ b/docshell/shistory/moz.build @@ -5,41 +5,41 @@ # file, You can obtain one at http://mozilla.org/MPL/2.0/. 
XPIDL_SOURCES += [ - 'nsIBFCacheEntry.idl', - 'nsISHEntry.idl', - 'nsISHistory.idl', - 'nsISHistoryListener.idl', + "nsIBFCacheEntry.idl", + "nsISHEntry.idl", + "nsISHistory.idl", + "nsISHistoryListener.idl", ] -XPIDL_MODULE = 'shistory' +XPIDL_MODULE = "shistory" EXPORTS += [ - 'nsSHEntry.h', - 'nsSHEntryShared.h', - 'nsSHistory.h', + "nsSHEntry.h", + "nsSHEntryShared.h", + "nsSHistory.h", ] EXPORTS.mozilla.dom += [ - 'ChildSHistory.h', - 'SessionHistoryEntry.h', + "ChildSHistory.h", + "SessionHistoryEntry.h", ] UNIFIED_SOURCES += [ - 'ChildSHistory.cpp', - 'nsSHEntry.cpp', - 'nsSHEntryShared.cpp', - 'nsSHistory.cpp', - 'SessionHistoryEntry.cpp', + "ChildSHistory.cpp", + "nsSHEntry.cpp", + "nsSHEntryShared.cpp", + "nsSHistory.cpp", + "SessionHistoryEntry.cpp", ] LOCAL_INCLUDES += [ - '/docshell/base', - '/dom/base', + "/docshell/base", + "/dom/base", ] -FINAL_LIBRARY = 'xul' +FINAL_LIBRARY = "xul" -if CONFIG['CC_TYPE'] in ('clang', 'gcc'): - CXXFLAGS += ['-Wno-error=shadow'] +if CONFIG["CC_TYPE"] in ("clang", "gcc"): + CXXFLAGS += ["-Wno-error=shadow"] -include('/ipc/chromium/chromium-config.mozbuild') +include("/ipc/chromium/chromium-config.mozbuild") diff --git a/docshell/test/moz.build b/docshell/test/moz.build index d3abe3f1464d09..7cebe0339f642d 100644 --- a/docshell/test/moz.build +++ b/docshell/test/moz.build @@ -1,137 +1,137 @@ -with Files('**'): - BUG_COMPONENT = ('Core', 'DOM: Navigation') +with Files("**"): + BUG_COMPONENT = ("Core", "DOM: Navigation") -with Files('browser/*_bug234628*'): - BUG_COMPONENT = ('Core', 'Internationalization') +with Files("browser/*_bug234628*"): + BUG_COMPONENT = ("Core", "Internationalization") -with Files('browser/*_bug349769*'): - BUG_COMPONENT = ('Core', 'DOM: Core & HTML') +with Files("browser/*_bug349769*"): + BUG_COMPONENT = ("Core", "DOM: Core & HTML") -with Files('browser/*_bug388121*'): - BUG_COMPONENT = ('Core', 'DOM: Core & HTML') +with Files("browser/*_bug388121*"): + BUG_COMPONENT = ("Core", "DOM: Core & HTML") -with Files('browser/*_bug655270*'): - BUG_COMPONENT = ('Toolkit', 'Places') +with Files("browser/*_bug655270*"): + BUG_COMPONENT = ("Toolkit", "Places") -with Files('browser/*_bug655273*'): - BUG_COMPONENT = ('Firefox', 'Menus') +with Files("browser/*_bug655273*"): + BUG_COMPONENT = ("Firefox", "Menus") -with Files('browser/*_bug852909*'): - BUG_COMPONENT = ('Firefox', 'Menus') +with Files("browser/*_bug852909*"): + BUG_COMPONENT = ("Firefox", "Menus") -with Files('browser/*bug92473*'): - BUG_COMPONENT = ('Core', 'Internationalization') +with Files("browser/*bug92473*"): + BUG_COMPONENT = ("Core", "Internationalization") -with Files('browser/*loadDisallowInherit*'): - BUG_COMPONENT = ('Firefox', 'Address Bar') +with Files("browser/*loadDisallowInherit*"): + BUG_COMPONENT = ("Firefox", "Address Bar") -with Files('browser/*tab_touch_events*'): - BUG_COMPONENT = ('Core', 'DOM: Events') +with Files("browser/*tab_touch_events*"): + BUG_COMPONENT = ("Core", "DOM: Events") -with Files('browser/*timelineMarkers*'): - BUG_COMPONENT = ('DevTools', 'Performance Tools (Profiler/Timeline)') +with Files("browser/*timelineMarkers*"): + BUG_COMPONENT = ("DevTools", "Performance Tools (Profiler/Timeline)") -with Files('browser/*ua_emulation*'): - BUG_COMPONENT = ('DevTools', 'General') +with Files("browser/*ua_emulation*"): + BUG_COMPONENT = ("DevTools", "General") -with Files('chrome/*112564*'): - BUG_COMPONENT = ('Core', 'Networking: HTTP') +with Files("chrome/*112564*"): + BUG_COMPONENT = ("Core", "Networking: HTTP") -with 
Files('chrome/*303267*'): - BUG_COMPONENT = ('Core', 'DOM: Core & HTML') +with Files("chrome/*303267*"): + BUG_COMPONENT = ("Core", "DOM: Core & HTML") -with Files('chrome/*453650*'): - BUG_COMPONENT = ('Core', 'Layout') +with Files("chrome/*453650*"): + BUG_COMPONENT = ("Core", "Layout") -with Files('chrome/*565388*'): - BUG_COMPONENT = ('Core', 'Widget') +with Files("chrome/*565388*"): + BUG_COMPONENT = ("Core", "Widget") -with Files('chrome/*582176*'): - BUG_COMPONENT = ('Core', 'DOM: Core & HTML') +with Files("chrome/*582176*"): + BUG_COMPONENT = ("Core", "DOM: Core & HTML") -with Files('chrome/*608669*'): - BUG_COMPONENT = ('Core', 'DOM: Core & HTML') +with Files("chrome/*608669*"): + BUG_COMPONENT = ("Core", "DOM: Core & HTML") -with Files('chrome/*690056*'): - BUG_COMPONENT = ('Core', 'DOM: Core & HTML') +with Files("chrome/*690056*"): + BUG_COMPONENT = ("Core", "DOM: Core & HTML") -with Files('chrome/*92598*'): - BUG_COMPONENT = ('Core', 'Networking: HTTP') +with Files("chrome/*92598*"): + BUG_COMPONENT = ("Core", "Networking: HTTP") -with Files('iframesandbox/**'): - BUG_COMPONENT = ('Core', 'Security') +with Files("iframesandbox/**"): + BUG_COMPONENT = ("Core", "Security") -with Files('iframesandbox/*marquee_event_handlers*'): - BUG_COMPONENT = ('Core', 'DOM: Security') +with Files("iframesandbox/*marquee_event_handlers*"): + BUG_COMPONENT = ("Core", "DOM: Security") -with Files('mochitest/*1045096*'): - BUG_COMPONENT = ('Core', 'DOM: Core & HTML') +with Files("mochitest/*1045096*"): + BUG_COMPONENT = ("Core", "DOM: Core & HTML") -with Files('mochitest/*1151421*'): - BUG_COMPONENT = ('Core', 'DOM: Core & HTML') +with Files("mochitest/*1151421*"): + BUG_COMPONENT = ("Core", "DOM: Core & HTML") -with Files('mochitest/*402210*'): - BUG_COMPONENT = ('Core', 'DOM: Security') +with Files("mochitest/*402210*"): + BUG_COMPONENT = ("Core", "DOM: Security") -with Files('mochitest/*509055*'): - BUG_COMPONENT = ('Firefox', 'Bookmarks & History') +with Files("mochitest/*509055*"): + BUG_COMPONENT = ("Firefox", "Bookmarks & History") -with Files('mochitest/*511449*'): - BUG_COMPONENT = ('Core', 'Widget: Cocoa') +with Files("mochitest/*511449*"): + BUG_COMPONENT = ("Core", "Widget: Cocoa") -with Files('mochitest/*551225*'): - BUG_COMPONENT = ('Core', 'DOM: Core & HTML') +with Files("mochitest/*551225*"): + BUG_COMPONENT = ("Core", "DOM: Core & HTML") -with Files('mochitest/*570341*'): - BUG_COMPONENT = ('Core', 'DOM: Core & HTML') +with Files("mochitest/*570341*"): + BUG_COMPONENT = ("Core", "DOM: Core & HTML") -with Files('mochitest/*580069*'): - BUG_COMPONENT = ('Core', 'DOM: Core & HTML') +with Files("mochitest/*580069*"): + BUG_COMPONENT = ("Core", "DOM: Core & HTML") -with Files('mochitest/*637644*'): - BUG_COMPONENT = ('Core', 'DOM: Core & HTML') +with Files("mochitest/*637644*"): + BUG_COMPONENT = ("Core", "DOM: Core & HTML") -with Files('mochitest/*640387*'): - BUG_COMPONENT = ('Core', 'DOM: Events') +with Files("mochitest/*640387*"): + BUG_COMPONENT = ("Core", "DOM: Events") -with Files('mochitest/*668513*'): - BUG_COMPONENT = ('Core', 'DOM: Core & HTML') +with Files("mochitest/*668513*"): + BUG_COMPONENT = ("Core", "DOM: Core & HTML") -with Files('mochitest/*797909*'): - BUG_COMPONENT = ('Core', 'DOM: Core & HTML') +with Files("mochitest/*797909*"): + BUG_COMPONENT = ("Core", "DOM: Core & HTML") -with Files('mochitest/*forceinheritprincipal*'): - BUG_COMPONENT = ('Core', 'DOM: Security') +with Files("mochitest/*forceinheritprincipal*"): + BUG_COMPONENT = ("Core", "DOM: Security") 
-with Files('navigation/*13871.html'): - BUG_COMPONENT = ('Core', 'Security') +with Files("navigation/*13871.html"): + BUG_COMPONENT = ("Core", "Security") -with Files('navigation/*386782*'): - BUG_COMPONENT = ('Core', 'DOM: Editor') +with Files("navigation/*386782*"): + BUG_COMPONENT = ("Core", "DOM: Editor") -with Files('navigation/*430624*'): - BUG_COMPONENT = ('Core', 'DOM: Editor') +with Files("navigation/*430624*"): + BUG_COMPONENT = ("Core", "DOM: Editor") -with Files('navigation/*430723*'): - BUG_COMPONENT = ('Core', 'DOM: UI Events & Focus Handling') +with Files("navigation/*430723*"): + BUG_COMPONENT = ("Core", "DOM: UI Events & Focus Handling") -with Files('navigation/*child*'): - BUG_COMPONENT = ('Core', 'Security') +with Files("navigation/*child*"): + BUG_COMPONENT = ("Core", "Security") -with Files('navigation/*opener*'): - BUG_COMPONENT = ('Core', 'Security') +with Files("navigation/*opener*"): + BUG_COMPONENT = ("Core", "Security") -with Files('navigation/*reserved*'): - BUG_COMPONENT = ('Core', 'Security') +with Files("navigation/*reserved*"): + BUG_COMPONENT = ("Core", "Security") -with Files('navigation/*triggering*'): - BUG_COMPONENT = ('Core', 'DOM: Security') +with Files("navigation/*triggering*"): + BUG_COMPONENT = ("Core", "DOM: Security") -with Files('unit/*442584*'): - BUG_COMPONENT = ('Core', 'Networking: Cache') +with Files("unit/*442584*"): + BUG_COMPONENT = ("Core", "Networking: Cache") -with Files('unit/*setUsePrivateBrowsing*'): - BUG_COMPONENT = ('Firefox', 'Extension Compatibility') +with Files("unit/*setUsePrivateBrowsing*"): + BUG_COMPONENT = ("Firefox", "Extension Compatibility") diff --git a/dom/abort/moz.build b/dom/abort/moz.build index aea70cbc0c083e..b4ef9665c274d2 100644 --- a/dom/abort/moz.build +++ b/dom/abort/moz.build @@ -7,18 +7,18 @@ with Files("**"): BUG_COMPONENT = ("Core", "DOM: Core & HTML") -TEST_DIRS += ['tests'] +TEST_DIRS += ["tests"] EXPORTS.mozilla.dom += [ - 'AbortController.h', - 'AbortSignal.h', + "AbortController.h", + "AbortSignal.h", ] UNIFIED_SOURCES += [ - 'AbortController.cpp', - 'AbortSignal.cpp', + "AbortController.cpp", + "AbortSignal.cpp", ] -include('/ipc/chromium/chromium-config.mozbuild') +include("/ipc/chromium/chromium-config.mozbuild") -FINAL_LIBRARY = 'xul' +FINAL_LIBRARY = "xul" diff --git a/dom/abort/tests/moz.build b/dom/abort/tests/moz.build index 9df9dda57ecd99..af8d7033c8e5e0 100644 --- a/dom/abort/tests/moz.build +++ b/dom/abort/tests/moz.build @@ -4,6 +4,6 @@ # License, v. 2.0. If a copy of the MPL was not distributed with this # file, You can obtain one at http://mozilla.org/MPL/2.0/. 
-MOCHITEST_MANIFESTS += ['mochitest.ini'] +MOCHITEST_MANIFESTS += ["mochitest.ini"] -XPCSHELL_TESTS_MANIFESTS += ['unit/xpcshell.ini'] +XPCSHELL_TESTS_MANIFESTS += ["unit/xpcshell.ini"] diff --git a/dom/animation/moz.build b/dom/animation/moz.build index 78eec9f510ef88..71d9a03e56185d 100644 --- a/dom/animation/moz.build +++ b/dom/animation/moz.build @@ -7,66 +7,66 @@ with Files("**"): BUG_COMPONENT = ("Core", "DOM: Animation") -MOCHITEST_MANIFESTS += ['test/mochitest.ini'] -MOCHITEST_CHROME_MANIFESTS += ['test/chrome.ini'] +MOCHITEST_MANIFESTS += ["test/mochitest.ini"] +MOCHITEST_CHROME_MANIFESTS += ["test/chrome.ini"] EXPORTS.mozilla.dom += [ - 'Animation.h', - 'AnimationEffect.h', - 'AnimationTimeline.h', - 'CSSAnimation.h', - 'CSSPseudoElement.h', - 'CSSTransition.h', - 'DocumentTimeline.h', - 'KeyframeEffect.h', + "Animation.h", + "AnimationEffect.h", + "AnimationTimeline.h", + "CSSAnimation.h", + "CSSPseudoElement.h", + "CSSTransition.h", + "DocumentTimeline.h", + "KeyframeEffect.h", ] EXPORTS.mozilla += [ - 'AnimationComparator.h', - 'AnimationEventDispatcher.h', - 'AnimationPerformanceWarning.h', - 'AnimationPropertySegment.h', - 'AnimationTarget.h', - 'AnimationUtils.h', - 'ComputedTiming.h', - 'ComputedTimingFunction.h', - 'EffectCompositor.h', - 'EffectSet.h', - 'Keyframe.h', - 'KeyframeEffectParams.h', - 'KeyframeUtils.h', - 'PendingAnimationTracker.h', - 'PostRestyleMode.h', - 'PseudoElementHashEntry.h', - 'TimingParams.h', + "AnimationComparator.h", + "AnimationEventDispatcher.h", + "AnimationPerformanceWarning.h", + "AnimationPropertySegment.h", + "AnimationTarget.h", + "AnimationUtils.h", + "ComputedTiming.h", + "ComputedTimingFunction.h", + "EffectCompositor.h", + "EffectSet.h", + "Keyframe.h", + "KeyframeEffectParams.h", + "KeyframeUtils.h", + "PendingAnimationTracker.h", + "PostRestyleMode.h", + "PseudoElementHashEntry.h", + "TimingParams.h", ] UNIFIED_SOURCES += [ - 'Animation.cpp', - 'AnimationEffect.cpp', - 'AnimationEventDispatcher.cpp', - 'AnimationPerformanceWarning.cpp', - 'AnimationTimeline.cpp', - 'AnimationUtils.cpp', - 'ComputedTimingFunction.cpp', - 'CSSAnimation.cpp', - 'CSSPseudoElement.cpp', - 'CSSTransition.cpp', - 'DocumentTimeline.cpp', - 'EffectCompositor.cpp', - 'EffectSet.cpp', - 'KeyframeEffect.cpp', - 'KeyframeUtils.cpp', - 'PendingAnimationTracker.cpp', - 'TimingParams.cpp', + "Animation.cpp", + "AnimationEffect.cpp", + "AnimationEventDispatcher.cpp", + "AnimationPerformanceWarning.cpp", + "AnimationTimeline.cpp", + "AnimationUtils.cpp", + "ComputedTimingFunction.cpp", + "CSSAnimation.cpp", + "CSSPseudoElement.cpp", + "CSSTransition.cpp", + "DocumentTimeline.cpp", + "EffectCompositor.cpp", + "EffectSet.cpp", + "KeyframeEffect.cpp", + "KeyframeUtils.cpp", + "PendingAnimationTracker.cpp", + "TimingParams.cpp", ] LOCAL_INCLUDES += [ - '/dom/base', - '/layout/base', - '/layout/style', + "/dom/base", + "/layout/base", + "/layout/style", ] -include('/ipc/chromium/chromium-config.mozbuild') +include("/ipc/chromium/chromium-config.mozbuild") -FINAL_LIBRARY = 'xul' +FINAL_LIBRARY = "xul" diff --git a/dom/audiochannel/moz.build b/dom/audiochannel/moz.build index 24e4436c77fb02..d006368e7fe317 100644 --- a/dom/audiochannel/moz.build +++ b/dom/audiochannel/moz.build @@ -8,25 +8,25 @@ with Files("**"): BUG_COMPONENT = ("Core", "Audio/Video: Playback") XPIDL_SOURCES += [ - 'nsIAudioChannelAgent.idl', + "nsIAudioChannelAgent.idl", ] -XPIDL_MODULE = 'dom_audiochannel' +XPIDL_MODULE = "dom_audiochannel" EXPORTS += [ - 'AudioChannelAgent.h', - 
'AudioChannelService.h', + "AudioChannelAgent.h", + "AudioChannelService.h", ] UNIFIED_SOURCES += [ - 'AudioChannelAgent.cpp', - 'AudioChannelService.cpp', + "AudioChannelAgent.cpp", + "AudioChannelService.cpp", ] LOCAL_INCLUDES += [ - '/dom/base/', + "/dom/base/", ] -include('/ipc/chromium/chromium-config.mozbuild') +include("/ipc/chromium/chromium-config.mozbuild") -FINAL_LIBRARY = 'xul' +FINAL_LIBRARY = "xul" diff --git a/dom/base/gen-usecounters.py b/dom/base/gen-usecounters.py index 12233b5d214217..0a491319ec527e 100755 --- a/dom/base/gen-usecounters.py +++ b/dom/base/gen-usecounters.py @@ -9,47 +9,68 @@ import json import os import sys + sys.path.append(os.path.dirname(__file__)) import usecounters -AUTOGENERATED_WARNING_COMMENT = "/* THIS FILE IS AUTOGENERATED BY gen-usecounters.py - DO NOT EDIT */" +AUTOGENERATED_WARNING_COMMENT = ( + "/* THIS FILE IS AUTOGENERATED BY gen-usecounters.py - DO NOT EDIT */" +) + def generate_list(f, counters): def print_optional_macro_declare(name): - print(''' + print( + """ #ifndef %(name)s #define %(name)s(interface_, name_) // nothing #define DEFINED_%(name)s #endif -''' % { 'name': name }, file=f) +""" + % {"name": name}, + file=f, + ) def print_optional_macro_undeclare(name): - print(''' + print( + """ #ifdef DEFINED_%(name)s #undef DEFINED_%(name)s #undef %(name)s #endif -''' % { 'name': name }, file=f) +""" + % {"name": name}, + file=f, + ) print(AUTOGENERATED_WARNING_COMMENT, file=f) - print_optional_macro_declare('USE_COUNTER_DOM_METHOD') - print_optional_macro_declare('USE_COUNTER_DOM_ATTRIBUTE') - print_optional_macro_declare('USE_COUNTER_CUSTOM') + print_optional_macro_declare("USE_COUNTER_DOM_METHOD") + print_optional_macro_declare("USE_COUNTER_DOM_ATTRIBUTE") + print_optional_macro_declare("USE_COUNTER_CUSTOM") for counter in counters: - if counter['type'] == 'method': - print('USE_COUNTER_DOM_METHOD(%s, %s)' % (counter['interface_name'], counter['method_name']), file=f) - elif counter['type'] == 'attribute': - print('USE_COUNTER_DOM_ATTRIBUTE(%s, %s)' % (counter['interface_name'], counter['attribute_name']), file=f) - elif counter['type'] == 'custom': - desc = counter['desc'].replace('\\', r'\\').replace('"', r'\"') - print('USE_COUNTER_CUSTOM(%s, "%s")' % (counter['name'], desc), file=f) - - print_optional_macro_undeclare('USE_COUNTER_DOM_METHOD') - print_optional_macro_undeclare('USE_COUNTER_DOM_ATTRIBUTE') - print_optional_macro_undeclare('USE_COUNTER_CUSTOM') + if counter["type"] == "method": + print( + "USE_COUNTER_DOM_METHOD(%s, %s)" + % (counter["interface_name"], counter["method_name"]), + file=f, + ) + elif counter["type"] == "attribute": + print( + "USE_COUNTER_DOM_ATTRIBUTE(%s, %s)" + % (counter["interface_name"], counter["attribute_name"]), + file=f, + ) + elif counter["type"] == "custom": + desc = counter["desc"].replace("\\", r"\\").replace('"', r"\"") + print('USE_COUNTER_CUSTOM(%s, "%s")' % (counter["name"], desc), file=f) + + print_optional_macro_undeclare("USE_COUNTER_DOM_METHOD") + print_optional_macro_undeclare("USE_COUNTER_DOM_ATTRIBUTE") + print_optional_macro_undeclare("USE_COUNTER_CUSTOM") + def use_counter_list(output_header, conf_filename): counters = usecounters.read_conf(conf_filename) diff --git a/dom/base/moz.build b/dom/base/moz.build index 04dd4bc8a10860..b00e233967ba42 100644 --- a/dom/base/moz.build +++ b/dom/base/moz.build @@ -4,556 +4,564 @@ # License, v. 2.0. If a copy of the MPL was not distributed with this # file, You can obtain one at http://mozilla.org/MPL/2.0/. 
-with Files('*Selection*'): - BUG_COMPONENT = ('Core', 'DOM: Selection') +with Files("*Selection*"): + BUG_COMPONENT = ("Core", "DOM: Selection") with Files("**"): BUG_COMPONENT = ("Core", "DOM: Core & HTML") -TEST_DIRS += ['test'] +TEST_DIRS += ["test"] XPIDL_SOURCES += [ - 'mozIDOMWindow.idl', - 'nsIContentPolicy.idl', - 'nsIDocumentEncoder.idl', - 'nsIDOMRequestService.idl', - 'nsIDroppedLinkHandler.idl', - 'nsIEventSourceEventService.idl', - 'nsIImageLoadingContent.idl', - 'nsIMessageManager.idl', - 'nsIObjectLoadingContent.idl', - 'nsIScriptableContentIterator.idl', - 'nsIScriptChannel.idl', - 'nsISelectionController.idl', - 'nsISelectionDisplay.idl', - 'nsISelectionListener.idl', - 'nsISlowScriptDebug.idl', + "mozIDOMWindow.idl", + "nsIContentPolicy.idl", + "nsIDocumentEncoder.idl", + "nsIDOMRequestService.idl", + "nsIDroppedLinkHandler.idl", + "nsIEventSourceEventService.idl", + "nsIImageLoadingContent.idl", + "nsIMessageManager.idl", + "nsIObjectLoadingContent.idl", + "nsIScriptableContentIterator.idl", + "nsIScriptChannel.idl", + "nsISelectionController.idl", + "nsISelectionDisplay.idl", + "nsISelectionListener.idl", + "nsISlowScriptDebug.idl", ] -XPIDL_MODULE = 'dom' +XPIDL_MODULE = "dom" EXPORTS += [ - 'AttrArray.h', - 'AutocompleteFieldList.h', - 'Crypto.h', - 'HTMLSplitOnSpacesTokenizer.h', - 'IframeSandboxKeywordList.h', - 'mozAutoDocUpdate.h', - 'NodeUbiReporting.h', - 'nsAttrName.h', - 'nsAttrValue.h', - 'nsAttrValueInlines.h', - 'nsCaseTreatment.h', - 'nsChildContentList.h', - 'nsContentCID.h', - 'nsContentCreatorFunctions.h', - 'nsContentList.h', - 'nsContentListDeclarations.h', - 'nsContentPermissionHelper.h', - 'nsContentPolicyUtils.h', - 'nsContentSink.h', - 'nsContentTypeParser.h', - 'nsContentUtils.h', - 'nsCopySupport.h', - 'nsDeprecatedOperationList.h', - 'nsDocElementCreatedNotificationRunner.h', - 'nsDocumentWarningList.h', - 'nsDOMAttributeMap.h', - 'nsDOMCID.h', - 'nsDOMJSUtils.h', - 'nsDOMMutationObserver.h', - 'nsDOMNavigationTiming.h', - 'nsDOMString.h', - 'nsDOMTokenList.h', - 'nsFocusManager.h', - 'nsFrameLoader.h', # Because binding headers include it. - 'nsFrameLoaderOwner.h', - 'nsFrameMessageManager.h', - 'nsGlobalWindow.h', # Because binding headers include it. - 'nsGlobalWindowInner.h', # Because binding headers include it. - 'nsGlobalWindowOuter.h', # Because binding headers include it. 
- 'nsIAnimationObserver.h', - 'nsIContent.h', - 'nsIContentInlines.h', - 'nsIContentSerializer.h', - 'nsIDocumentObserver.h', - 'nsIGlobalObject.h', - 'nsImageLoadingContent.h', - 'nsIMutationObserver.h', - 'nsINode.h', - 'nsINodeList.h', - 'nsIScriptContext.h', - 'nsIScriptGlobalObject.h', - 'nsIScriptObjectPrincipal.h', - 'nsJSEnvironment.h', - 'nsJSUtils.h', - 'nsLineBreaker.h', - 'nsMappedAttributeElement.h', - 'nsNameSpaceManager.h', - 'nsNodeInfoManager.h', - 'nsPIDOMWindow.h', - 'nsPIDOMWindowInlines.h', - 'nsPIWindowRoot.h', - 'nsPropertyTable.h', - 'nsRange.h', - 'nsSandboxFlags.h', - 'nsStructuredCloneContainer.h', - 'nsStubAnimationObserver.h', - 'nsStubDocumentObserver.h', - 'nsStubMutationObserver.h', - 'nsStyledElement.h', - 'nsTextFragment.h', - 'nsTraversal.h', - 'nsTreeSanitizer.h', - 'nsViewportInfo.h', - 'nsWindowMemoryReporter.h', - 'nsWindowSizes.h', - 'nsWrapperCache.h', - 'nsWrapperCacheInlines.h', - 'XPathGenerator.h', + "AttrArray.h", + "AutocompleteFieldList.h", + "Crypto.h", + "HTMLSplitOnSpacesTokenizer.h", + "IframeSandboxKeywordList.h", + "mozAutoDocUpdate.h", + "NodeUbiReporting.h", + "nsAttrName.h", + "nsAttrValue.h", + "nsAttrValueInlines.h", + "nsCaseTreatment.h", + "nsChildContentList.h", + "nsContentCID.h", + "nsContentCreatorFunctions.h", + "nsContentList.h", + "nsContentListDeclarations.h", + "nsContentPermissionHelper.h", + "nsContentPolicyUtils.h", + "nsContentSink.h", + "nsContentTypeParser.h", + "nsContentUtils.h", + "nsCopySupport.h", + "nsDeprecatedOperationList.h", + "nsDocElementCreatedNotificationRunner.h", + "nsDocumentWarningList.h", + "nsDOMAttributeMap.h", + "nsDOMCID.h", + "nsDOMJSUtils.h", + "nsDOMMutationObserver.h", + "nsDOMNavigationTiming.h", + "nsDOMString.h", + "nsDOMTokenList.h", + "nsFocusManager.h", + "nsFrameLoader.h", # Because binding headers include it. + "nsFrameLoaderOwner.h", + "nsFrameMessageManager.h", + "nsGlobalWindow.h", # Because binding headers include it. + "nsGlobalWindowInner.h", # Because binding headers include it. + "nsGlobalWindowOuter.h", # Because binding headers include it. 
+ "nsIAnimationObserver.h", + "nsIContent.h", + "nsIContentInlines.h", + "nsIContentSerializer.h", + "nsIDocumentObserver.h", + "nsIGlobalObject.h", + "nsImageLoadingContent.h", + "nsIMutationObserver.h", + "nsINode.h", + "nsINodeList.h", + "nsIScriptContext.h", + "nsIScriptGlobalObject.h", + "nsIScriptObjectPrincipal.h", + "nsJSEnvironment.h", + "nsJSUtils.h", + "nsLineBreaker.h", + "nsMappedAttributeElement.h", + "nsNameSpaceManager.h", + "nsNodeInfoManager.h", + "nsPIDOMWindow.h", + "nsPIDOMWindowInlines.h", + "nsPIWindowRoot.h", + "nsPropertyTable.h", + "nsRange.h", + "nsSandboxFlags.h", + "nsStructuredCloneContainer.h", + "nsStubAnimationObserver.h", + "nsStubDocumentObserver.h", + "nsStubMutationObserver.h", + "nsStyledElement.h", + "nsTextFragment.h", + "nsTraversal.h", + "nsTreeSanitizer.h", + "nsViewportInfo.h", + "nsWindowMemoryReporter.h", + "nsWindowSizes.h", + "nsWrapperCache.h", + "nsWrapperCacheInlines.h", + "XPathGenerator.h", ] -if CONFIG['MOZ_WEBRTC']: +if CONFIG["MOZ_WEBRTC"]: EXPORTS += [ - 'nsDOMDataChannel.h', - 'nsDOMDataChannelDeclarations.h', + "nsDOMDataChannel.h", + "nsDOMDataChannelDeclarations.h", ] EXPORTS.mozilla += [ - 'CallState.h', - 'ContentIterator.h', - 'CORSMode.h', - 'FlushType.h', - 'FullscreenChange.h', - 'IdentifierMapEntry.h', - 'RangeBoundary.h', - 'RangeUtils.h', - 'ScriptableContentIterator.h', - 'SelectionChangeEventDispatcher.h', - 'TextInputProcessor.h', - 'UseCounter.h', + "CallState.h", + "ContentIterator.h", + "CORSMode.h", + "FlushType.h", + "FullscreenChange.h", + "IdentifierMapEntry.h", + "RangeBoundary.h", + "RangeUtils.h", + "ScriptableContentIterator.h", + "SelectionChangeEventDispatcher.h", + "TextInputProcessor.h", + "UseCounter.h", ] EXPORTS.mozilla.dom += [ - '!UseCounterList.h', - '!UseCounterWorkerList.h', - 'AbstractRange.h', - 'AncestorIterator.h', - 'AnonymousContent.h', - 'Attr.h', - 'AutoPrintEventDispatcher.h', - 'BarProps.h', - 'BindContext.h', - 'BodyConsumer.h', - 'BodyStream.h', - 'BodyUtil.h', - 'BorrowedAttrInfo.h', - 'CharacterData.h', - 'ChildIterator.h', - 'ChildProcessMessageManager.h', - 'ChromeMessageBroadcaster.h', - 'ChromeMessageSender.h', - 'ChromeNodeList.h', - 'ChromeUtils.h', - 'Comment.h', - 'ContentFrameMessageManager.h', - 'ContentProcessMessageManager.h', - 'CustomElementRegistry.h', - 'DirectionalityUtils.h', - 'DispatcherTrait.h', - 'DocGroup.h', - 'Document.h', - 'DocumentFragment.h', - 'DocumentInlines.h', - 'DocumentOrShadowRoot.h', - 'DocumentType.h', - 'DOMArena.h', - 'DOMException.h', - 'DOMImplementation.h', - 'DOMIntersectionObserver.h', - 'DOMMatrix.h', - 'DOMMozPromiseRequestHolder.h', - 'DOMParser.h', - 'DOMPoint.h', - 'DOMQuad.h', - 'DOMRect.h', - 'DOMRequest.h', - 'DOMStringList.h', - 'DOMTokenListSupportedTokens.h', - 'Element.h', - 'ElementInlines.h', - 'EventSource.h', - 'EventSourceEventService.h', - 'FilteredNodeIterator.h', - 'FormData.h', - 'FragmentOrElement.h', - 'FromParser.h', - 'GeneratedImageContent.h', - 'IdleDeadline.h', - 'IdleRequest.h', - 'IDTracker.h', - 'ImageEncoder.h', - 'ImageTracker.h', - 'IntlUtils.h', - 'Link.h', - 'LinkStyle.h', - 'Location.h', - 'LocationBase.h', - 'MaybeCrossOriginObject.h', - 'MessageBroadcaster.h', - 'MessageListenerManager.h', - 'MessageManagerGlobal.h', - 'MessageSender.h', - 'MimeType.h', - 'MozQueryInterface.h', - 'MutationObservers.h', - 'NameSpaceConstants.h', - 'Navigator.h', - 'NodeInfo.h', - 'NodeInfoInlines.h', - 'NodeIterator.h', - 'ParentProcessMessageManager.h', - 'PlacesBookmark.h', - 'PlacesBookmarkAddition.h', - 
'PlacesBookmarkRemoved.h', - 'PlacesEvent.h', - 'PlacesObservers.h', - 'PlacesVisit.h', - 'PlacesWeakCallbackWrapper.h', - 'PopupBlocker.h', - 'Pose.h', - 'PostMessageEvent.h', - 'ProcessMessageManager.h', - 'ResizeObserver.h', - 'ResizeObserverController.h', - 'ResponsiveImageSelector.h', - 'SameProcessMessageQueue.h', - 'ScreenLuminance.h', - 'ScreenOrientation.h', - 'Selection.h', - 'SerializedStackHolder.h', - 'ShadowIncludingTreeIterator.h', - 'ShadowRoot.h', - 'StaticRange.h', - 'StructuredCloneBlob.h', - 'StructuredCloneHolder.h', - 'StructuredCloneTags.h', - 'StructuredCloneTester.h', - 'StyledRange.h', - 'StyleSheetList.h', - 'SubtleCrypto.h', - 'SyncMessageSender.h', - 'Text.h', - 'Timeout.h', - 'TimeoutHandler.h', - 'TimeoutManager.h', - 'TreeIterator.h', - 'TreeOrderedArray.h', - 'TreeOrderedArrayInlines.h', - 'TreeWalker.h', - 'UIDirectionManager.h', - 'UserActivation.h', - 'ViewportMetaData.h', - 'VisualViewport.h', - 'WindowFeatures.h', - 'WindowOrientationObserver.h', - 'WindowProxyHolder.h', + "!UseCounterList.h", + "!UseCounterWorkerList.h", + "AbstractRange.h", + "AncestorIterator.h", + "AnonymousContent.h", + "Attr.h", + "AutoPrintEventDispatcher.h", + "BarProps.h", + "BindContext.h", + "BodyConsumer.h", + "BodyStream.h", + "BodyUtil.h", + "BorrowedAttrInfo.h", + "CharacterData.h", + "ChildIterator.h", + "ChildProcessMessageManager.h", + "ChromeMessageBroadcaster.h", + "ChromeMessageSender.h", + "ChromeNodeList.h", + "ChromeUtils.h", + "Comment.h", + "ContentFrameMessageManager.h", + "ContentProcessMessageManager.h", + "CustomElementRegistry.h", + "DirectionalityUtils.h", + "DispatcherTrait.h", + "DocGroup.h", + "Document.h", + "DocumentFragment.h", + "DocumentInlines.h", + "DocumentOrShadowRoot.h", + "DocumentType.h", + "DOMArena.h", + "DOMException.h", + "DOMImplementation.h", + "DOMIntersectionObserver.h", + "DOMMatrix.h", + "DOMMozPromiseRequestHolder.h", + "DOMParser.h", + "DOMPoint.h", + "DOMQuad.h", + "DOMRect.h", + "DOMRequest.h", + "DOMStringList.h", + "DOMTokenListSupportedTokens.h", + "Element.h", + "ElementInlines.h", + "EventSource.h", + "EventSourceEventService.h", + "FilteredNodeIterator.h", + "FormData.h", + "FragmentOrElement.h", + "FromParser.h", + "GeneratedImageContent.h", + "IdleDeadline.h", + "IdleRequest.h", + "IDTracker.h", + "ImageEncoder.h", + "ImageTracker.h", + "IntlUtils.h", + "Link.h", + "LinkStyle.h", + "Location.h", + "LocationBase.h", + "MaybeCrossOriginObject.h", + "MessageBroadcaster.h", + "MessageListenerManager.h", + "MessageManagerGlobal.h", + "MessageSender.h", + "MimeType.h", + "MozQueryInterface.h", + "MutationObservers.h", + "NameSpaceConstants.h", + "Navigator.h", + "NodeInfo.h", + "NodeInfoInlines.h", + "NodeIterator.h", + "ParentProcessMessageManager.h", + "PlacesBookmark.h", + "PlacesBookmarkAddition.h", + "PlacesBookmarkRemoved.h", + "PlacesEvent.h", + "PlacesObservers.h", + "PlacesVisit.h", + "PlacesWeakCallbackWrapper.h", + "PopupBlocker.h", + "Pose.h", + "PostMessageEvent.h", + "ProcessMessageManager.h", + "ResizeObserver.h", + "ResizeObserverController.h", + "ResponsiveImageSelector.h", + "SameProcessMessageQueue.h", + "ScreenLuminance.h", + "ScreenOrientation.h", + "Selection.h", + "SerializedStackHolder.h", + "ShadowIncludingTreeIterator.h", + "ShadowRoot.h", + "StaticRange.h", + "StructuredCloneBlob.h", + "StructuredCloneHolder.h", + "StructuredCloneTags.h", + "StructuredCloneTester.h", + "StyledRange.h", + "StyleSheetList.h", + "SubtleCrypto.h", + "SyncMessageSender.h", + "Text.h", + "Timeout.h", + 
"TimeoutHandler.h", + "TimeoutManager.h", + "TreeIterator.h", + "TreeOrderedArray.h", + "TreeOrderedArrayInlines.h", + "TreeWalker.h", + "UIDirectionManager.h", + "UserActivation.h", + "ViewportMetaData.h", + "VisualViewport.h", + "WindowFeatures.h", + "WindowOrientationObserver.h", + "WindowProxyHolder.h", ] -if CONFIG['FUZZING']: +if CONFIG["FUZZING"]: EXPORTS.mozilla.dom += [ - 'FuzzingFunctions.h', + "FuzzingFunctions.h", ] UNIFIED_SOURCES += [ - 'AbstractRange.cpp', - 'AnonymousContent.cpp', - 'Attr.cpp', - 'AttrArray.cpp', - 'BarProps.cpp', - 'BindContext.cpp', - 'BodyConsumer.cpp', - 'BodyStream.cpp', - 'BodyUtil.cpp', - 'BorrowedAttrInfo.cpp', - 'CharacterData.cpp', - 'ChildIterator.cpp', - 'ChromeMessageBroadcaster.cpp', - 'ChromeMessageSender.cpp', - 'ChromeNodeList.cpp', - 'ChromeUtils.cpp', - 'Comment.cpp', - 'ContentFrameMessageManager.cpp', - 'ContentIterator.cpp', - 'ContentProcessMessageManager.cpp', - 'Crypto.cpp', - 'CustomElementRegistry.cpp', - 'DirectionalityUtils.cpp', - 'DispatcherTrait.cpp', - 'DocGroup.cpp', - 'Document.cpp', - 'DocumentFragment.cpp', - 'DocumentOrShadowRoot.cpp', - 'DocumentType.cpp', - 'DOMException.cpp', - 'DOMImplementation.cpp', - 'DOMMatrix.cpp', - 'DOMParser.cpp', - 'DOMPoint.cpp', - 'DOMQuad.cpp', - 'DOMRect.cpp', - 'DOMRequest.cpp', - 'DOMStringList.cpp', - 'Element.cpp', - 'EventSource.cpp', - 'EventSourceEventService.cpp', - 'FormData.cpp', - 'FragmentOrElement.cpp', - 'GeneratedImageContent.cpp', - 'IdleDeadline.cpp', - 'IdleRequest.cpp', - 'IDTracker.cpp', - 'ImageEncoder.cpp', - 'ImageTracker.cpp', - 'InProcessBrowserChildMessageManager.cpp', - 'IntlUtils.cpp', - 'Link.cpp', - 'LinkStyle.cpp', - 'Location.cpp', - 'LocationBase.cpp', - 'MaybeCrossOriginObject.cpp', - 'MessageBroadcaster.cpp', - 'MessageListenerManager.cpp', - 'MessageManagerGlobal.cpp', - 'MessageSender.cpp', - 'MimeType.cpp', - 'MozQueryInterface.cpp', - 'MutationObservers.cpp', - 'Navigator.cpp', - 'NodeInfo.cpp', - 'NodeIterator.cpp', - 'NodeUbiReporting.cpp', - 'nsAttrValue.cpp', - 'nsAttrValueOrString.cpp', - 'nsCCUncollectableMarker.cpp', - 'nsContentAreaDragDrop.cpp', - 'nsContentList.cpp', - 'nsContentPermissionHelper.cpp', - 'nsContentPolicy.cpp', - 'nsContentSink.cpp', - 'nsContentTypeParser.cpp', - 'nsCopySupport.cpp', - 'nsDataDocumentContentPolicy.cpp', - 'nsDocumentEncoder.cpp', - 'nsDOMAttributeMap.cpp', - 'nsDOMCaretPosition.cpp', - 'nsDOMMutationObserver.cpp', - 'nsDOMNavigationTiming.cpp', - 'nsDOMSerializer.cpp', - 'nsDOMTokenList.cpp', - 'nsFocusManager.cpp', - 'nsFrameLoader.cpp', - 'nsFrameLoaderOwner.cpp', - 'nsGlobalWindowCommands.cpp', - 'nsHistory.cpp', - 'nsHTMLContentSerializer.cpp', - 'nsIGlobalObject.cpp', - 'nsINode.cpp', - 'nsJSEnvironment.cpp', - 'nsJSUtils.cpp', - 'nsLineBreaker.cpp', - 'nsMappedAttributeElement.cpp', - 'nsMappedAttributes.cpp', - 'nsMimeTypeArray.cpp', - 'nsNameSpaceManager.cpp', - 'nsNoDataProtocolContentPolicy.cpp', - 'nsNodeInfoManager.cpp', - 'nsOpenURIInFrameParams.cpp', - 'nsPlainTextSerializer.cpp', - 'nsPropertyTable.cpp', - 'nsQueryContentEventResult.cpp', - 'nsRange.cpp', - 'nsScreen.cpp', - 'nsStructuredCloneContainer.cpp', - 'nsStubAnimationObserver.cpp', - 'nsStubDocumentObserver.cpp', - 'nsStubMutationObserver.cpp', - 'nsStyledElement.cpp', - 'nsSyncLoadService.cpp', - 'nsTextFragment.cpp', - 'nsTextNode.cpp', - 'nsTraversal.cpp', - 'nsTreeSanitizer.cpp', - 'nsViewportInfo.cpp', - 'nsWindowMemoryReporter.cpp', - 'nsWindowRoot.cpp', - 'nsWrapperCache.cpp', - 'nsXHTMLContentSerializer.cpp', - 
'nsXMLContentSerializer.cpp', - 'ParentProcessMessageManager.cpp', - 'PopupBlocker.cpp', - 'Pose.cpp', - 'PostMessageEvent.cpp', - 'ProcessMessageManager.cpp', - 'RangeUtils.cpp', - 'RemoteOuterWindowProxy.cpp', - 'ResizeObserver.cpp', - 'ResizeObserverController.cpp', - 'ResponsiveImageSelector.cpp', - 'SameProcessMessageQueue.cpp', - 'ScreenLuminance.cpp', - 'ScreenOrientation.cpp', - 'ScriptableContentIterator.cpp', - 'Selection.cpp', - 'SelectionChangeEventDispatcher.cpp', - 'SerializedStackHolder.cpp', - 'ShadowRoot.cpp', - 'StaticRange.cpp', - 'StorageAccessPermissionRequest.cpp', - 'StructuredCloneBlob.cpp', - 'StructuredCloneHolder.cpp', - 'StructuredCloneTester.cpp', - 'StyledRange.cpp', - 'StyleSheetList.cpp', - 'SubtleCrypto.cpp', - 'Text.cpp', - 'TextInputProcessor.cpp', - 'ThirdPartyUtil.cpp', - 'Timeout.cpp', - 'TimeoutBudgetManager.cpp', - 'TimeoutExecutor.cpp', - 'TimeoutHandler.cpp', - 'TimeoutManager.cpp', - 'TreeWalker.cpp', - 'UIDirectionManager.cpp', - 'UserActivation.cpp', - 'ViewportMetaData.cpp', - 'VisualViewport.cpp', - 'WindowDestroyedEvent.cpp', - 'WindowFeatures.cpp', - 'WindowNamedPropertiesHandler.cpp', - 'WindowOrientationObserver.cpp', - 'XPathGenerator.cpp', + "AbstractRange.cpp", + "AnonymousContent.cpp", + "Attr.cpp", + "AttrArray.cpp", + "BarProps.cpp", + "BindContext.cpp", + "BodyConsumer.cpp", + "BodyStream.cpp", + "BodyUtil.cpp", + "BorrowedAttrInfo.cpp", + "CharacterData.cpp", + "ChildIterator.cpp", + "ChromeMessageBroadcaster.cpp", + "ChromeMessageSender.cpp", + "ChromeNodeList.cpp", + "ChromeUtils.cpp", + "Comment.cpp", + "ContentFrameMessageManager.cpp", + "ContentIterator.cpp", + "ContentProcessMessageManager.cpp", + "Crypto.cpp", + "CustomElementRegistry.cpp", + "DirectionalityUtils.cpp", + "DispatcherTrait.cpp", + "DocGroup.cpp", + "Document.cpp", + "DocumentFragment.cpp", + "DocumentOrShadowRoot.cpp", + "DocumentType.cpp", + "DOMException.cpp", + "DOMImplementation.cpp", + "DOMMatrix.cpp", + "DOMParser.cpp", + "DOMPoint.cpp", + "DOMQuad.cpp", + "DOMRect.cpp", + "DOMRequest.cpp", + "DOMStringList.cpp", + "Element.cpp", + "EventSource.cpp", + "EventSourceEventService.cpp", + "FormData.cpp", + "FragmentOrElement.cpp", + "GeneratedImageContent.cpp", + "IdleDeadline.cpp", + "IdleRequest.cpp", + "IDTracker.cpp", + "ImageEncoder.cpp", + "ImageTracker.cpp", + "InProcessBrowserChildMessageManager.cpp", + "IntlUtils.cpp", + "Link.cpp", + "LinkStyle.cpp", + "Location.cpp", + "LocationBase.cpp", + "MaybeCrossOriginObject.cpp", + "MessageBroadcaster.cpp", + "MessageListenerManager.cpp", + "MessageManagerGlobal.cpp", + "MessageSender.cpp", + "MimeType.cpp", + "MozQueryInterface.cpp", + "MutationObservers.cpp", + "Navigator.cpp", + "NodeInfo.cpp", + "NodeIterator.cpp", + "NodeUbiReporting.cpp", + "nsAttrValue.cpp", + "nsAttrValueOrString.cpp", + "nsCCUncollectableMarker.cpp", + "nsContentAreaDragDrop.cpp", + "nsContentList.cpp", + "nsContentPermissionHelper.cpp", + "nsContentPolicy.cpp", + "nsContentSink.cpp", + "nsContentTypeParser.cpp", + "nsCopySupport.cpp", + "nsDataDocumentContentPolicy.cpp", + "nsDocumentEncoder.cpp", + "nsDOMAttributeMap.cpp", + "nsDOMCaretPosition.cpp", + "nsDOMMutationObserver.cpp", + "nsDOMNavigationTiming.cpp", + "nsDOMSerializer.cpp", + "nsDOMTokenList.cpp", + "nsFocusManager.cpp", + "nsFrameLoader.cpp", + "nsFrameLoaderOwner.cpp", + "nsGlobalWindowCommands.cpp", + "nsHistory.cpp", + "nsHTMLContentSerializer.cpp", + "nsIGlobalObject.cpp", + "nsINode.cpp", + "nsJSEnvironment.cpp", + "nsJSUtils.cpp", + "nsLineBreaker.cpp", + 
"nsMappedAttributeElement.cpp", + "nsMappedAttributes.cpp", + "nsMimeTypeArray.cpp", + "nsNameSpaceManager.cpp", + "nsNoDataProtocolContentPolicy.cpp", + "nsNodeInfoManager.cpp", + "nsOpenURIInFrameParams.cpp", + "nsPlainTextSerializer.cpp", + "nsPropertyTable.cpp", + "nsQueryContentEventResult.cpp", + "nsRange.cpp", + "nsScreen.cpp", + "nsStructuredCloneContainer.cpp", + "nsStubAnimationObserver.cpp", + "nsStubDocumentObserver.cpp", + "nsStubMutationObserver.cpp", + "nsStyledElement.cpp", + "nsSyncLoadService.cpp", + "nsTextFragment.cpp", + "nsTextNode.cpp", + "nsTraversal.cpp", + "nsTreeSanitizer.cpp", + "nsViewportInfo.cpp", + "nsWindowMemoryReporter.cpp", + "nsWindowRoot.cpp", + "nsWrapperCache.cpp", + "nsXHTMLContentSerializer.cpp", + "nsXMLContentSerializer.cpp", + "ParentProcessMessageManager.cpp", + "PopupBlocker.cpp", + "Pose.cpp", + "PostMessageEvent.cpp", + "ProcessMessageManager.cpp", + "RangeUtils.cpp", + "RemoteOuterWindowProxy.cpp", + "ResizeObserver.cpp", + "ResizeObserverController.cpp", + "ResponsiveImageSelector.cpp", + "SameProcessMessageQueue.cpp", + "ScreenLuminance.cpp", + "ScreenOrientation.cpp", + "ScriptableContentIterator.cpp", + "Selection.cpp", + "SelectionChangeEventDispatcher.cpp", + "SerializedStackHolder.cpp", + "ShadowRoot.cpp", + "StaticRange.cpp", + "StorageAccessPermissionRequest.cpp", + "StructuredCloneBlob.cpp", + "StructuredCloneHolder.cpp", + "StructuredCloneTester.cpp", + "StyledRange.cpp", + "StyleSheetList.cpp", + "SubtleCrypto.cpp", + "Text.cpp", + "TextInputProcessor.cpp", + "ThirdPartyUtil.cpp", + "Timeout.cpp", + "TimeoutBudgetManager.cpp", + "TimeoutExecutor.cpp", + "TimeoutHandler.cpp", + "TimeoutManager.cpp", + "TreeWalker.cpp", + "UIDirectionManager.cpp", + "UserActivation.cpp", + "ViewportMetaData.cpp", + "VisualViewport.cpp", + "WindowDestroyedEvent.cpp", + "WindowFeatures.cpp", + "WindowNamedPropertiesHandler.cpp", + "WindowOrientationObserver.cpp", + "XPathGenerator.cpp", ] -if CONFIG['MOZ_WEBRTC']: +if CONFIG["MOZ_WEBRTC"]: UNIFIED_SOURCES += [ - 'nsDOMDataChannel.cpp', + "nsDOMDataChannel.cpp", ] -if CONFIG['FUZZING']: +if CONFIG["FUZZING"]: UNIFIED_SOURCES += [ - 'FuzzingFunctions.cpp', + "FuzzingFunctions.cpp", ] -if CONFIG['MOZ_PLACES']: +if CONFIG["MOZ_PLACES"]: UNIFIED_SOURCES += [ - 'PlacesEvent.cpp', - 'PlacesObservers.cpp', - 'PlacesWeakCallbackWrapper.cpp', + "PlacesEvent.cpp", + "PlacesObservers.cpp", + "PlacesWeakCallbackWrapper.cpp", ] # these files couldn't be in UNIFIED_SOURCES for now for reasons given below: SOURCES += [ # Several conflicts with other bindings. - 'DOMIntersectionObserver.cpp', + "DOMIntersectionObserver.cpp", # Because of OS X headers. - 'nsContentUtils.cpp', + "nsContentUtils.cpp", # this file doesn't like windows.h - 'nsDOMWindowUtils.cpp', + "nsDOMWindowUtils.cpp", # Conflicts with windows.h's definition of SendMessage. - 'nsFrameMessageManager.cpp', + "nsFrameMessageManager.cpp", # These files have a #error "Never include unwrapped windows.h in this file!" - 'nsGlobalWindowInner.cpp', - 'nsGlobalWindowOuter.cpp', + "nsGlobalWindowInner.cpp", + "nsGlobalWindowOuter.cpp", # Conflicts with windows.h's definition of LoadImage. - 'nsImageLoadingContent.cpp', + "nsImageLoadingContent.cpp", # Because of OS X headers. - 'nsObjectLoadingContent.cpp', + "nsObjectLoadingContent.cpp", # nsPluginArray.cpp includes npapi.h indirectly, and that includes a lot of system headers - 'nsPluginArray.cpp', + "nsPluginArray.cpp", ] # Are we targeting x86-32 or x86-64? 
If so, we want to include SSE2 code for # nsTextFragment.cpp -if CONFIG['INTEL_ARCHITECTURE']: - SOURCES += ['nsTextFragmentSSE2.cpp'] - SOURCES['nsTextFragmentSSE2.cpp'].flags += CONFIG['SSE2_FLAGS'] +if CONFIG["INTEL_ARCHITECTURE"]: + SOURCES += ["nsTextFragmentSSE2.cpp"] + SOURCES["nsTextFragmentSSE2.cpp"].flags += CONFIG["SSE2_FLAGS"] # Are we targeting PowerPC? If so, we can enable a SIMD version for # nsTextFragment.cpp as well. -if CONFIG['CPU_ARCH'].startswith('ppc'): - SOURCES += ['nsTextFragmentVMX.cpp'] - SOURCES['nsTextFragmentVMX.cpp'].flags += CONFIG['PPC_VMX_FLAGS'] +if CONFIG["CPU_ARCH"].startswith("ppc"): + SOURCES += ["nsTextFragmentVMX.cpp"] + SOURCES["nsTextFragmentVMX.cpp"].flags += CONFIG["PPC_VMX_FLAGS"] EXTRA_JS_MODULES += [ - 'ContentAreaDropListener.jsm', - 'DOMRequestHelper.jsm', - 'IndexedDBHelper.jsm', - 'LocationHelper.jsm', - 'ProcessSelector.jsm', - 'SlowScriptDebug.jsm', + "ContentAreaDropListener.jsm", + "DOMRequestHelper.jsm", + "IndexedDBHelper.jsm", + "LocationHelper.jsm", + "ProcessSelector.jsm", + "SlowScriptDebug.jsm", ] XPCOM_MANIFESTS += [ - 'components.conf', + "components.conf", ] LOCAL_INCLUDES += [ - '../battery', - '../events', - '../media', - '../network', - '/caps', - '/docshell/base', - '/dom/base', - '/dom/file', - '/dom/geolocation', - '/dom/html', - '/dom/ipc', - '/dom/storage', - '/dom/svg', - '/dom/u2f', - '/dom/xml', - '/dom/xslt/xpath', - '/dom/xul', - '/gfx/2d', - '/image', - '/js/xpconnect/loader', - '/js/xpconnect/src', - '/js/xpconnect/wrappers', - '/layout/base', - '/layout/forms', - '/layout/generic', - '/layout/style', - '/layout/xul', - '/netwerk/base', - '/netwerk/url-classifier', - '/security/manager/ssl', - '/widget', - '/xpcom/ds', + "../battery", + "../events", + "../media", + "../network", + "/caps", + "/docshell/base", + "/dom/base", + "/dom/file", + "/dom/geolocation", + "/dom/html", + "/dom/ipc", + "/dom/storage", + "/dom/svg", + "/dom/u2f", + "/dom/xml", + "/dom/xslt/xpath", + "/dom/xul", + "/gfx/2d", + "/image", + "/js/xpconnect/loader", + "/js/xpconnect/src", + "/js/xpconnect/wrappers", + "/layout/base", + "/layout/forms", + "/layout/generic", + "/layout/style", + "/layout/xul", + "/netwerk/base", + "/netwerk/url-classifier", + "/security/manager/ssl", + "/widget", + "/xpcom/ds", ] -if CONFIG['MOZ_WEBRTC']: +if CONFIG["MOZ_WEBRTC"]: LOCAL_INCLUDES += [ - '/netwerk/sctp/datachannel', + "/netwerk/sctp/datachannel", ] -include('/ipc/chromium/chromium-config.mozbuild') +include("/ipc/chromium/chromium-config.mozbuild") -FINAL_LIBRARY = 'xul' +FINAL_LIBRARY = "xul" -if CONFIG['MOZ_BUILD_APP'] in ['browser', 'mobile/android', 'xulrunner']: - DEFINES['HAVE_SIDEBAR'] = True +if CONFIG["MOZ_BUILD_APP"] in ["browser", "mobile/android", "xulrunner"]: + DEFINES["HAVE_SIDEBAR"] = True -if CONFIG['MOZ_X11']: - CXXFLAGS += CONFIG['TK_CFLAGS'] +if CONFIG["MOZ_X11"]: + CXXFLAGS += CONFIG["TK_CFLAGS"] -GeneratedFile('UseCounterList.h', script='gen-usecounters.py', - entry_point='use_counter_list', inputs=['UseCounters.conf']) +GeneratedFile( + "UseCounterList.h", + script="gen-usecounters.py", + entry_point="use_counter_list", + inputs=["UseCounters.conf"], +) -GeneratedFile('UseCounterWorkerList.h', script='gen-usecounters.py', - entry_point='use_counter_list', inputs=['UseCountersWorker.conf']) +GeneratedFile( + "UseCounterWorkerList.h", + script="gen-usecounters.py", + entry_point="use_counter_list", + inputs=["UseCountersWorker.conf"], +) -if CONFIG['CC_TYPE'] in ('clang', 'gcc'): - CXXFLAGS += ['-Wno-error=shadow'] +if 
CONFIG["CC_TYPE"] in ("clang", "gcc"): + CXXFLAGS += ["-Wno-error=shadow"] diff --git a/dom/base/test/gtest/moz.build b/dom/base/test/gtest/moz.build index aa51cbf9ddcfa7..bb75fb509d11d9 100644 --- a/dom/base/test/gtest/moz.build +++ b/dom/base/test/gtest/moz.build @@ -5,17 +5,15 @@ # file, you can obtain one at http://mozilla.org/MPL/2.0/. UNIFIED_SOURCES += [ - 'TestContentUtils.cpp', - 'TestMimeType.cpp', - 'TestParser.cpp', - 'TestPlainTextSerializer.cpp', - 'TestXPathGenerator.cpp', + "TestContentUtils.cpp", + "TestMimeType.cpp", + "TestParser.cpp", + "TestPlainTextSerializer.cpp", + "TestXPathGenerator.cpp", ] -LOCAL_INCLUDES += [ - '/dom/base' -] +LOCAL_INCLUDES += ["/dom/base"] -include('/ipc/chromium/chromium-config.mozbuild') +include("/ipc/chromium/chromium-config.mozbuild") -FINAL_LIBRARY = 'xul-gtest' +FINAL_LIBRARY = "xul-gtest" diff --git a/dom/base/test/jsmodules/moz.build b/dom/base/test/jsmodules/moz.build index 123ba6c484c29f..1a7d5281ea3f09 100644 --- a/dom/base/test/jsmodules/moz.build +++ b/dom/base/test/jsmodules/moz.build @@ -4,6 +4,4 @@ # License, v. 2.0. If a copy of the MPL was not distributed with this # file, You can obtain one at http://mozilla.org/MPL/2.0/. -MOCHITEST_CHROME_MANIFESTS += [ - 'chrome.ini' -] +MOCHITEST_CHROME_MANIFESTS += ["chrome.ini"] diff --git a/dom/base/test/moz.build b/dom/base/test/moz.build index 47c5148405c4bd..03daae63bc25b9 100644 --- a/dom/base/test/moz.build +++ b/dom/base/test/moz.build @@ -5,36 +5,36 @@ # file, You can obtain one at http://mozilla.org/MPL/2.0/. XPCSHELL_TESTS_MANIFESTS += [ - 'unit/xpcshell.ini', - 'unit_ipc/xpcshell.ini', + "unit/xpcshell.ini", + "unit_ipc/xpcshell.ini", ] MOCHITEST_MANIFESTS += [ - 'mochitest.ini', + "mochitest.ini", ] MOCHITEST_CHROME_MANIFESTS += [ - 'chrome.ini', - 'chrome/chrome.ini', + "chrome.ini", + "chrome/chrome.ini", ] BROWSER_CHROME_MANIFESTS += [ - 'browser.ini', + "browser.ini", ] TEST_DIRS += [ - 'gtest', - 'jsmodules', - 'useractivation', + "gtest", + "jsmodules", + "useractivation", ] TEST_HARNESS_FILES.testing.mochitest.tests.dom.base.test.chrome += [ - 'chrome/bug421622-referer.sjs', - 'chrome/bug884693.sjs', - 'chrome/nochrome_bug1346936.html', - 'chrome/nochrome_bug1346936.js', - 'chrome/nochrome_bug1346936.js^headers^', - 'chrome/nochrome_bug765993.html', - 'chrome/nochrome_bug765993.js', - 'chrome/nochrome_bug765993.js^headers^', + "chrome/bug421622-referer.sjs", + "chrome/bug884693.sjs", + "chrome/nochrome_bug1346936.html", + "chrome/nochrome_bug1346936.js", + "chrome/nochrome_bug1346936.js^headers^", + "chrome/nochrome_bug765993.html", + "chrome/nochrome_bug765993.js", + "chrome/nochrome_bug765993.js^headers^", ] diff --git a/dom/base/test/useractivation/moz.build b/dom/base/test/useractivation/moz.build index a08441f8567d14..60b508c7748ad2 100644 --- a/dom/base/test/useractivation/moz.build +++ b/dom/base/test/useractivation/moz.build @@ -5,5 +5,5 @@ # file, You can obtain one at http://mozilla.org/MPL/2.0/. MOCHITEST_MANIFESTS += [ - 'mochitest.ini', + "mochitest.ini", ] diff --git a/dom/base/usecounters.py b/dom/base/usecounters.py index adf0ed6c0f5f99..07e63278d4a5b6 100644 --- a/dom/base/usecounters.py +++ b/dom/base/usecounters.py @@ -8,70 +8,85 @@ from six import StringIO import sys + def read_conf(conf_filename): # Can't read/write from a single StringIO, so make a new one for reading. 
- stream = open(conf_filename, 'rU') + stream = open(conf_filename, "rU") def parse_counters(stream): for line_num, line in enumerate(stream): - line = line.rstrip('\n') - if not line or line.startswith('//'): + line = line.rstrip("\n") + if not line or line.startswith("//"): # empty line or comment continue - m = re.match(r'method ([A-Za-z0-9]+)\.([A-Za-z0-9]+)$', line) + m = re.match(r"method ([A-Za-z0-9]+)\.([A-Za-z0-9]+)$", line) if m: interface_name, method_name = m.groups() - yield { 'type': 'method', - 'interface_name': interface_name, - 'method_name': method_name } + yield { + "type": "method", + "interface_name": interface_name, + "method_name": method_name, + } continue - m = re.match(r'attribute ([A-Za-z0-9]+)\.([A-Za-z0-9]+)$', line) + m = re.match(r"attribute ([A-Za-z0-9]+)\.([A-Za-z0-9]+)$", line) if m: interface_name, attribute_name = m.groups() - yield { 'type': 'attribute', - 'interface_name': interface_name, - 'attribute_name': attribute_name } + yield { + "type": "attribute", + "interface_name": interface_name, + "attribute_name": attribute_name, + } continue - m = re.match(r'custom ([A-Za-z0-9_]+) (.*)$', line) + m = re.match(r"custom ([A-Za-z0-9_]+) (.*)$", line) if m: name, desc = m.groups() - yield { 'type': 'custom', - 'name': name, - 'desc': desc } + yield {"type": "custom", "name": name, "desc": desc} continue - raise ValueError('error parsing %s at line %d' % (conf_filename, line_num + 1)) + raise ValueError( + "error parsing %s at line %d" % (conf_filename, line_num + 1) + ) return parse_counters(stream) + def generate_histograms(filename, is_for_worker=False): # The mapping for use counters to telemetry histograms depends on the # ordering of items in the dictionary. # The ordering of the ending for workers depends on the WorkerType defined # in WorkerPrivate.h. 
- endings = ["DEDICATED_WORKER", "SHARED_WORKER", "SERVICE_WORKER"] if is_for_worker else ["DOCUMENT", "PAGE"] + endings = ( + ["DEDICATED_WORKER", "SHARED_WORKER", "SERVICE_WORKER"] + if is_for_worker + else ["DOCUMENT", "PAGE"] + ) items = collections.OrderedDict() for counter in read_conf(filename): + def append_counter(name, desc): - items[name] = { 'expires_in_version': 'never', - 'kind' : 'boolean', - 'description': desc } + items[name] = { + "expires_in_version": "never", + "kind": "boolean", + "description": desc, + } def append_counters(name, desc): for ending in endings: - append_counter('USE_COUNTER2_%s_%s' % (name, ending), - 'Whether a %s %s' % (ending.replace('_', ' ').lower(), desc)) + append_counter( + "USE_COUNTER2_%s_%s" % (name, ending), + "Whether a %s %s" % (ending.replace("_", " ").lower(), desc), + ) - if counter['type'] == 'method': - method = '%s.%s' % (counter['interface_name'], counter['method_name']) - append_counters(method.replace('.', '_').upper(), 'called %s' % method) - elif counter['type'] == 'attribute': - attr = '%s.%s' % (counter['interface_name'], counter['attribute_name']) - counter_name = attr.replace('.', '_').upper() - append_counters('%s_getter' % counter_name, 'got %s' % attr) - append_counters('%s_setter' % counter_name, 'set %s' % attr) - elif counter['type'] == 'custom': - append_counters(counter['name'].upper(), counter['desc']) + if counter["type"] == "method": + method = "%s.%s" % (counter["interface_name"], counter["method_name"]) + append_counters(method.replace(".", "_").upper(), "called %s" % method) + elif counter["type"] == "attribute": + attr = "%s.%s" % (counter["interface_name"], counter["attribute_name"]) + counter_name = attr.replace(".", "_").upper() + append_counters("%s_getter" % counter_name, "got %s" % attr) + append_counters("%s_setter" % counter_name, "set %s" % attr) + elif counter["type"] == "custom": + append_counters(counter["name"].upper(), counter["desc"]) return items diff --git a/dom/battery/moz.build b/dom/battery/moz.build index 9b0f067835bca3..4ef4e984873760 100644 --- a/dom/battery/moz.build +++ b/dom/battery/moz.build @@ -8,16 +8,16 @@ with Files("**"): BUG_COMPONENT = ("Core", "DOM: Device Interfaces") EXPORTS.mozilla.dom.battery += [ - 'Constants.h', + "Constants.h", ] SOURCES += [ - 'BatteryManager.cpp', + "BatteryManager.cpp", ] -include('/ipc/chromium/chromium-config.mozbuild') +include("/ipc/chromium/chromium-config.mozbuild") -FINAL_LIBRARY = 'xul' +FINAL_LIBRARY = "xul" -MOCHITEST_CHROME_MANIFESTS += ['test/chrome.ini'] -MOCHITEST_MANIFESTS += ['test/mochitest.ini'] +MOCHITEST_CHROME_MANIFESTS += ["test/chrome.ini"] +MOCHITEST_MANIFESTS += ["test/mochitest.ini"] diff --git a/dom/bindings/Codegen.py b/dom/bindings/Codegen.py index 7f7692eb3f1442..a0c3442e64be78 100644 --- a/dom/bindings/Codegen.py +++ b/dom/bindings/Codegen.py @@ -14,39 +14,66 @@ from perfecthash import PerfectHash import six -from WebIDL import BuiltinTypes, IDLBuiltinType, IDLDefaultDictionaryValue, IDLNullValue, IDLSequenceType, IDLType, IDLAttribute, IDLInterfaceMember, IDLUndefinedValue, IDLEmptySequenceValue, IDLDictionary -from Configuration import NoSuchDescriptorError, getTypesFromDescriptor, getTypesFromDictionary, getTypesFromCallback, getAllTypes, Descriptor, MemberIsUnforgeable, iteratorNativeType - -AUTOGENERATED_WARNING_COMMENT = \ +from WebIDL import ( + BuiltinTypes, + IDLBuiltinType, + IDLDefaultDictionaryValue, + IDLNullValue, + IDLSequenceType, + IDLType, + IDLAttribute, + IDLInterfaceMember, + IDLUndefinedValue, + 
IDLEmptySequenceValue, + IDLDictionary, +) +from Configuration import ( + NoSuchDescriptorError, + getTypesFromDescriptor, + getTypesFromDictionary, + getTypesFromCallback, + getAllTypes, + Descriptor, + MemberIsUnforgeable, + iteratorNativeType, +) + +AUTOGENERATED_WARNING_COMMENT = ( "/* THIS FILE IS AUTOGENERATED BY Codegen.py - DO NOT EDIT */\n\n" -AUTOGENERATED_WITH_SOURCE_WARNING_COMMENT = \ +) +AUTOGENERATED_WITH_SOURCE_WARNING_COMMENT = ( "/* THIS FILE IS AUTOGENERATED FROM %s BY Codegen.py - DO NOT EDIT */\n\n" -ADDPROPERTY_HOOK_NAME = '_addProperty' -GETWRAPPERCACHE_HOOK_NAME = '_getWrapperCache' -FINALIZE_HOOK_NAME = '_finalize' -OBJECT_MOVED_HOOK_NAME = '_objectMoved' -CONSTRUCT_HOOK_NAME = '_constructor' -LEGACYCALLER_HOOK_NAME = '_legacycaller' -RESOLVE_HOOK_NAME = '_resolve' -MAY_RESOLVE_HOOK_NAME = '_mayResolve' -NEW_ENUMERATE_HOOK_NAME = '_newEnumerate' -ENUM_ENTRY_VARIABLE_NAME = 'strings' +) +ADDPROPERTY_HOOK_NAME = "_addProperty" +GETWRAPPERCACHE_HOOK_NAME = "_getWrapperCache" +FINALIZE_HOOK_NAME = "_finalize" +OBJECT_MOVED_HOOK_NAME = "_objectMoved" +CONSTRUCT_HOOK_NAME = "_constructor" +LEGACYCALLER_HOOK_NAME = "_legacycaller" +RESOLVE_HOOK_NAME = "_resolve" +MAY_RESOLVE_HOOK_NAME = "_mayResolve" +NEW_ENUMERATE_HOOK_NAME = "_newEnumerate" +ENUM_ENTRY_VARIABLE_NAME = "strings" INSTANCE_RESERVED_SLOTS = 1 # This size is arbitrary. It is a power of 2 to make using it as a modulo # operand cheap, and is usually around 1/3-1/5th of the set size (sometimes # smaller for very large sets). -GLOBAL_NAMES_PHF_SIZE=256 +GLOBAL_NAMES_PHF_SIZE = 256 def memberReservedSlot(member, descriptor): - return ("(DOM_INSTANCE_RESERVED_SLOTS + %d)" % - member.slotIndices[descriptor.interface.identifier.name]) + return ( + "(DOM_INSTANCE_RESERVED_SLOTS + %d)" + % member.slotIndices[descriptor.interface.identifier.name] + ) def memberXrayExpandoReservedSlot(member, descriptor): - return ("(xpc::JSSLOT_EXPANDO_COUNT + %d)" % - member.slotIndices[descriptor.interface.identifier.name]) + return ( + "(xpc::JSSLOT_EXPANDO_COUNT + %d)" + % member.slotIndices[descriptor.interface.identifier.name] + ) def mayUseXrayExpandoSlots(descriptor, attr): @@ -71,15 +98,21 @@ def toBindingNamespace(arg): def isTypeCopyConstructible(type): # Nullable and sequence stuff doesn't affect copy-constructibility type = type.unroll() - return (type.isPrimitive() or type.isString() or type.isEnum() or - (type.isUnion() and - CGUnionStruct.isUnionCopyConstructible(type)) or - (type.isDictionary() and - CGDictionary.isDictionaryCopyConstructible(type.inner)) or - # Interface types are only copy-constructible if they're Gecko - # interfaces. SpiderMonkey interfaces are not copy-constructible - # because of rooting issues. - (type.isInterface() and type.isGeckoInterface())) + return ( + type.isPrimitive() + or type.isString() + or type.isEnum() + or (type.isUnion() and CGUnionStruct.isUnionCopyConstructible(type)) + or ( + type.isDictionary() + and CGDictionary.isDictionaryCopyConstructible(type.inner) + ) + or + # Interface types are only copy-constructible if they're Gecko + # interfaces. SpiderMonkey interfaces are not copy-constructible + # because of rooting issues. 
+ (type.isInterface() and type.isGeckoInterface()) + ) class CycleCollectionUnsupported(TypeError): @@ -89,12 +122,14 @@ def __init__(self, message): def idlTypeNeedsCycleCollection(type): type = type.unroll() # Takes care of sequences and nullables - if ((type.isPrimitive() and type.tag() in builtinNames) or - type.isEnum() or - type.isString() or - type.isAny() or - type.isObject() or - type.isSpiderMonkeyInterface()): + if ( + (type.isPrimitive() and type.tag() in builtinNames) + or type.isEnum() + or type.isString() + or type.isAny() + or type.isObject() + or type.isSpiderMonkeyInterface() + ): return False elif type.isCallback() or type.isPromise() or type.isGeckoInterface(): return True @@ -102,17 +137,19 @@ def idlTypeNeedsCycleCollection(type): return any(idlTypeNeedsCycleCollection(t) for t in type.flatMemberTypes) elif type.isRecord(): if idlTypeNeedsCycleCollection(type.inner): - raise CycleCollectionUnsupported("Cycle collection for type %s is not supported" % - type) + raise CycleCollectionUnsupported( + "Cycle collection for type %s is not supported" % type + ) return False elif type.isDictionary(): return CGDictionary.dictionaryNeedsCycleCollection(type.inner) else: - raise CycleCollectionUnsupported("Don't know whether to cycle-collect type %s" % type) + raise CycleCollectionUnsupported( + "Don't know whether to cycle-collect type %s" % type + ) -def idlTypeNeedsCallContext(type, descriptor=None, - allowTreatNonCallableAsNull=False): +def idlTypeNeedsCallContext(type, descriptor=None, allowTreatNonCallableAsNull=False): """ Returns whether the given type needs error reporting via a BindingCallContext for JS-to-C++ conversions. This will happen when the @@ -136,8 +173,11 @@ def idlTypeNeedsCallContext(type, descriptor=None, if type.nullable(): # treatNonObjectAsNull() and treatNonCallableAsNull() are # only sane things to test on nullable types, so do that now. - if (allowTreatNonCallableAsNull and type.isCallback() and - (type.treatNonObjectAsNull() or type.treatNonCallableAsNull())): + if ( + allowTreatNonCallableAsNull + and type.isCallback() + and (type.treatNonObjectAsNull() or type.treatNonCallableAsNull()) + ): # This can't throw. so never needs a method description. return False type = type.inner @@ -168,8 +208,13 @@ def idlTypeNeedsCallContext(type, descriptor=None, # JS-to-Promise conversion won't cause us to throw any # specific exceptions, so does not need a method description. return False - if (type.isObject() or type.isInterface() or type.isCallback() or - type.isDictionary() or type.isRecord()): + if ( + type.isObject() + or type.isInterface() + or type.isCallback() + or type.isDictionary() + or type.isRecord() + ): # These can all throw if a primitive is passed in, at the very least. # There are some rare cases when we know we have an object, but those # are not worth the complexity of optimizing for. @@ -185,19 +230,21 @@ def idlTypeNeedsCallContext(type, descriptor=None, # CGHeaders trying to decide whether to include the method description # header. return False - raise TypeError("Don't know whether type '%s' needs a method description" % - type) + raise TypeError("Don't know whether type '%s' needs a method description" % type) # TryPreserveWrapper uses the addProperty hook to preserve the wrapper of # non-nsISupports cycle collected objects, so if wantsAddProperty is changed # to not cover that case then TryPreserveWrapper will need to be changed. 
def wantsAddProperty(desc): - return (desc.concrete and desc.wrapperCache and not desc.isGlobal()) + return desc.concrete and desc.wrapperCache and not desc.isGlobal() + def wantsGetWrapperCache(desc): - return desc.concrete and desc.wrapperCache and not desc.isGlobal() \ - and not desc.proxy + return ( + desc.concrete and desc.wrapperCache and not desc.isGlobal() and not desc.proxy + ) + # We'll want to insert the indent at the beginnings of lines, but we # don't want to indent empty lines. So only indent lines that have a @@ -233,6 +280,7 @@ def wrapper(arg): if retval is None: retval = cache[arg] = fn(arg) return retval + return wrapper @@ -242,7 +290,7 @@ def dedent(s): Remove all leading whitespace from s, and remove a blank line at the beginning. """ - if s.startswith('\n'): + if s.startswith("\n"): s = s[1:] return textwrap.dedent(s) @@ -284,9 +332,11 @@ def replace(match): depth = len(indentation) # Check that $*{xyz} appears by itself on a line. - prev = match.string[:match.start()] + prev = match.string[: match.start()] if (prev and not prev.endswith("\n")) or nl is None: - raise ValueError("Invalid fill() template: $*{%s} must appear by itself on a line" % name) + raise ValueError( + "Invalid fill() template: $*{%s} must appear by itself on a line" % name + ) # Now replace this whole line of template with the indented equivalent. modified_name = name + "_" + str(depth) @@ -332,16 +382,19 @@ def fill(template, **args): # Now apply argModList to args for (name, modified_name, depth) in argModList: if not (args[name] == "" or args[name].endswith("\n")): - raise ValueError("Argument %s with value %r is missing a newline" % (name, args[name])) + raise ValueError( + "Argument %s with value %r is missing a newline" % (name, args[name]) + ) args[modified_name] = indent(args[name], depth) return t.substitute(args) -class CGThing(): +class CGThing: """ Abstract base class for things that spit out code. """ + def __init__(self): pass # Nothing for now @@ -377,6 +430,7 @@ class CGStringTable(CGThing): The uint16_t indices are smaller than the pointer equivalents, and the string table requires no runtime relocations. """ + def __init__(self, accessorName, strings, static=False): CGThing.__init__(self) self.accessorName = accessorName @@ -409,13 +463,15 @@ def define(self): name=self.accessorName, table=table, indices=", ".join("%d" % index for index in indices), - currentIndex=currentIndex) + currentIndex=currentIndex, + ) class CGNativePropertyHooks(CGThing): """ Generate a NativePropertyHooks for a given descriptor """ + def __init__(self, descriptor, properties): CGThing.__init__(self) self.descriptor = descriptor @@ -424,7 +480,8 @@ def __init__(self, descriptor, properties): def declare(self): if not self.descriptor.wantsXrays: return "" - return dedent(""" + return dedent( + """ // We declare this as an array so that retrieving a pointer to this // binding's property hooks only requires compile/link-time resolvable // address arithmetic. Declaring it as a pointer instead would require @@ -433,15 +490,18 @@ def declare(self): // would require a run-time load for proper initialization, which would // then induce static constructors. Lots of static constructors. 
extern const NativePropertyHooks sNativePropertyHooks[]; - """) + """ + ) def define(self): if not self.descriptor.wantsXrays: return "" deleteNamedProperty = "nullptr" - if (self.descriptor.concrete and - self.descriptor.proxy and - not self.descriptor.isMaybeCrossOriginObject()): + if ( + self.descriptor.concrete + and self.descriptor.proxy + and not self.descriptor.isMaybeCrossOriginObject() + ): resolveOwnProperty = "ResolveOwnProperty" enumerateOwnProperties = "EnumerateOwnProperties" if self.descriptor.needsXrayNamedDeleterHook(): @@ -471,8 +531,11 @@ def define(self): else: prototypeID += "_ID_Count" parentProtoName = self.descriptor.parentPrototypeName - parentHooks = (toBindingNamespace(parentProtoName) + "::sNativePropertyHooks" - if parentProtoName else 'nullptr') + parentHooks = ( + toBindingNamespace(parentProtoName) + "::sNativePropertyHooks" + if parentProtoName + else "nullptr" + ) if self.descriptor.wantsXrayExpandoClass: expandoClass = "&sXrayExpandoObjectClass" @@ -500,20 +563,28 @@ def define(self): prototypeID=prototypeID, constructorID=constructorID, parentHooks=parentHooks, - expandoClass=expandoClass) + expandoClass=expandoClass, + ) def NativePropertyHooks(descriptor): - return "&sEmptyNativePropertyHooks" if not descriptor.wantsXrays else "sNativePropertyHooks" + return ( + "&sEmptyNativePropertyHooks" + if not descriptor.wantsXrays + else "sNativePropertyHooks" + ) def DOMClass(descriptor): - protoList = ['prototypes::id::' + proto for proto in descriptor.prototypeNameChain] + protoList = ["prototypes::id::" + proto for proto in descriptor.prototypeNameChain] # Pad out the list to the right length with _ID_Count so we # guarantee that all the lists are the same length. _ID_Count # is never the ID of any prototype, so it's safe to use as # padding. - protoList.extend(['prototypes::id::_ID_Count'] * (descriptor.config.maxProtoChainLength - len(protoList))) + protoList.extend( + ["prototypes::id::_ID_Count"] + * (descriptor.config.maxProtoChainLength - len(protoList)) + ) if descriptor.interface.isSerializable(): serializer = "Serialize" @@ -536,11 +607,12 @@ def DOMClass(descriptor): ${serializer}, ${wrapperCacheGetter} """, - protoChain=', '.join(protoList), + protoChain=", ".join(protoList), nativeType=descriptor.nativeType, hooks=NativePropertyHooks(descriptor), serializer=serializer, - wrapperCacheGetter=wrapperCacheGetter) + wrapperCacheGetter=wrapperCacheGetter, + ) def InstanceReservedSlots(descriptor): @@ -548,8 +620,10 @@ def InstanceReservedSlots(descriptor): if descriptor.isMaybeCrossOriginObject(): # We need a slot for the cross-origin holder too. 
if descriptor.interface.hasChildInterfaces(): - raise TypeError("We don't support non-leaf cross-origin interfaces " - "like %s" % descriptor.interface.identifier.name) + raise TypeError( + "We don't support non-leaf cross-origin interfaces " + "like %s" % descriptor.interface.identifier.name + ) slots += 1 return slots @@ -558,6 +632,7 @@ class CGDOMJSClass(CGThing): """ Generate a DOMJSClass for a given descriptor """ + def __init__(self, descriptor): CGThing.__init__(self) self.descriptor = descriptor @@ -566,22 +641,32 @@ def declare(self): return "" def define(self): - callHook = LEGACYCALLER_HOOK_NAME if self.descriptor.operations["LegacyCaller"] else 'nullptr' - objectMovedHook = OBJECT_MOVED_HOOK_NAME if self.descriptor.wrapperCache else 'nullptr' + callHook = ( + LEGACYCALLER_HOOK_NAME + if self.descriptor.operations["LegacyCaller"] + else "nullptr" + ) + objectMovedHook = ( + OBJECT_MOVED_HOOK_NAME if self.descriptor.wrapperCache else "nullptr" + ) slotCount = InstanceReservedSlots(self.descriptor) classFlags = "JSCLASS_IS_DOMJSCLASS | JSCLASS_FOREGROUND_FINALIZE | " if self.descriptor.isGlobal(): - classFlags += "JSCLASS_DOM_GLOBAL | JSCLASS_GLOBAL_FLAGS_WITH_SLOTS(DOM_GLOBAL_SLOTS)" + classFlags += ( + "JSCLASS_DOM_GLOBAL | JSCLASS_GLOBAL_FLAGS_WITH_SLOTS(DOM_GLOBAL_SLOTS)" + ) traceHook = "JS_GlobalObjectTraceHook" reservedSlots = "JSCLASS_GLOBAL_APPLICATION_SLOTS" else: classFlags += "JSCLASS_HAS_RESERVED_SLOTS(%d)" % slotCount - traceHook = 'nullptr' + traceHook = "nullptr" reservedSlots = slotCount if self.descriptor.interface.hasProbablyShortLivingWrapper(): if not self.descriptor.wrapperCache: - raise TypeError("Need a wrapper cache to support nursery " - "allocation of DOM objects") + raise TypeError( + "Need a wrapper cache to support nursery " + "allocation of DOM objects" + ) classFlags += " | JSCLASS_SKIP_NURSERY_FINALIZE" if self.descriptor.interface.getExtendedAttribute("NeedResolve"): @@ -634,7 +719,9 @@ def define(self): """, name=self.descriptor.interface.getClassName(), flags=classFlags, - addProperty=ADDPROPERTY_HOOK_NAME if wantsAddProperty(self.descriptor) else 'nullptr', + addProperty=ADDPROPERTY_HOOK_NAME + if wantsAddProperty(self.descriptor) + else "nullptr", newEnumerate=newEnumerateHook, resolve=resolveHook, mayResolve=mayResolveHook, @@ -645,13 +732,15 @@ def define(self): descriptor=DOMClass(self.descriptor), instanceReservedSlots=INSTANCE_RESERVED_SLOTS, reservedSlots=reservedSlots, - slotCount=slotCount) + slotCount=slotCount, + ) class CGDOMProxyJSClass(CGThing): """ Generate a DOMJSClass for a given proxy descriptor """ + def __init__(self, descriptor): CGThing.__init__(self) self.descriptor = descriptor @@ -662,8 +751,7 @@ def declare(self): def define(self): slotCount = InstanceReservedSlots(self.descriptor) # We need one reserved slot (DOM_OBJECT_SLOT). - flags = ["JSCLASS_IS_DOMJSCLASS", - "JSCLASS_HAS_RESERVED_SLOTS(%d)" % slotCount] + flags = ["JSCLASS_IS_DOMJSCLASS", "JSCLASS_HAS_RESERVED_SLOTS(%d)" % slotCount] # We don't use an IDL annotation for JSCLASS_EMULATES_UNDEFINED because # we don't want people ever adding that to any interface other than # HTMLAllCollection. So just hardcode it here. 
@@ -679,7 +767,8 @@ def define(self): """, name=self.descriptor.interface.identifier.name, flags=" | ".join(flags), - descriptor=DOMClass(self.descriptor)) + descriptor=DOMClass(self.descriptor), + ) class CGXrayExpandoJSClass(CGThing): @@ -688,12 +777,13 @@ class CGXrayExpandoJSClass(CGThing): needed if we have members in slots (for [Cached] or [StoreInSlot] stuff). """ + def __init__(self, descriptor): assert descriptor.interface.totalMembersInSlots != 0 assert descriptor.wantsXrays assert descriptor.wantsXrayExpandoClass CGThing.__init__(self) - self.descriptor = descriptor; + self.descriptor = descriptor def declare(self): return "" @@ -708,7 +798,8 @@ def define(self): // way for now. DEFINE_XRAY_EXPANDO_CLASS(static, sXrayExpandoObjectClass, ${memberSlots}); """, - memberSlots=self.descriptor.interface.totalMembersInSlots) + memberSlots=self.descriptor.interface.totalMembersInSlots, + ) def PrototypeIDAndDepth(descriptor): @@ -768,11 +859,16 @@ def define(self): slotCount = "DOM_INTERFACE_PROTO_SLOTS_BASE" # Globals handle unforgeables directly in Wrap() instead of # via a holder. - if (self.descriptor.hasUnforgeableMembers and - not self.descriptor.isGlobal()): - slotCount += " + 1 /* slot for the JSObject holding the unforgeable properties */" + if self.descriptor.hasUnforgeableMembers and not self.descriptor.isGlobal(): + slotCount += ( + " + 1 /* slot for the JSObject holding the unforgeable properties */" + ) (protoGetter, _) = InterfacePrototypeObjectProtoGetter(self.descriptor) - type = "eGlobalInterfacePrototype" if self.descriptor.isGlobal() else "eInterfacePrototype" + type = ( + "eGlobalInterfacePrototype" + if self.descriptor.isGlobal() + else "eInterfacePrototype" + ) return fill( """ static const DOMIfaceAndProtoJSClass sPrototypeClass = { @@ -799,7 +895,8 @@ def define(self): hooks=NativePropertyHooks(self.descriptor), prototypeID=prototypeID, depth=depth, - protoGetter=protoGetter) + protoGetter=protoGetter, + ) def InterfaceObjectProtoGetter(descriptor, forXrays=False): @@ -822,8 +919,7 @@ def InterfaceObjectProtoGetter(descriptor, forXrays=False): protoGetter = prefix + "::GetConstructorObject" protoHandleGetter = prefix + "::GetConstructorObjectHandle" elif descriptor.interface.isNamespace(): - if (forXrays or - not descriptor.interface.getExtendedAttribute("ProtoObjectHack")): + if forXrays or not descriptor.interface.getExtendedAttribute("ProtoObjectHack"): protoGetter = "JS::GetRealmObjectPrototype" else: protoGetter = "GetHackedNamespaceProtoObject" @@ -857,10 +953,10 @@ def define(self): prototypeID, depth = PrototypeIDAndDepth(self.descriptor) slotCount = "DOM_INTERFACE_SLOTS_BASE" if len(self.descriptor.interface.namedConstructors) > 0: - slotCount += (" + %i /* slots for the named constructors */" % - len(self.descriptor.interface.namedConstructors)) - (protoGetter, _) = InterfaceObjectProtoGetter(self.descriptor, - forXrays=True) + slotCount += " + %i /* slots for the named constructors */" % len( + self.descriptor.interface.namedConstructors + ) + (protoGetter, _) = InterfaceObjectProtoGetter(self.descriptor, forXrays=True) if ctorname == "ThrowingConstructor": ret = "" @@ -886,7 +982,8 @@ def define(self): }; """, - ctorname=ctorname) + ctorname=ctorname, + ) classOpsPtr = "&sInterfaceObjectClassOps" if self.descriptor.interface.isNamespace(): @@ -899,8 +996,10 @@ def define(self): objectOps = "JS_NULL_OBJECT_OPS" else: classString = "Function" - funToString = ("\"function %s() {\\n [native code]\\n}\"" % - self.descriptor.interface.identifier.name) + 
funToString = ( + '"function %s() {\\n [native code]\\n}"' + % self.descriptor.interface.identifier.name + ) # We need non-default ObjectOps so we can actually make # use of our funToString. objectOps = "&sInterfaceObjectClassObjectOps" @@ -934,14 +1033,17 @@ def define(self): prototypeID=prototypeID, depth=depth, funToString=funToString, - protoGetter=protoGetter) + protoGetter=protoGetter, + ) return ret + class CGList(CGThing): """ Generate code for a list of GCThings. Just concatenates them together, with an optional joiner string. "\n" is a common joiner. """ + def __init__(self, children, joiner=""): CGThing.__init__(self) # Make a copy of the kids into a list, because if someone passes in a @@ -963,7 +1065,9 @@ def join(self, iterable): return self.joiner.join(s for s in iterable if len(s) > 0) def declare(self): - return self.join(child.declare() for child in self.children if child is not None) + return self.join( + child.declare() for child in self.children if child is not None + ) def define(self): return self.join(child.define() for child in self.children if child is not None) @@ -985,6 +1089,7 @@ class CGGeneric(CGThing): A class that spits out a fixed string into the codegen. Can spit out a separate string for the declaration too. """ + def __init__(self, define="", declare=""): self.declareText = declare self.defineText = define @@ -1004,6 +1109,7 @@ class CGIndenter(CGThing): A class that takes another CGThing and generates code that indents that CGThing by some number of spaces. The default indent is two spaces. """ + def __init__(self, child, indentLevel=2, declareOnly=False): assert isinstance(child, CGThing) CGThing.__init__(self) @@ -1026,9 +1132,20 @@ class CGWrapper(CGThing): """ Generic CGThing that wraps other CGThings with pre and post text. """ - def __init__(self, child, pre="", post="", declarePre=None, - declarePost=None, definePre=None, definePost=None, - declareOnly=False, defineOnly=False, reindent=False): + + def __init__( + self, + child, + pre="", + post="", + declarePre=None, + declarePost=None, + definePre=None, + definePost=None, + declareOnly=False, + defineOnly=False, + reindent=False, + ): CGThing.__init__(self) self.child = child self.declarePre = declarePre or pre @@ -1041,7 +1158,7 @@ def __init__(self, child, pre="", post="", declarePre=None, def declare(self): if self.defineOnly: - return '' + return "" decl = self.child.declare() if self.reindent: decl = self.reindentString(decl, self.declarePre) @@ -1049,7 +1166,7 @@ def declare(self): def define(self): if self.declareOnly: - return '' + return "" defn = self.child.define() if self.reindent: defn = self.reindentString(defn, self.definePre) @@ -1063,7 +1180,8 @@ def reindentString(stringToIndent, widthString): # it is a multiline string. 
lastLineWidth = len(widthString.splitlines()[-1]) return stripTrailingWhitespace( - stringToIndent.replace("\n", "\n" + (" " * lastLineWidth))) + stringToIndent.replace("\n", "\n" + (" " * lastLineWidth)) + ) def deps(self): return self.child.deps() @@ -1071,28 +1189,39 @@ def deps(self): class CGIfWrapper(CGList): def __init__(self, child, condition): - CGList.__init__(self, [ - CGWrapper(CGGeneric(condition), pre="if (", post=") {\n", reindent=True), - CGIndenter(child), - CGGeneric("}\n") - ]) + CGList.__init__( + self, + [ + CGWrapper( + CGGeneric(condition), pre="if (", post=") {\n", reindent=True + ), + CGIndenter(child), + CGGeneric("}\n"), + ], + ) class CGIfElseWrapper(CGList): def __init__(self, condition, ifTrue, ifFalse): - CGList.__init__(self, [ - CGWrapper(CGGeneric(condition), pre="if (", post=") {\n", reindent=True), - CGIndenter(ifTrue), - CGGeneric("} else {\n"), - CGIndenter(ifFalse), - CGGeneric("}\n") - ]) + CGList.__init__( + self, + [ + CGWrapper( + CGGeneric(condition), pre="if (", post=") {\n", reindent=True + ), + CGIndenter(ifTrue), + CGGeneric("} else {\n"), + CGIndenter(ifFalse), + CGGeneric("}\n"), + ], + ) class CGElseChain(CGThing): """ Concatenate if statements in an if-else-if-else chain. """ + def __init__(self, children): self.children = [c for c in children if c is not None] @@ -1127,8 +1256,7 @@ class CGNamespace(CGWrapper): def __init__(self, namespace, child, declareOnly=False): pre = "namespace %s {\n" % namespace post = "} // namespace %s\n" % namespace - CGWrapper.__init__(self, child, pre=pre, post=post, - declareOnly=declareOnly) + CGWrapper.__init__(self, child, pre=pre, post=post, declareOnly=declareOnly) @staticmethod def build(namespaces, child, declareOnly=False): @@ -1145,22 +1273,36 @@ class CGIncludeGuard(CGWrapper): """ Generates include guards for a header. """ + def __init__(self, prefix, child): """|prefix| is the filename without the extension.""" - define = 'mozilla_dom_%s_h' % prefix - CGWrapper.__init__(self, child, - declarePre='#ifndef %s\n#define %s\n\n' % (define, define), - declarePost='\n#endif // %s\n' % define) + define = "mozilla_dom_%s_h" % prefix + CGWrapper.__init__( + self, + child, + declarePre="#ifndef %s\n#define %s\n\n" % (define, define), + declarePost="\n#endif // %s\n" % define, + ) class CGHeaders(CGWrapper): """ Generates the appropriate include statements. """ - def __init__(self, descriptors, dictionaries, callbacks, - callbackDescriptors, - declareIncludes, defineIncludes, prefix, child, - config=None, jsImplementedDescriptors=[]): + + def __init__( + self, + descriptors, + dictionaries, + callbacks, + callbackDescriptors, + declareIncludes, + defineIncludes, + prefix, + child, + config=None, + jsImplementedDescriptors=[], + ): """ Builds a set of includes to cover |descriptors|. @@ -1206,7 +1348,9 @@ def __init__(self, descriptors, dictionaries, callbacks, bindingIncludes = set(self.getDeclarationFilename(d) for d in interfaceDeps) # Grab all the implementation declaration files we need. - implementationIncludes = set(d.headerFile for d in descriptors if d.needsHeaderInclude()) + implementationIncludes = set( + d.headerFile for d in descriptors if d.needsHeaderInclude() + ) # Now find all the things we'll need as arguments because we # need to wrap or unwrap them. @@ -1304,8 +1448,9 @@ def addHeadersForType(typeAndPossibleDictionary): # parametrized over, if needed. 
addHeadersForType((t.inner, dictionary)) - for t in getAllTypes(descriptors + callbackDescriptors, dictionaries, - callbacks): + for t in getAllTypes( + descriptors + callbackDescriptors, dictionaries, callbacks + ): addHeadersForType(t) def addHeaderForFunc(func, desc): @@ -1333,7 +1478,9 @@ def addHeaderForFunc(func, desc): for m in desc.interface.members: addHeaderForFunc(PropertyDefiner.getStringAttr(m, "Func"), desc) - staticTypeOverride = PropertyDefiner.getStringAttr(m, "StaticClassOverride") + staticTypeOverride = PropertyDefiner.getStringAttr( + m, "StaticClassOverride" + ) if staticTypeOverride: bindingHeaders.add("/".join(staticTypeOverride.split("::")) + ".h") # getExtendedAttribute() returns a list, extract the entry. @@ -1348,19 +1495,20 @@ def addHeaderForFunc(func, desc): # maplike/setlike/iterable, since they'll be needed for # convenience functions if desc.interface.maplikeOrSetlikeOrIterable.hasKeyType(): - addHeadersForType((desc.interface.maplikeOrSetlikeOrIterable.keyType, - None)) + addHeadersForType( + (desc.interface.maplikeOrSetlikeOrIterable.keyType, None) + ) if desc.interface.maplikeOrSetlikeOrIterable.hasValueType(): - addHeadersForType((desc.interface.maplikeOrSetlikeOrIterable.valueType, - None)) + addHeadersForType( + (desc.interface.maplikeOrSetlikeOrIterable.valueType, None) + ) for d in dictionaries: if d.parent: declareIncludes.add(self.getDeclarationFilename(d.parent)) bindingHeaders.add(self.getDeclarationFilename(d)) for m in d.members: - addHeaderForFunc(PropertyDefiner.getStringAttr(m, "Func"), - None) + addHeaderForFunc(PropertyDefiner.getStringAttr(m, "Func"), None) # No need to worry about Func on members of ancestors, because that # will happen automatically in whatever files those ancestors live # in. @@ -1402,20 +1550,29 @@ def headerName(include): return include # Non-system headers need to be placed in quotes. return '"%s"' % include - return ''.join(['#include %s\n' % headerName(i) for i in includes]) + '\n' - CGWrapper.__init__(self, child, - declarePre=_includeString(sorted(declareIncludes)), - definePre=_includeString(sorted(set(defineIncludes) | - bindingIncludes | - bindingHeaders | - implementationIncludes))) + + return "".join(["#include %s\n" % headerName(i) for i in includes]) + "\n" + + CGWrapper.__init__( + self, + child, + declarePre=_includeString(sorted(declareIncludes)), + definePre=_includeString( + sorted( + set(defineIncludes) + | bindingIncludes + | bindingHeaders + | implementationIncludes + ) + ), + ) @staticmethod def getDeclarationFilename(decl): # Use our local version of the header, not the exported one, so that # test bindings, which don't export, will work correctly. basename = os.path.basename(decl.filename()) - return basename.replace('.webidl', 'Binding.h') + return basename.replace(".webidl", "Binding.h") @staticmethod def getUnionDeclarationFilename(config, unionType): @@ -1554,7 +1711,9 @@ def addHeadersForType(f): addHeadersForType(f) if idlTypeNeedsCycleCollection(t): - declarations.add(("mozilla::dom::%s" % CGUnionStruct.unionTypeName(t, True), False)) + declarations.add( + ("mozilla::dom::%s" % CGUnionStruct.unionTypeName(t, True), False) + ) traverseMethods[name] = CGCycleCollectionTraverseForOwningUnionMethod(t) unlinkMethods[name] = CGCycleCollectionUnlinkForOwningUnionMethod(t) @@ -1563,9 +1722,14 @@ def addHeadersForType(f): # for these methods should come before the class declaration. Otherwise # some compilers treat the friend declaration as a forward-declaration in # the class scope. 
- return (headers, implheaders, declarations, - SortedDictValues(traverseMethods), SortedDictValues(unlinkMethods), - SortedDictValues(unionStructs)) + return ( + headers, + implheaders, + declarations, + SortedDictValues(traverseMethods), + SortedDictValues(unlinkMethods), + SortedDictValues(unionStructs), + ) def UnionConversions(unionTypes, config): @@ -1634,28 +1798,30 @@ def addHeadersForType(f): for f in t.flatMemberTypes: addHeadersForType(f) - return (headers, - CGWrapper(CGList(SortedDictValues(unionConversions), "\n"), - post="\n\n")) + return ( + headers, + CGWrapper(CGList(SortedDictValues(unionConversions), "\n"), post="\n\n"), + ) -class Argument(): +class Argument: """ A class for outputting the type and name of an argument """ + def __init__(self, argType, name, default=None): self.argType = argType self.name = name self.default = default def declare(self): - string = self.argType + ' ' + self.name + string = self.argType + " " + self.name if self.default is not None: string += " = " + self.default return string def define(self): - return self.argType + ' ' + self.name + return self.argType + " " + self.name class CGAbstractMethod(CGThing): @@ -1686,9 +1852,19 @@ class CGAbstractMethod(CGThing): canRunScript should be True to generate a MOZ_CAN_RUN_SCRIPT annotation. """ - def __init__(self, descriptor, name, returnType, args, inline=False, - alwaysInline=False, static=False, templateArgs=None, - canRunScript=False): + + def __init__( + self, + descriptor, + name, + returnType, + args, + inline=False, + alwaysInline=False, + static=False, + templateArgs=None, + canRunScript=False, + ): CGThing.__init__(self) self.descriptor = descriptor self.name = name @@ -1701,31 +1877,36 @@ def __init__(self, descriptor, name, returnType, args, inline=False, self.canRunScript = canRunScript def _argstring(self, declare): - return ', '.join([a.declare() if declare else a.define() for a in self.args]) + return ", ".join([a.declare() if declare else a.define() for a in self.args]) def _template(self): if self.templateArgs is None: - return '' - return 'template <%s>\n' % ', '.join(self.templateArgs) + return "" + return "template <%s>\n" % ", ".join(self.templateArgs) def _decorators(self): decorators = [] if self.canRunScript: - decorators.append('MOZ_CAN_RUN_SCRIPT'); + decorators.append("MOZ_CAN_RUN_SCRIPT") if self.alwaysInline: - decorators.append('MOZ_ALWAYS_INLINE') + decorators.append("MOZ_ALWAYS_INLINE") elif self.inline: - decorators.append('inline') + decorators.append("inline") if self.static: - decorators.append('static') + decorators.append("static") decorators.append(self.returnType) maybeNewline = " " if self.inline else "\n" - return ' '.join(decorators) + maybeNewline + return " ".join(decorators) + maybeNewline def declare(self): if self.inline: return self._define(True) - return "%s%s%s(%s);\n" % (self._template(), self._decorators(), self.name, self._argstring(True)) + return "%s%s%s(%s);\n" % ( + self._template(), + self._decorators(), + self.name, + self._argstring(True), + ) def indent_body(self, body): """ @@ -1736,9 +1917,11 @@ def indent_body(self, body): return indent(body) def _define(self, fromDeclare=False): - return (self.definition_prologue(fromDeclare) + - self.indent_body(self.definition_body()) + - self.definition_epilogue()) + return ( + self.definition_prologue(fromDeclare) + + self.indent_body(self.definition_body()) + + self.definition_epilogue() + ) def define(self): return "" if self.inline else self._define() @@ -1748,26 +1931,33 @@ def 
definition_prologue(self, fromDeclare): if error_reporting_label: # We're going to want a BindingCallContext. Rename our JSContext* # arg accordingly. - i = 0; + i = 0 while i < len(self.args): - arg = self.args[i]; - if arg.argType == 'JSContext*': + arg = self.args[i] + if arg.argType == "JSContext*": cxname = arg.name - self.args[i] = Argument(arg.argType, 'cx_', arg.default) + self.args[i] = Argument(arg.argType, "cx_", arg.default) break i += 1 if i == len(self.args): raise TypeError("Must have a JSContext* to create a BindingCallContext") - prologue = "%s%s%s(%s)\n{\n" % (self._template(), self._decorators(), - self.name, self._argstring(fromDeclare)) + prologue = "%s%s%s(%s)\n{\n" % ( + self._template(), + self._decorators(), + self.name, + self._argstring(fromDeclare), + ) if error_reporting_label: - prologue += indent(fill( - """ + prologue += indent( + fill( + """ BindingCallContext ${cxname}(cx_, "${label}"); """, - cxname=cxname, - label=error_reporting_label)) + cxname=cxname, + label=error_reporting_label, + ) + ) profiler_label = self.auto_profiler_label() if profiler_label: @@ -1785,8 +1975,9 @@ def definition_body(self): Override this method to return a pair of (descriptive string, name of a JSContext* variable) in order to generate a profiler label for this method. """ + def auto_profiler_label(self): - return None # Override me! + return None # Override me! """ Override this method to return a string to be used as the label for a @@ -1795,18 +1986,28 @@ def auto_profiler_label(self): 'cx_' and a BindingCallContext named 'cx' will be instantiated with the given label. """ + def error_reporting_label(self): - return None # Override me! + return None # Override me! + class CGAbstractStaticMethod(CGAbstractMethod): """ Abstract base class for codegen of implementation-only (no declaration) static methods. """ + def __init__(self, descriptor, name, returnType, args, canRunScript=False): - CGAbstractMethod.__init__(self, descriptor, name, returnType, args, - inline=False, static=True, - canRunScript=canRunScript) + CGAbstractMethod.__init__( + self, + descriptor, + name, + returnType, + args, + inline=False, + static=True, + canRunScript=canRunScript, + ) def declare(self): # We only have implementation @@ -1818,13 +2019,15 @@ class CGAbstractClassHook(CGAbstractStaticMethod): Meant for implementing JSClass hooks, like Finalize or Trace. Does very raw 'this' unwrapping as it assumes that the unwrapped type is always known. 
""" + def __init__(self, descriptor, name, returnType, args): - CGAbstractStaticMethod.__init__(self, descriptor, name, returnType, - args) + CGAbstractStaticMethod.__init__(self, descriptor, name, returnType, args) def definition_body_prologue(self): - return ("%s* self = UnwrapPossiblyNotInitializedDOMObject<%s>(obj);\n" % - (self.descriptor.nativeType, self.descriptor.nativeType)) + return "%s* self = UnwrapPossiblyNotInitializedDOMObject<%s>(obj);\n" % ( + self.descriptor.nativeType, + self.descriptor.nativeType, + ) def definition_body(self): return self.definition_body_prologue() + self.generate_code() @@ -1835,8 +2038,7 @@ def generate_code(self): class CGGetJSClassMethod(CGAbstractMethod): def __init__(self, descriptor): - CGAbstractMethod.__init__(self, descriptor, 'GetJSClass', 'const JSClass*', - []) + CGAbstractMethod.__init__(self, descriptor, "GetJSClass", "const JSClass*", []) def definition_body(self): return "return sClass.ToJSClass();\n" @@ -1846,13 +2048,17 @@ class CGAddPropertyHook(CGAbstractClassHook): """ A hook for addProperty, used to preserve our wrapper from GC. """ + def __init__(self, descriptor): - args = [Argument('JSContext*', 'cx'), - Argument('JS::Handle', 'obj'), - Argument('JS::Handle', 'id'), - Argument('JS::Handle', 'val')] - CGAbstractClassHook.__init__(self, descriptor, ADDPROPERTY_HOOK_NAME, - 'bool', args) + args = [ + Argument("JSContext*", "cx"), + Argument("JS::Handle", "obj"), + Argument("JS::Handle", "id"), + Argument("JS::Handle", "val"), + ] + CGAbstractClassHook.__init__( + self, descriptor, ADDPROPERTY_HOOK_NAME, "bool", args + ) def generate_code(self): assert self.descriptor.wrapperCache @@ -1860,14 +2066,16 @@ def generate_code(self): # cycle collected objects, so if addProperty is ever changed to do # anything more or less than preserve the wrapper, TryPreserveWrapper # will need to be changed. - return dedent(""" + return dedent( + """ // We don't want to preserve if we don't have a wrapper, and we // obviously can't preserve if we're not initialized. if (self && self->GetWrapperPreserveColor()) { PreserveWrapper(self); } return true; - """) + """ + ) class CGGetWrapperCacheHook(CGAbstractClassHook): @@ -1875,22 +2083,25 @@ class CGGetWrapperCacheHook(CGAbstractClassHook): A hook for GetWrapperCache, used by HasReleasedWrapper to get the nsWrapperCache pointer for a non-nsISupports object. """ + def __init__(self, descriptor): - args = [Argument('JS::Handle', 'obj')] - CGAbstractClassHook.__init__(self, descriptor, - GETWRAPPERCACHE_HOOK_NAME, - 'nsWrapperCache*', args) + args = [Argument("JS::Handle", "obj")] + CGAbstractClassHook.__init__( + self, descriptor, GETWRAPPERCACHE_HOOK_NAME, "nsWrapperCache*", args + ) def generate_code(self): assert self.descriptor.wrapperCache - return dedent(""" + return dedent( + """ return self; - """) + """ + ) def finalizeHook(descriptor, hookName, freeOp, obj): finalize = "JS::SetReservedSlot(%s, DOM_OBJECT_SLOT, JS::UndefinedValue());\n" % obj - if descriptor.interface.getExtendedAttribute('OverrideBuiltins'): + if descriptor.interface.getExtendedAttribute("OverrideBuiltins"): finalize += fill( """ // Either our proxy created an expando object or not. 
If it did, @@ -1911,7 +2122,8 @@ def finalizeHook(descriptor, hookName, freeOp, obj): self->mExpandoAndGeneration.expando = JS::UndefinedValue(); } """, - obj=obj) + obj=obj, + ) if descriptor.wrapperCache: finalize += "ClearWrapper(self, self, %s);\n" % obj if descriptor.isGlobal(): @@ -1923,9 +2135,9 @@ def finalizeHook(descriptor, hookName, freeOp, obj): JS::MemoryUse::DOMBinding); } """, - obj=obj) - finalize += ("AddForDeferredFinalization<%s>(self);\n" % - descriptor.nativeType) + obj=obj, + ) + finalize += "AddForDeferredFinalization<%s>(self);\n" % descriptor.nativeType return CGIfWrapper(CGGeneric(finalize), "self") @@ -1933,19 +2145,21 @@ class CGClassFinalizeHook(CGAbstractClassHook): """ A hook for finalize, used to release our native object. """ + def __init__(self, descriptor): - args = [Argument('JSFreeOp*', 'fop'), Argument('JSObject*', 'obj')] - CGAbstractClassHook.__init__(self, descriptor, FINALIZE_HOOK_NAME, - 'void', args) + args = [Argument("JSFreeOp*", "fop"), Argument("JSObject*", "obj")] + CGAbstractClassHook.__init__(self, descriptor, FINALIZE_HOOK_NAME, "void", args) def generate_code(self): - return finalizeHook(self.descriptor, self.name, - self.args[0].name, self.args[1].name).define() + return finalizeHook( + self.descriptor, self.name, self.args[0].name, self.args[1].name + ).define() def objectMovedHook(descriptor, hookName, obj, old): assert descriptor.wrapperCache - return fill(""" + return fill( + """ if (self) { UpdateWrapper(self, self, ${obj}, ${old}); } @@ -1953,7 +2167,8 @@ def objectMovedHook(descriptor, hookName, obj, old): return 0; """, obj=obj, - old=old) + old=old, + ) class CGClassObjectMovedHook(CGAbstractClassHook): @@ -1961,29 +2176,36 @@ class CGClassObjectMovedHook(CGAbstractClassHook): A hook for objectMovedOp, used to update the wrapper cache when an object it is holding moves. """ + def __init__(self, descriptor): - args = [Argument('JSObject*', 'obj'), Argument('JSObject*', 'old')] - CGAbstractClassHook.__init__(self, descriptor, OBJECT_MOVED_HOOK_NAME, - 'size_t', args) + args = [Argument("JSObject*", "obj"), Argument("JSObject*", "old")] + CGAbstractClassHook.__init__( + self, descriptor, OBJECT_MOVED_HOOK_NAME, "size_t", args + ) def generate_code(self): - return objectMovedHook(self.descriptor, self.name, - self.args[0].name, self.args[1].name) + return objectMovedHook( + self.descriptor, self.name, self.args[0].name, self.args[1].name + ) def JSNativeArguments(): - return [Argument('JSContext*', 'cx'), - Argument('unsigned', 'argc'), - Argument('JS::Value*', 'vp')] + return [ + Argument("JSContext*", "cx"), + Argument("unsigned", "argc"), + Argument("JS::Value*", "vp"), + ] class CGClassConstructor(CGAbstractStaticMethod): """ JS-visible constructor for our objects """ + def __init__(self, descriptor, ctor, name=CONSTRUCT_HOOK_NAME): - CGAbstractStaticMethod.__init__(self, descriptor, name, 'bool', - JSNativeArguments()) + CGAbstractStaticMethod.__init__( + self, descriptor, name, "bool", JSNativeArguments() + ) self._ctor = ctor def define(self): @@ -2013,19 +2235,26 @@ def generate_code(self): prototypes::id::${name}, CreateInterfaceObjects); """, - name=self.descriptor.name) + name=self.descriptor.name, + ) # If the interface is already SecureContext, notify getConditionList to skip that check, # because the constructor won't be exposed in non-secure contexts to start with. 
- alreadySecureContext = self.descriptor.interface.getExtendedAttribute("SecureContext") + alreadySecureContext = self.descriptor.interface.getExtendedAttribute( + "SecureContext" + ) # We want to throw if any of the conditions returned by getConditionList are false. conditionsCheck = "" - rawConditions = getRawConditionList(self._ctor, "cx", "obj", alreadySecureContext) + rawConditions = getRawConditionList( + self._ctor, "cx", "obj", alreadySecureContext + ) if len(rawConditions) > 0: notConditions = " ||\n".join("!" + cond for cond in rawConditions) failedCheckAction = CGGeneric("return ThrowingConstructor(cx, argc, vp);\n") - conditionsCheck = CGIfWrapper(failedCheckAction, notConditions).define() + "\n" + conditionsCheck = ( + CGIfWrapper(failedCheckAction, notConditions).define() + "\n" + ) # Additionally, we want to throw if a caller does a bareword invocation # of a constructor without |new|. @@ -2050,12 +2279,14 @@ def generate_code(self): """, conditionsCheck=conditionsCheck, ctorName=ctorName, - name=self.descriptor.name) + name=self.descriptor.name, + ) name = self._ctor.identifier.name nativeName = MakeNativeName(self.descriptor.binaryNameFor(name)) - callGenerator = CGMethodCall(nativeName, True, self.descriptor, - self._ctor, isConstructor=True) + callGenerator = CGMethodCall( + nativeName, True, self.descriptor, self._ctor, isConstructor=True + ) return preamble + "\n" + callGenerator.define() def auto_profiler_label(self): @@ -2065,16 +2296,17 @@ def auto_profiler_label(self): "${ctorName}", "constructor", DOM, cx, uint32_t(js::ProfilingStackFrame::Flags::RELEVANT_FOR_JS)); """, - ctorName=GetConstructorNameForReporting(self.descriptor, self._ctor)) + ctorName=GetConstructorNameForReporting(self.descriptor, self._ctor), + ) def error_reporting_label(self): - return CGSpecializedMethod.error_reporting_label_helper(self.descriptor, - self._ctor, - isConstructor=True) + return CGSpecializedMethod.error_reporting_label_helper( + self.descriptor, self._ctor, isConstructor=True + ) def NamedConstructorName(m): - return '_' + m.identifier.name + return "_" + m.identifier.name class CGNamedConstructors(CGThing): @@ -2098,8 +2330,9 @@ def define(self): namedConstructors = "" for n in self.descriptor.interface.namedConstructors: namedConstructors += ( - "{ \"%s\", { %s, &sNamedConstructorNativePropertyHooks }, %i },\n" % - (n.identifier.name, NamedConstructorName(n), methodLength(n))) + '{ "%s", { %s, &sNamedConstructorNativePropertyHooks }, %i },\n' + % (n.identifier.name, NamedConstructorName(n), methodLength(n)) + ) return fill( """ @@ -2120,7 +2353,8 @@ def define(self): """, name=self.descriptor.name, constructorID=constructorID, - namedConstructors=namedConstructors) + namedConstructors=namedConstructors, + ) def isChromeOnly(m): @@ -2130,9 +2364,11 @@ def isChromeOnly(m): def prefIdentifier(pref): return pref.replace(".", "_").replace("-", "_") + def prefHeader(pref): return "mozilla/StaticPrefs_%s.h" % pref.partition(".")[0] + class MemberCondition: """ An object representing the condition for a member to actually be @@ -2145,8 +2381,10 @@ class MemberCondition: nonExposedGlobals: A set of names of globals. Can be empty, in which case it's treated the same way as None. 
""" - def __init__(self, pref=None, func=None, secureContext=False, - nonExposedGlobals=None): + + def __init__( + self, pref=None, func=None, secureContext=False, nonExposedGlobals=None + ): assert pref is None or isinstance(pref, str) assert func is None or isinstance(func, str) assert isinstance(secureContext, bool) @@ -2164,29 +2402,35 @@ def toFuncPtr(val): if val is None: return "nullptr" return "&" + val + self.func = toFuncPtr(func) if nonExposedGlobals: # Nonempty set self.nonExposedGlobals = " | ".join( - map(lambda g: "GlobalNames::%s" % g, - sorted(nonExposedGlobals))) + map(lambda g: "GlobalNames::%s" % g, sorted(nonExposedGlobals)) + ) else: self.nonExposedGlobals = "0" def __eq__(self, other): - return (self.pref == other.pref and self.func == other.func and - self.secureContext == other.secureContext and - self.nonExposedGlobals == other.nonExposedGlobals) + return ( + self.pref == other.pref + and self.func == other.func + and self.secureContext == other.secureContext + and self.nonExposedGlobals == other.nonExposedGlobals + ) def __ne__(self, other): return not self.__eq__(other) def hasDisablers(self): - return (self.pref is not None or - self.secureContext or - self.func != "nullptr" or - self.nonExposedGlobals != "0") + return ( + self.pref is not None + or self.secureContext + or self.func != "nullptr" + or self.nonExposedGlobals != "0" + ) class PropertyDefiner: @@ -2198,6 +2442,7 @@ class PropertyDefiner: things only exposed to chrome and self.regular to the list of things exposed to both chrome and web pages. """ + def __init__(self, descriptor, name): self.descriptor = descriptor self.name = name @@ -2247,17 +2492,22 @@ def getControllingCondition(interfaceMember, descriptor): nonExposureSet = interface.exposureSet - interfaceMember.exposureSet return MemberCondition( - PropertyDefiner.getStringAttr(interfaceMember, - "Pref"), - PropertyDefiner.getStringAttr(interfaceMember, - "Func"), + PropertyDefiner.getStringAttr(interfaceMember, "Pref"), + PropertyDefiner.getStringAttr(interfaceMember, "Func"), interfaceMember.getExtendedAttribute("SecureContext") is not None, - nonExposureSet) + nonExposureSet, + ) @staticmethod - def generatePrefableArrayValues(array, descriptor, specFormatter, specTerminator, - getCondition, getDataTuple, - switchToCondition=None): + def generatePrefableArrayValues( + array, + descriptor, + specFormatter, + specTerminator, + getCondition, + getDataTuple, + switchToCondition=None, + ): """ This method generates an array of spec entries for interface members. It returns a tuple containing the array of spec entries and the maximum of the number of @@ -2283,6 +2533,7 @@ def generatePrefableArrayValues(array, descriptor, specFormatter, specTerminator previously generated spec entries. If None is passed for this function then all the interface members should return the same value from getCondition. """ + def unsupportedSwitchToCondition(condition, specs): # If no specs have been added yet then this is just the first call to # switchToCondition that we call to avoid putting a specTerminator at the @@ -2323,8 +2574,16 @@ def unsupportedSwitchToCondition(condition, specs): return (specs, maxNumSpecsInPrefable) - def generatePrefableArray(self, array, name, specFormatter, specTerminator, - specType, getCondition, getDataTuple): + def generatePrefableArray( + self, + array, + name, + specFormatter, + specTerminator, + specType, + getCondition, + getDataTuple, + ): """ This method generates our various arrays. 
@@ -2363,29 +2622,43 @@ def generatePrefableArray(self, array, name, specFormatter, specTerminator, static const PrefableDisablers %s_disablers%d = { %s, %s, %s, %s }; - """) - prefableWithDisablersTemplate = ' { &%s_disablers%d, &%s_specs[%d] }' - prefableWithoutDisablersTemplate = ' { nullptr, &%s_specs[%d] }' - prefCacheTemplate = '&%s[%d].disablers->enabled' + """ + ) + prefableWithDisablersTemplate = " { &%s_disablers%d, &%s_specs[%d] }" + prefableWithoutDisablersTemplate = " { nullptr, &%s_specs[%d] }" + prefCacheTemplate = "&%s[%d].disablers->enabled" def switchToCondition(condition, specs): # Set up pointers to the new sets of specs inside prefableSpecs if condition.hasDisablers(): - prefableSpecs.append(prefableWithDisablersTemplate % - (name, len(specs), name, len(specs))) - disablers.append(disablersTemplate % - (name, len(specs), - condition.prefFuncIndex, - toStringBool(condition.secureContext), - condition.nonExposedGlobals, - condition.func)) + prefableSpecs.append( + prefableWithDisablersTemplate % (name, len(specs), name, len(specs)) + ) + disablers.append( + disablersTemplate + % ( + name, + len(specs), + condition.prefFuncIndex, + toStringBool(condition.secureContext), + condition.nonExposedGlobals, + condition.func, + ) + ) else: - prefableSpecs.append(prefableWithoutDisablersTemplate % - (name, len(specs))) + prefableSpecs.append( + prefableWithoutDisablersTemplate % (name, len(specs)) + ) specs, maxNumSpecsInPrefable = self.generatePrefableArrayValues( - array, self.descriptor, specFormatter, specTerminator, getCondition, - getDataTuple, switchToCondition) + array, + self.descriptor, + specFormatter, + specTerminator, + getCondition, + getDataTuple, + switchToCondition, + ) prefableSpecs.append(" { nullptr, nullptr }") specType = "const " + specType @@ -2404,9 +2677,10 @@ def switchToCondition(condition, specs): """, specType=specType, name=name, - disablers='\n'.join(disablers), - specs=',\n'.join(specs), - prefableSpecs=',\n'.join(prefableSpecs)) + disablers="\n".join(disablers), + specs=",\n".join(specs), + prefableSpecs=",\n".join(prefableSpecs), + ) if self.usedForXrays(): arrays = fill( @@ -2420,8 +2694,9 @@ def switchToCondition(condition, specs): """, arrays=arrays, # Minus 1 because there's a list terminator in prefableSpecs. - numPrefableSpecs=len(prefableSpecs)-1, - maxNumSpecsInPrefable=maxNumSpecsInPrefable) + numPrefableSpecs=len(prefableSpecs) - 1, + maxNumSpecsInPrefable=maxNumSpecsInPrefable, + ) return arrays @@ -2441,11 +2716,13 @@ def methodLength(method): def clearableCachedAttrs(descriptor): - return (m for m in descriptor.interface.members if - m.isAttr() and - # Constants should never need clearing! - m.dependsOn != "Nothing" and - m.slotIndices is not None) + return ( + m + for m in descriptor.interface.members + if m.isAttr() and + # Constants should never need clearing! + m.dependsOn != "Nothing" and m.slotIndices is not None + ) def MakeClearCachedValueNativeName(member): @@ -2466,6 +2743,7 @@ class MethodDefiner(PropertyDefiner): """ A class for defining methods on a prototype object. 
""" + def __init__(self, descriptor, name, crossOriginOnly, static, unforgeable=False): assert not (static and unforgeable) PropertyDefiner.__init__(self, descriptor, name) @@ -2476,12 +2754,18 @@ def __init__(self, descriptor, name, crossOriginOnly, static, unforgeable=False) # Ignore non-static methods for interfaces without a proto object if descriptor.interface.hasInterfacePrototypeObject() or static: - methods = [m for m in descriptor.interface.members if - m.isMethod() and m.isStatic() == static and - MemberIsUnforgeable(m, descriptor) == unforgeable and - (not crossOriginOnly or m.getExtendedAttribute("CrossOriginCallable")) and - not m.isIdentifierLess() and - not m.getExtendedAttribute("Unexposed")] + methods = [ + m + for m in descriptor.interface.members + if m.isMethod() + and m.isStatic() == static + and MemberIsUnforgeable(m, descriptor) == unforgeable + and ( + not crossOriginOnly or m.getExtendedAttribute("CrossOriginCallable") + ) + and not m.isIdentifierLess() + and not m.getExtendedAttribute("Unexposed") + ] else: methods = [] self.chrome = [] @@ -2490,7 +2774,9 @@ def __init__(self, descriptor, name, crossOriginOnly, static, unforgeable=False) method = self.methodData(m, descriptor) if m.isStatic(): - method["nativeName"] = CppKeywords.checkMethodName(IDLToCIdentifier(m.identifier.name)) + method["nativeName"] = CppKeywords.checkMethodName( + IDLToCIdentifier(m.identifier.name) + ) if isChromeOnly(m): self.chrome.append(method) @@ -2502,114 +2788,137 @@ def __init__(self, descriptor, name, crossOriginOnly, static, unforgeable=False) # is resolved. # https://www.w3.org/Bugs/Public/show_bug.cgi?id=28592 def hasIterator(methods, regular): - return (any("@@iterator" in m.aliases for m in methods) or - any("@@iterator" == r["name"] for r in regular)) + return any("@@iterator" in m.aliases for m in methods) or any( + "@@iterator" == r["name"] for r in regular + ) # Check whether we need to output an @@iterator due to having an indexed # getter. We only do this while outputting non-static and # non-unforgeable methods, since the @@iterator function will be # neither. - if (not static and - not unforgeable and - descriptor.supportsIndexedProperties()): + if not static and not unforgeable and descriptor.supportsIndexedProperties(): if hasIterator(methods, self.regular): - raise TypeError("Cannot have indexed getter/attr on " - "interface %s with other members " - "that generate @@iterator, such as " - "maplike/setlike or aliased functions." % - self.descriptor.interface.identifier.name) - self.regular.append({ - "name": "@@iterator", - "methodInfo": False, - "selfHostedName": "$ArrayValues", - "length": 0, - "flags": "0", # Not enumerable, per spec. - "condition": MemberCondition() - }) + raise TypeError( + "Cannot have indexed getter/attr on " + "interface %s with other members " + "that generate @@iterator, such as " + "maplike/setlike or aliased functions." + % self.descriptor.interface.identifier.name + ) + self.regular.append( + { + "name": "@@iterator", + "methodInfo": False, + "selfHostedName": "$ArrayValues", + "length": 0, + "flags": "0", # Not enumerable, per spec. + "condition": MemberCondition(), + } + ) # Generate the keys/values/entries aliases for value iterables. 
maplikeOrSetlikeOrIterable = descriptor.interface.maplikeOrSetlikeOrIterable - if (not static and - not unforgeable and - maplikeOrSetlikeOrIterable and - maplikeOrSetlikeOrIterable.isIterable() and - maplikeOrSetlikeOrIterable.isValueIterator()): + if ( + not static + and not unforgeable + and maplikeOrSetlikeOrIterable + and maplikeOrSetlikeOrIterable.isIterable() + and maplikeOrSetlikeOrIterable.isValueIterator() + ): # Add our keys/values/entries/forEach - self.regular.append({ - "name": "keys", - "methodInfo": False, - "selfHostedName": "ArrayKeys", - "length": 0, - "flags": "JSPROP_ENUMERATE", - "condition": PropertyDefiner.getControllingCondition( - maplikeOrSetlikeOrIterable, descriptor) - }) - self.regular.append({ - "name": "values", - "methodInfo": False, - "selfHostedName": "$ArrayValues", - "length": 0, - "flags": "JSPROP_ENUMERATE", - "condition": PropertyDefiner.getControllingCondition( - maplikeOrSetlikeOrIterable, descriptor) - }) - self.regular.append({ - "name": "entries", - "methodInfo": False, - "selfHostedName": "ArrayEntries", - "length": 0, - "flags": "JSPROP_ENUMERATE", - "condition": PropertyDefiner.getControllingCondition( - maplikeOrSetlikeOrIterable, descriptor) - }) - self.regular.append({ - "name": "forEach", - "methodInfo": False, - "selfHostedName": "ArrayForEach", - "length": 1, - "flags": "JSPROP_ENUMERATE", - "condition": PropertyDefiner.getControllingCondition( - maplikeOrSetlikeOrIterable, descriptor) - }) + self.regular.append( + { + "name": "keys", + "methodInfo": False, + "selfHostedName": "ArrayKeys", + "length": 0, + "flags": "JSPROP_ENUMERATE", + "condition": PropertyDefiner.getControllingCondition( + maplikeOrSetlikeOrIterable, descriptor + ), + } + ) + self.regular.append( + { + "name": "values", + "methodInfo": False, + "selfHostedName": "$ArrayValues", + "length": 0, + "flags": "JSPROP_ENUMERATE", + "condition": PropertyDefiner.getControllingCondition( + maplikeOrSetlikeOrIterable, descriptor + ), + } + ) + self.regular.append( + { + "name": "entries", + "methodInfo": False, + "selfHostedName": "ArrayEntries", + "length": 0, + "flags": "JSPROP_ENUMERATE", + "condition": PropertyDefiner.getControllingCondition( + maplikeOrSetlikeOrIterable, descriptor + ), + } + ) + self.regular.append( + { + "name": "forEach", + "methodInfo": False, + "selfHostedName": "ArrayForEach", + "length": 1, + "flags": "JSPROP_ENUMERATE", + "condition": PropertyDefiner.getControllingCondition( + maplikeOrSetlikeOrIterable, descriptor + ), + } + ) if not static: - stringifier = descriptor.operations['Stringifier'] - if (stringifier and - unforgeable == MemberIsUnforgeable(stringifier, descriptor)): + stringifier = descriptor.operations["Stringifier"] + if stringifier and unforgeable == MemberIsUnforgeable( + stringifier, descriptor + ): toStringDesc = { "name": GetWebExposedName(stringifier, descriptor), "nativeName": stringifier.identifier.name, "length": 0, "flags": "JSPROP_ENUMERATE", - "condition": PropertyDefiner.getControllingCondition(stringifier, descriptor) + "condition": PropertyDefiner.getControllingCondition( + stringifier, descriptor + ), } if isChromeOnly(stringifier): self.chrome.append(toStringDesc) else: self.regular.append(toStringDesc) - if (unforgeable and - descriptor.interface.getExtendedAttribute("Unforgeable")): + if unforgeable and descriptor.interface.getExtendedAttribute("Unforgeable"): # Synthesize our valueOf method - self.regular.append({ - "name": 'valueOf', - "selfHostedName": "Object_valueOf", - "methodInfo": False, - "length": 0, - 
"flags": "0", # readonly/permanent added automatically. - "condition": MemberCondition() - }) + self.regular.append( + { + "name": "valueOf", + "selfHostedName": "Object_valueOf", + "methodInfo": False, + "length": 0, + "flags": "0", # readonly/permanent added automatically. + "condition": MemberCondition(), + } + ) if descriptor.interface.isJSImplemented(): if static: if descriptor.interface.hasInterfaceObject(): - self.chrome.append({ - "name": '_create', - "nativeName": ("%s::_Create" % descriptor.name), - "methodInfo": False, - "length": 2, - "flags": "0", - "condition": MemberCondition() - }) + self.chrome.append( + { + "name": "_create", + "nativeName": ("%s::_Create" % descriptor.name), + "methodInfo": False, + "length": 2, + "flags": "0", + "condition": MemberCondition(), + } + ) self.unforgeable = unforgeable @@ -2625,21 +2934,23 @@ def hasIterator(methods, regular): @staticmethod def methodData(m, descriptor, overrideFlags=None): return { - "name": m.identifier.name, - "methodInfo": not m.isStatic(), - "length": methodLength(m), - "flags": EnumerabilityFlags(m) if (overrideFlags is None) else overrideFlags, - "condition": PropertyDefiner.getControllingCondition(m, descriptor), - "allowCrossOriginThis": m.getExtendedAttribute("CrossOriginCallable"), - "returnsPromise": m.returnsPromise(), - "hasIteratorAlias": "@@iterator" in m.aliases - } + "name": m.identifier.name, + "methodInfo": not m.isStatic(), + "length": methodLength(m), + "flags": EnumerabilityFlags(m) + if (overrideFlags is None) + else overrideFlags, + "condition": PropertyDefiner.getControllingCondition(m, descriptor), + "allowCrossOriginThis": m.getExtendedAttribute("CrossOriginCallable"), + "returnsPromise": m.returnsPromise(), + "hasIteratorAlias": "@@iterator" in m.aliases, + } @staticmethod def formatSpec(fields): if fields[0].startswith("@@"): fields = (fields[0][2:],) + fields[1:] - return ' JS_SYM_FNSPEC(%s, %s, %s, %s, %s, %s)' % fields + return " JS_SYM_FNSPEC(%s, %s, %s, %s, %s, %s)" % fields return ' JS_FNSPEC("%s", %s, %s, %s, %s, %s)' % fields @staticmethod @@ -2666,19 +2977,24 @@ def flags(m, unforgeable): # Cast this in case the methodInfo is a # JSTypedMethodJitInfo. 
- jitinfo = ("reinterpret_cast(&%s_methodinfo)" % accessor) + jitinfo = ( + "reinterpret_cast(&%s_methodinfo)" % accessor + ) if m.get("allowCrossOriginThis", False): - accessor = ("(GenericMethod)" % - exceptionPolicy) + accessor = ( + "(GenericMethod)" % exceptionPolicy + ) elif descriptor.interface.hasDescendantWithCrossOriginMembers: - accessor = ("(GenericMethod)" % - exceptionPolicy) + accessor = ( + "(GenericMethod)" + % exceptionPolicy + ) elif descriptor.interface.isOnGlobalProtoChain(): - accessor = ("(GenericMethod)" % - exceptionPolicy) + accessor = ( + "(GenericMethod)" % exceptionPolicy + ) else: - accessor = ("(GenericMethod)" % - exceptionPolicy) + accessor = "(GenericMethod)" % exceptionPolicy else: if m.get("returnsPromise", False): jitinfo = "&%s_methodinfo" % accessor @@ -2686,7 +3002,14 @@ def flags(m, unforgeable): else: jitinfo = "nullptr" - return (m["name"], accessor, jitinfo, m["length"], flags(m, unforgeable), selfHostedName) + return ( + m["name"], + accessor, + jitinfo, + m["length"], + flags(m, unforgeable), + selfHostedName, + ) @staticmethod def condition(m, d): @@ -2697,11 +3020,14 @@ def generateArray(self, array, name): return "" return self.generatePrefableArray( - array, name, + array, + name, self.formatSpec, - ' JS_FS_END', - 'JSFunctionSpec', - self.condition, functools.partial(self.specData, unforgeable=self.unforgeable)) + " JS_FS_END", + "JSFunctionSpec", + self.condition, + functools.partial(self.specData, unforgeable=self.unforgeable), + ) class AttrDefiner(PropertyDefiner): @@ -2711,11 +3037,18 @@ def __init__(self, descriptor, name, crossOriginOnly, static, unforgeable=False) self.name = name # Ignore non-static attributes for interfaces without a proto object if descriptor.interface.hasInterfacePrototypeObject() or static: - idlAttrs = [m for m in descriptor.interface.members if - m.isAttr() and m.isStatic() == static and - MemberIsUnforgeable(m, descriptor) == unforgeable and - (not crossOriginOnly or m.getExtendedAttribute("CrossOriginReadable") or - m.getExtendedAttribute("CrossOriginWritable"))] + idlAttrs = [ + m + for m in descriptor.interface.members + if m.isAttr() + and m.isStatic() == static + and MemberIsUnforgeable(m, descriptor) == unforgeable + and ( + not crossOriginOnly + or m.getExtendedAttribute("CrossOriginReadable") + or m.getExtendedAttribute("CrossOriginWritable") + ) + ] else: idlAttrs = [] @@ -2726,8 +3059,14 @@ def __init__(self, descriptor, name, crossOriginOnly, static, unforgeable=False) self.regular = [m for m in attributes if not isChromeOnly(m["attr"])] self.static = static - if not static and not unforgeable and descriptor.interface.hasInterfacePrototypeObject(): - self.regular.append({"name": "@@toStringTag", "attr": None, "flags": "JSPROP_READONLY"}) + if ( + not static + and not unforgeable + and descriptor.interface.hasInterfacePrototypeObject() + ): + self.regular.append( + {"name": "@@toStringTag", "attr": None, "flags": "JSPROP_READONLY"} + ) if static: if not descriptor.interface.hasInterfaceObject(): @@ -2745,7 +3084,10 @@ def attrData(attr, unforgeable=False, overrideFlags=None): flags = EnumerabilityFlags(attr) + permanent else: flags = overrideFlags - return ({"name": name, "attr": attr, "flags": flags} for name in [attr.identifier.name] + attr.bindingAliases) + return ( + {"name": name, "attr": attr, "flags": flags} + for name in [attr.identifier.name] + attr.bindingAliases + ) @staticmethod def condition(m, d): @@ -2763,12 +3105,12 @@ def getter(attr): return "nullptr, nullptr" if static: if 
attr.type.isPromise(): - raise TypeError("Don't know how to handle " - "static Promise-returning " - "attribute %s.%s" % - (self.descriptor.name, - attr.identifier.name)) - accessor = 'get_' + IDLToCIdentifier(attr.identifier.name) + raise TypeError( + "Don't know how to handle " + "static Promise-returning " + "attribute %s.%s" % (self.descriptor.name, attr.identifier.name) + ) + accessor = "get_" + IDLToCIdentifier(attr.identifier.name) jitinfo = "nullptr" else: if attr.type.isPromise(): @@ -2778,53 +3120,63 @@ def getter(attr): if attr.hasLenientThis(): if attr.getExtendedAttribute("CrossOriginReadable"): - raise TypeError("Can't handle lenient cross-origin " - "readable attribute %s.%s" % - (self.descriptor.name, - attr.identifier.name)) + raise TypeError( + "Can't handle lenient cross-origin " + "readable attribute %s.%s" + % (self.descriptor.name, attr.identifier.name) + ) if descriptor.interface.hasDescendantWithCrossOriginMembers: - accessor = ("GenericGetter" % - exceptionPolicy) + accessor = ( + "GenericGetter" + % exceptionPolicy + ) else: - accessor = ("GenericGetter" % - exceptionPolicy) + accessor = ( + "GenericGetter" % exceptionPolicy + ) elif attr.getExtendedAttribute("CrossOriginReadable"): - accessor = ("GenericGetter" % - exceptionPolicy) + accessor = ( + "GenericGetter" % exceptionPolicy + ) elif descriptor.interface.hasDescendantWithCrossOriginMembers: - accessor = ("GenericGetter" % - exceptionPolicy) + accessor = ( + "GenericGetter" + % exceptionPolicy + ) elif descriptor.interface.isOnGlobalProtoChain(): - accessor = ("GenericGetter" % - exceptionPolicy) + accessor = ( + "GenericGetter" % exceptionPolicy + ) else: - accessor = ("GenericGetter" % - exceptionPolicy) - jitinfo = ("&%s_getterinfo" % - IDLToCIdentifier(attr.identifier.name)) - return "%s, %s" % \ - (accessor, jitinfo) + accessor = "GenericGetter" % exceptionPolicy + jitinfo = "&%s_getterinfo" % IDLToCIdentifier(attr.identifier.name) + return "%s, %s" % (accessor, jitinfo) def setter(attr): - if (attr.readonly and - attr.getExtendedAttribute("PutForwards") is None and - attr.getExtendedAttribute("Replaceable") is None and - attr.getExtendedAttribute("LenientSetter") is None): + if ( + attr.readonly + and attr.getExtendedAttribute("PutForwards") is None + and attr.getExtendedAttribute("Replaceable") is None + and attr.getExtendedAttribute("LenientSetter") is None + ): return "nullptr, nullptr" if crossOriginOnly and not attr.getExtendedAttribute("CrossOriginWritable"): return "nullptr, nullptr" if static: - accessor = 'set_' + IDLToCIdentifier(attr.identifier.name) + accessor = "set_" + IDLToCIdentifier(attr.identifier.name) jitinfo = "nullptr" else: if attr.hasLenientThis(): if attr.getExtendedAttribute("CrossOriginWritable"): - raise TypeError("Can't handle lenient cross-origin " - "writable attribute %s.%s" % - (descriptor.name, - attr.identifier.name)) + raise TypeError( + "Can't handle lenient cross-origin " + "writable attribute %s.%s" + % (descriptor.name, attr.identifier.name) + ) if descriptor.interface.hasDescendantWithCrossOriginMembers: - accessor = "GenericSetter" + accessor = ( + "GenericSetter" + ) else: accessor = "GenericSetter" elif attr.getExtendedAttribute("CrossOriginWritable"): @@ -2836,8 +3188,7 @@ def setter(attr): else: accessor = "GenericSetter" jitinfo = "&%s_setterinfo" % IDLToCIdentifier(attr.identifier.name) - return "%s, %s" % \ - (accessor, jitinfo) + return "%s, %s" % (accessor, jitinfo) name, attr, flags = entry["name"], entry["attr"], entry["flags"] return (name, flags, 
getter(attr), setter(attr)) @@ -2845,7 +3196,11 @@ def setter(attr): @staticmethod def formatSpec(fields): if fields[0] == "@@toStringTag": - return ' JS_STRING_SYM_PS(%s, "%s", %s)' % (fields[0][2:], fields[2], fields[1]) + return ' JS_STRING_SYM_PS(%s, "%s", %s)' % ( + fields[0][2:], + fields[2], + fields[1], + ) return ' JSPropertySpec::nativeAccessors("%s", %s, %s, %s)' % fields @@ -2854,16 +3209,21 @@ def generateArray(self, array, name): return "" return self.generatePrefableArray( - array, name, self.formatSpec, - ' JS_PS_END', - 'JSPropertySpec', - self.condition, functools.partial(self.specData, static=self.static)) + array, + name, + self.formatSpec, + " JS_PS_END", + "JSPropertySpec", + self.condition, + functools.partial(self.specData, static=self.static), + ) class ConstDefiner(PropertyDefiner): """ A class for definining constants on the interface object """ + def __init__(self, descriptor, name): PropertyDefiner.__init__(self, descriptor, name) self.name = name @@ -2876,37 +3236,60 @@ def generateArray(self, array, name): return "" def specData(const, descriptor): - return (const.identifier.name, - convertConstIDLValueToJSVal(const.value)) + return (const.identifier.name, convertConstIDLValueToJSVal(const.value)) return self.generatePrefableArray( - array, name, + array, + name, lambda fields: ' { "%s", %s }' % fields, - ' { 0, JS::UndefinedValue() }', - 'ConstantSpec', - PropertyDefiner.getControllingCondition, specData) + " { 0, JS::UndefinedValue() }", + "ConstantSpec", + PropertyDefiner.getControllingCondition, + specData, + ) -class PropertyArrays(): +class PropertyArrays: def __init__(self, descriptor, crossOriginOnly=False): - self.staticMethods = MethodDefiner(descriptor, "StaticMethods", crossOriginOnly, - static=True) - self.staticAttrs = AttrDefiner(descriptor, "StaticAttributes", crossOriginOnly, - static=True) - self.methods = MethodDefiner(descriptor, "Methods", crossOriginOnly, static=False) - self.attrs = AttrDefiner(descriptor, "Attributes", crossOriginOnly, static=False) - self.unforgeableMethods = MethodDefiner(descriptor, "UnforgeableMethods", - crossOriginOnly, static=False, - unforgeable=True) - self.unforgeableAttrs = AttrDefiner(descriptor, "UnforgeableAttributes", - crossOriginOnly, static=False, - unforgeable=True) + self.staticMethods = MethodDefiner( + descriptor, "StaticMethods", crossOriginOnly, static=True + ) + self.staticAttrs = AttrDefiner( + descriptor, "StaticAttributes", crossOriginOnly, static=True + ) + self.methods = MethodDefiner( + descriptor, "Methods", crossOriginOnly, static=False + ) + self.attrs = AttrDefiner( + descriptor, "Attributes", crossOriginOnly, static=False + ) + self.unforgeableMethods = MethodDefiner( + descriptor, + "UnforgeableMethods", + crossOriginOnly, + static=False, + unforgeable=True, + ) + self.unforgeableAttrs = AttrDefiner( + descriptor, + "UnforgeableAttributes", + crossOriginOnly, + static=False, + unforgeable=True, + ) self.consts = ConstDefiner(descriptor, "Constants") @staticmethod def arrayNames(): - return ["staticMethods", "staticAttrs", "methods", "attrs", - "unforgeableMethods", "unforgeableAttrs", "consts"] + return [ + "staticMethods", + "staticAttrs", + "methods", + "attrs", + "unforgeableMethods", + "unforgeableAttrs", + "consts", + ] def hasChromeOnly(self): return any(getattr(self, a).hasChromeOnly() for a in self.arrayNames()) @@ -2927,19 +3310,20 @@ class CGConstDefinition(CGThing): for the member. Should be part of the interface namespace in the header file. 
""" + def __init__(self, member): - assert (member.isConst() and - member.value.type.isPrimitive() and - not member.value.type.nullable()) + assert ( + member.isConst() + and member.value.type.isPrimitive() + and not member.value.type.nullable() + ) name = CppKeywords.checkMethodName(IDLToCIdentifier(member.identifier.name)) tag = member.value.type.tag() value = member.value.value if tag == IDLType.Tags.bool: value = toStringBool(member.value.value) - self.const = "static const %s %s = %s;" % (builtinNames[tag], - name, - value) + self.const = "static const %s %s = %s;" % (builtinNames[tag], name, value) def declare(self): return self.const @@ -2989,12 +3373,13 @@ def check(p): break nativePropsInts.append(CGGeneric(str(iteratorAliasIndex))) - nativePropsDuos = \ - [CGWrapper(CGIndenter(CGList(nativePropsDuos, ",\n")), - pre='{\n', post='\n}')] + nativePropsDuos = [ + CGWrapper( + CGIndenter(CGList(nativePropsDuos, ",\n")), pre="{\n", post="\n}" + ) + ] - pre = ("static const NativePropertiesN<%d> %s = {\n" % - (duosOffset, name)) + pre = "static const NativePropertiesN<%d> %s = {\n" % (duosOffset, name) post = "\n};\n" if descriptor.wantsXrays: pre = fill( @@ -3006,7 +3391,8 @@ def check(p): """, name=name, size=idsOffset, - pre=pre) + pre=pre, + ) if iteratorAliasIndex > 0: # The iteratorAliasMethodIndex is a signed integer, so the # max value it can store is 2^(nbits-1)-1. @@ -3018,7 +3404,8 @@ def check(p): """, post=post, iteratorAliasIndex=iteratorAliasIndex, - name=name) + name=name, + ) post = fill( """ $*{post} @@ -3027,23 +3414,25 @@ def check(p): """, post=post, propertyInfoCount=idsOffset, - name=name) + name=name, + ) nativePropsInts.append(CGGeneric("%d" % idsOffset)) nativePropsPtrs.append(CGGeneric("%s_sortedPropertyIndices" % name)) else: nativePropsInts.append(CGGeneric("0")) nativePropsPtrs.append(CGGeneric("nullptr")) nativeProps = nativePropsInts + nativePropsPtrs + nativePropsDuos - return CGWrapper(CGIndenter(CGList(nativeProps, ",\n")), - pre=pre, post=post) + return CGWrapper(CGIndenter(CGList(nativeProps, ",\n")), pre=pre, post=post) nativeProperties = [] if properties.hasNonChromeOnly(): nativeProperties.append( - generateNativeProperties("sNativeProperties", False)) + generateNativeProperties("sNativeProperties", False) + ) if properties.hasChromeOnly(): nativeProperties.append( - generateNativeProperties("sChromeOnlyNativeProperties", True)) + generateNativeProperties("sChromeOnlyNativeProperties", True) + ) CGList.__init__(self, nativeProperties, "\n") @@ -3058,20 +3447,25 @@ class CGCollectJSONAttributesMethod(CGAbstractMethod): """ Generate the CollectJSONAttributes method for an interface descriptor """ + def __init__(self, descriptor, toJSONMethod): - args = [Argument('JSContext*', 'cx'), - Argument('JS::Handle', 'obj'), - Argument('%s*' % descriptor.nativeType, 'self'), - Argument('JS::Rooted&', 'result')] - CGAbstractMethod.__init__(self, descriptor, 'CollectJSONAttributes', - 'bool', args, canRunScript=True) + args = [ + Argument("JSContext*", "cx"), + Argument("JS::Handle", "obj"), + Argument("%s*" % descriptor.nativeType, "self"), + Argument("JS::Rooted&", "result"), + ] + CGAbstractMethod.__init__( + self, descriptor, "CollectJSONAttributes", "bool", args, canRunScript=True + ) self.toJSONMethod = toJSONMethod def definition_body(self): - ret = '' + ret = "" interface = self.descriptor.interface - toJSONCondition = PropertyDefiner.getControllingCondition(self.toJSONMethod, - self.descriptor) + toJSONCondition = 
PropertyDefiner.getControllingCondition( + self.toJSONMethod, self.descriptor + ) needUnwrappedObj = False for m in interface.members: if m.isAttr() and not m.isStatic() and m.type.isJSONType(): @@ -3085,14 +3479,15 @@ def definition_body(self): return false; } """, - name=IDLToCIdentifier(m.identifier.name)) + name=IDLToCIdentifier(m.identifier.name), + ) # Make sure we don't include things which are supposed to be # disabled. Things that either don't have disablers or whose # disablers match the disablers for our toJSON method can't # possibly be disabled, but other things might be. condition = PropertyDefiner.getControllingCondition(m, self.descriptor) if condition.hasDisablers() and condition != toJSONCondition: - needUnwrappedObj = True; + needUnwrappedObj = True ret += fill( """ // This is unfortunately a linear scan through sAttributes, but we @@ -3103,7 +3498,8 @@ def definition_body(self): } """, name=IDLToCIdentifier(m.identifier.name), - getAndDefine=getAndDefine) + getAndDefine=getAndDefine, + ) else: ret += fill( """ @@ -3111,8 +3507,9 @@ def definition_body(self): $*{getAndDefine} } """, - getAndDefine=getAndDefine) - ret += 'return true;\n' + getAndDefine=getAndDefine, + ) + ret += "return true;\n" if needUnwrappedObj: # If we started allowing cross-origin objects here, we'd need to @@ -3120,7 +3517,7 @@ def definition_body(self): # But in practice no one is trying to add toJSON methods to those, # so let's just guard against it. assert not self.descriptor.isMaybeCrossOriginObject() - ret= fill( + ret = fill( """ JS::Rooted unwrappedObj(cx, js::CheckedUnwrapStatic(obj)); if (!unwrappedObj) { @@ -3131,7 +3528,8 @@ def definition_body(self): $*{ret} """, - ret=ret); + ret=ret, + ) return ret @@ -3142,18 +3540,27 @@ class CGCreateInterfaceObjectsMethod(CGAbstractMethod): properties should be a PropertyArrays instance. """ - def __init__(self, descriptor, properties, haveUnscopables, haveLegacyWindowAliases): - args = [Argument('JSContext*', 'aCx'), - Argument('JS::Handle', 'aGlobal'), - Argument('ProtoAndIfaceCache&', 'aProtoAndIfaceCache'), - Argument('bool', 'aDefineOnGlobal')] - CGAbstractMethod.__init__(self, descriptor, 'CreateInterfaceObjects', 'void', args) + + def __init__( + self, descriptor, properties, haveUnscopables, haveLegacyWindowAliases + ): + args = [ + Argument("JSContext*", "aCx"), + Argument("JS::Handle", "aGlobal"), + Argument("ProtoAndIfaceCache&", "aProtoAndIfaceCache"), + Argument("bool", "aDefineOnGlobal"), + ] + CGAbstractMethod.__init__( + self, descriptor, "CreateInterfaceObjects", "void", args + ) self.properties = properties self.haveUnscopables = haveUnscopables self.haveLegacyWindowAliases = haveLegacyWindowAliases def definition_body(self): - (protoGetter, protoHandleGetter) = InterfacePrototypeObjectProtoGetter(self.descriptor) + (protoGetter, protoHandleGetter) = InterfacePrototypeObjectProtoGetter( + self.descriptor + ) if protoHandleGetter is None: parentProtoType = "Rooted" getParentProto = "aCx, " + protoGetter @@ -3172,7 +3579,9 @@ def definition_body(self): getConstructorProto += "(aCx)" needInterfaceObject = self.descriptor.interface.hasInterfaceObject() - needInterfacePrototypeObject = self.descriptor.interface.hasInterfacePrototypeObject() + needInterfacePrototypeObject = ( + self.descriptor.interface.hasInterfacePrototypeObject() + ) # if we don't need to create anything, why are we generating this? 
assert needInterfaceObject or needInterfacePrototypeObject @@ -3185,7 +3594,8 @@ def definition_body(self): } """, type=parentProtoType, - getParentProto=getParentProto) + getParentProto=getParentProto, + ) getConstructorProto = fill( """ @@ -3195,7 +3605,8 @@ def definition_body(self): } """, type=constructorProtoType, - getConstructorProto=getConstructorProto) + getConstructorProto=getConstructorProto, + ) idsToInit = [] # There is no need to init any IDs in bindings that don't want Xrays. @@ -3205,15 +3616,25 @@ def definition_body(self): if self.properties.hasChromeOnly(): idsToInit.append("sChromeOnlyNativeProperties") if len(idsToInit) > 0: - initIdCalls = ["!InitIds(aCx, %s.Upcast())" % (properties) - for properties in idsToInit] - idsInitedFlag = CGGeneric("static Atomic sIdsInited(false);\n") + initIdCalls = [ + "!InitIds(aCx, %s.Upcast())" % (properties) for properties in idsToInit + ] + idsInitedFlag = CGGeneric( + "static Atomic sIdsInited(false);\n" + ) setFlag = CGGeneric("sIdsInited = true;\n") - initIdConditionals = [CGIfWrapper(CGGeneric("return;\n"), call) - for call in initIdCalls] - initIds = CGList([idsInitedFlag, - CGIfWrapper(CGList(initIdConditionals + [setFlag]), - "!sIdsInited && NS_IsMainThread()")]) + initIdConditionals = [ + CGIfWrapper(CGGeneric("return;\n"), call) for call in initIdCalls + ] + initIds = CGList( + [ + idsInitedFlag, + CGIfWrapper( + CGList(initIdConditionals + [setFlag]), + "!sIdsInited && NS_IsMainThread()", + ), + ] + ) else: initIds = None @@ -3228,7 +3649,10 @@ def definition_body(self): if needInterfacePrototypeObject: protoClass = "&sPrototypeClass.mBase" - protoCache = "&aProtoAndIfaceCache.EntrySlotOrCreate(prototypes::id::%s)" % self.descriptor.name + protoCache = ( + "&aProtoAndIfaceCache.EntrySlotOrCreate(prototypes::id::%s)" + % self.descriptor.name + ) parentProto = "parentProto" getParentProto = CGGeneric(getParentProto) else: @@ -3239,7 +3663,10 @@ def definition_body(self): if needInterfaceObject: interfaceClass = "&sInterfaceObjectClass.mBase" - interfaceCache = "&aProtoAndIfaceCache.EntrySlotOrCreate(constructors::id::%s)" % self.descriptor.name + interfaceCache = ( + "&aProtoAndIfaceCache.EntrySlotOrCreate(constructors::id::%s)" + % self.descriptor.name + ) getConstructorProto = CGGeneric(getConstructorProto) constructorProto = "constructorProto" else: @@ -3285,10 +3712,15 @@ def definition_body(self): interfaceCache=interfaceCache, properties=properties, chromeProperties=chromeProperties, - name='"' + self.descriptor.interface.identifier.name + '"' if needInterfaceObject else "nullptr", + name='"' + self.descriptor.interface.identifier.name + '"' + if needInterfaceObject + else "nullptr", unscopableNames="unscopableNames" if self.haveUnscopables else "nullptr", isGlobal=toStringBool(isGlobal), - legacyWindowAliases="legacyWindowAliases" if self.haveLegacyWindowAliases else "nullptr") + legacyWindowAliases="legacyWindowAliases" + if self.haveLegacyWindowAliases + else "nullptr", + ) # If we fail after here, we must clear interface and prototype caches # using this code: intermediate failure must not expose the interface in @@ -3301,22 +3733,31 @@ def definition_body(self): *interfaceCache = nullptr; } return; - """) + """ + ) - aliasedMembers = [m for m in self.descriptor.interface.members if m.isMethod() and m.aliases] + aliasedMembers = [ + m for m in self.descriptor.interface.members if m.isMethod() and m.aliases + ] if aliasedMembers: assert needInterfacePrototypeObject def defineAlias(alias): if alias == "@@iterator": 
symbolJSID = "SYMBOL_TO_JSID(JS::GetWellKnownSymbol(aCx, JS::SymbolCode::iterator))" - getSymbolJSID = CGGeneric(fill("JS::Rooted iteratorId(aCx, ${symbolJSID});", - symbolJSID=symbolJSID)) + getSymbolJSID = CGGeneric( + fill( + "JS::Rooted iteratorId(aCx, ${symbolJSID});", + symbolJSID=symbolJSID, + ) + ) defineFn = "JS_DefinePropertyById" prop = "iteratorId" - enumFlags = "0" # Not enumerable, per spec. + enumFlags = "0" # Not enumerable, per spec. elif alias.startswith("@@"): - raise TypeError("Can't handle any well-known Symbol other than @@iterator") + raise TypeError( + "Can't handle any well-known Symbol other than @@iterator" + ) else: getSymbolJSID = None defineFn = "JS_DefineProperty" @@ -3325,34 +3766,49 @@ def defineAlias(alias): # be aliased, we should consider making the aliases # match the enumerability of the property being aliased. enumFlags = "JSPROP_ENUMERATE" - return CGList([ - getSymbolJSID, - CGGeneric(fill( - """ + return CGList( + [ + getSymbolJSID, + CGGeneric( + fill( + """ if (!${defineFn}(aCx, proto, ${prop}, aliasedVal, ${enumFlags})) { $*{failureCode} } """, - defineFn=defineFn, - prop=prop, - enumFlags=enumFlags, - failureCode=failureCode)) - ], "\n") + defineFn=defineFn, + prop=prop, + enumFlags=enumFlags, + failureCode=failureCode, + ) + ), + ], + "\n", + ) def defineAliasesFor(m): - return CGList([ - CGGeneric(fill( - """ + return CGList( + [ + CGGeneric( + fill( + """ if (!JS_GetProperty(aCx, proto, \"${prop}\", &aliasedVal)) { $*{failureCode} } """, - failureCode=failureCode, - prop=m.identifier.name)) - ] + [defineAlias(alias) for alias in sorted(m.aliases)]) + failureCode=failureCode, + prop=m.identifier.name, + ) + ) + ] + + [defineAlias(alias) for alias in sorted(m.aliases)] + ) - defineAliases = CGList([ - CGGeneric(fill(""" + defineAliases = CGList( + [ + CGGeneric( + fill( + """ // Set up aliases on the interface prototype object we just created. 
JS::Handle proto = GetProtoObjectHandle(aCx); if (!proto) { @@ -3360,10 +3816,16 @@ def defineAliasesFor(m): } """, - failureCode=failureCode)), - CGGeneric("JS::Rooted aliasedVal(aCx);\n\n") - ] + [defineAliasesFor(m) - for m in sorted(aliasedMembers, key=lambda m: m.identifier.name)]) + failureCode=failureCode, + ) + ), + CGGeneric("JS::Rooted aliasedVal(aCx);\n\n"), + ] + + [ + defineAliasesFor(m) + for m in sorted(aliasedMembers, key=lambda m: m.identifier.name) + ] + ) else: defineAliases = None @@ -3384,8 +3846,9 @@ def defineAliasesFor(m): else: holderClass = "sClass.ToJSClass()" holderProto = "*protoCache" - createUnforgeableHolder = CGGeneric(fill( - """ + createUnforgeableHolder = CGGeneric( + fill( + """ JS::Rooted unforgeableHolder(aCx); { JS::Rooted holderProto(aCx, ${holderProto}); @@ -3395,32 +3858,42 @@ def defineAliasesFor(m): } } """, - holderProto=holderProto, - holderClass=holderClass, - failureCode=failureCode)) - defineUnforgeables = InitUnforgeablePropertiesOnHolder(self.descriptor, - self.properties, - failureCode) + holderProto=holderProto, + holderClass=holderClass, + failureCode=failureCode, + ) + ) + defineUnforgeables = InitUnforgeablePropertiesOnHolder( + self.descriptor, self.properties, failureCode + ) createUnforgeableHolder = CGList( - [createUnforgeableHolder, defineUnforgeables]) + [createUnforgeableHolder, defineUnforgeables] + ) - installUnforgeableHolder = CGGeneric(dedent( - """ + installUnforgeableHolder = CGGeneric( + dedent( + """ if (*protoCache) { JS::SetReservedSlot(*protoCache, DOM_INTERFACE_PROTO_SLOTS_BASE, JS::ObjectValue(*unforgeableHolder)); } - """)) + """ + ) + ) unforgeableHolderSetup = CGList( - [createUnforgeableHolder, installUnforgeableHolder], "\n") + [createUnforgeableHolder, installUnforgeableHolder], "\n" + ) else: unforgeableHolderSetup = None - if (self.descriptor.interface.isOnGlobalProtoChain() and - needInterfacePrototypeObject): - makeProtoPrototypeImmutable = CGGeneric(fill( - """ + if ( + self.descriptor.interface.isOnGlobalProtoChain() + and needInterfacePrototypeObject + ): + makeProtoPrototypeImmutable = CGGeneric( + fill( + """ if (*${protoCache}) { bool succeeded; JS::Handle prot = GetProtoObjectHandle(aCx); @@ -3434,28 +3907,41 @@ def defineAliasesFor(m): "never be unsuccessful"); } """, - protoCache=protoCache, - failureCode=failureCode)) + protoCache=protoCache, + failureCode=failureCode, + ) + ) else: makeProtoPrototypeImmutable = None return CGList( - [getParentProto, getConstructorProto, initIds, - CGGeneric(call), defineAliases, unforgeableHolderSetup, - makeProtoPrototypeImmutable], - "\n").define() + [ + getParentProto, + getConstructorProto, + initIds, + CGGeneric(call), + defineAliases, + unforgeableHolderSetup, + makeProtoPrototypeImmutable, + ], + "\n", + ).define() class CGGetProtoObjectHandleMethod(CGAbstractMethod): """ A method for getting the interface prototype object. """ + def __init__(self, descriptor): CGAbstractMethod.__init__( - self, descriptor, "GetProtoObjectHandle", - 'JS::Handle', - [Argument('JSContext*', 'aCx')], - inline=True) + self, + descriptor, + "GetProtoObjectHandle", + "JS::Handle", + [Argument("JSContext*", "aCx")], + inline=True, + ) def definition_body(self): return fill( @@ -3467,17 +3953,23 @@ def definition_body(self): /* aDefineOnGlobal = */ true); """, - name=self.descriptor.name) + name=self.descriptor.name, + ) class CGGetProtoObjectMethod(CGAbstractMethod): """ A method for getting the interface prototype object. 
""" + def __init__(self, descriptor): CGAbstractMethod.__init__( - self, descriptor, "GetProtoObject", "JSObject*", - [Argument('JSContext*', 'aCx')]) + self, + descriptor, + "GetProtoObject", + "JSObject*", + [Argument("JSContext*", "aCx")], + ) def definition_body(self): return "return GetProtoObjectHandle(aCx);\n" @@ -3487,13 +3979,19 @@ class CGGetConstructorObjectHandleMethod(CGAbstractMethod): """ A method for getting the interface constructor object. """ + def __init__(self, descriptor): CGAbstractMethod.__init__( - self, descriptor, "GetConstructorObjectHandle", - 'JS::Handle', - [Argument('JSContext*', 'aCx'), - Argument('bool', 'aDefineOnGlobal', 'true')], - inline=True) + self, + descriptor, + "GetConstructorObjectHandle", + "JS::Handle", + [ + Argument("JSContext*", "aCx"), + Argument("bool", "aDefineOnGlobal", "true"), + ], + inline=True, + ) def definition_body(self): return fill( @@ -3505,17 +4003,23 @@ def definition_body(self): &CreateInterfaceObjects, aDefineOnGlobal); """, - name=self.descriptor.name) + name=self.descriptor.name, + ) class CGGetConstructorObjectMethod(CGAbstractMethod): """ A method for getting the interface constructor object. """ + def __init__(self, descriptor): CGAbstractMethod.__init__( - self, descriptor, "GetConstructorObject", "JSObject*", - [Argument('JSContext*', 'aCx')]) + self, + descriptor, + "GetConstructorObject", + "JSObject*", + [Argument("JSContext*", "aCx")], + ) def definition_body(self): return "return GetConstructorObjectHandle(aCx);\n" @@ -3523,10 +4027,10 @@ def definition_body(self): class CGGetNamedPropertiesObjectMethod(CGAbstractStaticMethod): def __init__(self, descriptor): - args = [Argument('JSContext*', 'aCx')] - CGAbstractStaticMethod.__init__(self, descriptor, - 'GetNamedPropertiesObject', - 'JSObject*', args) + args = [Argument("JSContext*", "aCx")] + CGAbstractStaticMethod.__init__( + self, descriptor, "GetNamedPropertiesObject", "JSObject*", args + ) def definition_body(self): parentProtoName = self.descriptor.parentPrototypeName @@ -3541,7 +4045,8 @@ def definition_body(self): return nullptr; } """, - parent=toBindingNamespace(parentProtoName)) + parent=toBindingNamespace(parentProtoName), + ) parentProto = "parentProto" return fill( """ @@ -3574,10 +4079,11 @@ def definition_body(self): getParentProto=getParentProto, ifaceName=self.descriptor.name, parentProto=parentProto, - nativeType=self.descriptor.nativeType) + nativeType=self.descriptor.nativeType, + ) -def getRawConditionList(idlobj, cxName, objName, ignoreSecureContext = False): +def getRawConditionList(idlobj, cxName, objName, ignoreSecureContext=False): """ Get the list of conditions for idlobj (to be used in "is this enabled" checks). 
This will be returned as a CGList with " &&\n" as the separator, @@ -3601,11 +4107,14 @@ def getRawConditionList(idlobj, cxName, objName, ignoreSecureContext = False): assert isinstance(func, list) and len(func) == 1 conditions.append("%s(%s, %s)" % (func[0], cxName, objName)) if not ignoreSecureContext and idlobj.getExtendedAttribute("SecureContext"): - conditions.append("mozilla::dom::IsSecureContextOrObjectIsFromSecureContext(%s, %s)" % (cxName, objName)) + conditions.append( + "mozilla::dom::IsSecureContextOrObjectIsFromSecureContext(%s, %s)" + % (cxName, objName) + ) return conditions -def getConditionList(idlobj, cxName, objName, ignoreSecureContext = False): +def getConditionList(idlobj, cxName, objName, ignoreSecureContext=False): """ Get the list of conditions from getRawConditionList See comment on getRawConditionList above for more info about arguments. @@ -3622,11 +4131,15 @@ class CGConstructorEnabled(CGAbstractMethod): This can perform various tests depending on what conditions are specified on the interface. """ + def __init__(self, descriptor): - CGAbstractMethod.__init__(self, descriptor, - 'ConstructorEnabled', 'bool', - [Argument("JSContext*", "aCx"), - Argument("JS::Handle", "aObj")]) + CGAbstractMethod.__init__( + self, + descriptor, + "ConstructorEnabled", + "bool", + [Argument("JSContext*", "aCx"), Argument("JS::Handle", "aObj")], + ) def definition_body(self): body = CGList([], "\n") @@ -3637,24 +4150,33 @@ def definition_body(self): exposedInWindowCheck = dedent( """ MOZ_ASSERT(!NS_IsMainThread(), "Why did we even get called?"); - """) + """ + ) body.append(CGGeneric(exposedInWindowCheck)) if iface.isExposedInSomeButNotAllWorkers(): workerGlobals = sorted(iface.getWorkerExposureSet()) - workerCondition = CGList((CGGeneric('strcmp(name, "%s")' % workerGlobal) - for workerGlobal in workerGlobals), " && ") + workerCondition = CGList( + ( + CGGeneric('strcmp(name, "%s")' % workerGlobal) + for workerGlobal in workerGlobals + ), + " && ", + ) exposedInWorkerCheck = fill( """ const char* name = JS::GetClass(aObj)->name; if (${workerCondition}) { return false; } - """, workerCondition=workerCondition.define()) + """, + workerCondition=workerCondition.define(), + ) exposedInWorkerCheck = CGGeneric(exposedInWorkerCheck) if iface.isExposedInWindow(): - exposedInWorkerCheck = CGIfWrapper(exposedInWorkerCheck, - "!NS_IsMainThread()") + exposedInWorkerCheck = CGIfWrapper( + exposedInWorkerCheck, "!NS_IsMainThread()" + ) body.append(exposedInWorkerCheck) conditions = getConditionList(iface, "aCx", "aObj") @@ -3664,10 +4186,9 @@ def definition_body(self): conditionsWrapper = "" if len(conditions): - conditionsWrapper = CGWrapper(conditions, - pre="return ", - post=";\n", - reindent=True) + conditionsWrapper = CGWrapper( + conditions, pre="return ", post=";\n", reindent=True + ) else: conditionsWrapper = CGGeneric("return true;\n") @@ -3687,12 +4208,14 @@ class CGSerializer(CGAbstractStaticMethod): The caller is expected to pass in the object whose DOMJSClass it used to get the serializer. 
""" + def __init__(self, descriptor): - args = [Argument("JSContext*", "aCx"), - Argument("JSStructuredCloneWriter*", "aWriter"), - Argument("JS::Handle", "aObj")] - CGAbstractStaticMethod.__init__(self, descriptor, "Serialize", - "bool", args) + args = [ + Argument("JSContext*", "aCx"), + Argument("JSStructuredCloneWriter*", "aWriter"), + Argument("JS::Handle", "aObj"), + ] + CGAbstractStaticMethod.__init__(self, descriptor, "Serialize", "bool", args) def definition_body(self): return fill( @@ -3704,7 +4227,8 @@ def definition_body(self): UnwrapDOMObject<${type}>(aObj)->WriteStructuredClone(aCx, aWriter); """, tag=StructuredCloneTag(self.descriptor.name), - type=self.descriptor.nativeType) + type=self.descriptor.nativeType, + ) class CGDeserializer(CGAbstractMethod): @@ -3712,12 +4236,14 @@ class CGDeserializer(CGAbstractMethod): Implementation of deserialization for things marked [Serializable]. This will need to be accessed from WebIDLSerializable, so can't be static. """ + def __init__(self, descriptor): - args = [Argument("JSContext*", "aCx"), - Argument("nsIGlobalObject*", "aGlobal"), - Argument("JSStructuredCloneReader*", "aReader")] - CGAbstractMethod.__init__(self, descriptor, "Deserialize", - "JSObject*", args) + args = [ + Argument("JSContext*", "aCx"), + Argument("nsIGlobalObject*", "aGlobal"), + Argument("JSStructuredCloneReader*", "aReader"), + ] + CGAbstractMethod.__init__(self, descriptor, "Deserialize", "JSObject*", args) def definition_body(self): # WrapObject has different signatures depending on whether @@ -3729,14 +4255,16 @@ def definition_body(self): if (!result) { return nullptr; } - """) + """ + ) else: wrapCall = dedent( """ if (!obj->WrapObject(aCx, nullptr, &result)) { return nullptr; } - """) + """ + ) return fill( """ @@ -3752,7 +4280,9 @@ def definition_body(self): return result; """, type=self.descriptor.nativeType, - wrapCall=wrapCall) + wrapCall=wrapCall, + ) + def CreateBindingJSObject(descriptor, properties): objDecl = "BindingJSObjectCreator<%s> creator(aCx);\n" % descriptor.nativeType @@ -3760,7 +4290,7 @@ def CreateBindingJSObject(descriptor, properties): # We don't always need to root obj, but there are a variety # of cases where we do, so for simplicity, just always root it. 
if descriptor.proxy: - if descriptor.interface.getExtendedAttribute('OverrideBuiltins'): + if descriptor.interface.getExtendedAttribute("OverrideBuiltins"): assert not descriptor.isMaybeCrossOriginObject() create = dedent( """ @@ -3769,7 +4299,8 @@ def CreateBindingJSObject(descriptor, properties): creator.CreateProxyObject(aCx, &sClass.mBase, DOMProxyHandler::getInstance(), proto, /* aLazyProto = */ false, aObject, expandoValue, aReflector); - """) + """ + ) else: if descriptor.isMaybeCrossOriginObject(): proto = "nullptr" @@ -3784,22 +4315,30 @@ def CreateBindingJSObject(descriptor, properties): aObject, JS::UndefinedHandleValue, aReflector); """, proto=proto, - lazyProto=lazyProto) + lazyProto=lazyProto, + ) else: create = dedent( """ creator.CreateObject(aCx, sClass.ToJSClass(), proto, aObject, aReflector); - """) - return objDecl + create + dedent( - """ + """ + ) + return ( + objDecl + + create + + dedent( + """ if (!aReflector) { return false; } - """) + """ + ) + ) -def InitUnforgeablePropertiesOnHolder(descriptor, properties, failureCode, - holderName="unforgeableHolder"): +def InitUnforgeablePropertiesOnHolder( + descriptor, properties, failureCode, holderName="unforgeableHolder" +): """ Define the unforgeable properties on the unforgeable holder for the interface represented by descriptor. @@ -3807,10 +4346,12 @@ def InitUnforgeablePropertiesOnHolder(descriptor, properties, failureCode, properties is a PropertyArrays instance. """ - assert (properties.unforgeableAttrs.hasNonChromeOnly() or - properties.unforgeableAttrs.hasChromeOnly() or - properties.unforgeableMethods.hasNonChromeOnly() or - properties.unforgeableMethods.hasChromeOnly()) + assert ( + properties.unforgeableAttrs.hasNonChromeOnly() + or properties.unforgeableAttrs.hasChromeOnly() + or properties.unforgeableMethods.hasNonChromeOnly() + or properties.unforgeableMethods.hasChromeOnly() + ) unforgeables = [] @@ -3821,7 +4362,8 @@ def InitUnforgeablePropertiesOnHolder(descriptor, properties, failureCode, } """, failureCode=failureCode, - holderName=holderName) + holderName=holderName, + ) defineUnforgeableMethods = fill( """ if (!DefineUnforgeableMethods(aCx, ${holderName}, %s)) { @@ -3829,25 +4371,31 @@ def InitUnforgeablePropertiesOnHolder(descriptor, properties, failureCode, } """, failureCode=failureCode, - holderName=holderName) + holderName=holderName, + ) unforgeableMembers = [ (defineUnforgeableAttrs, properties.unforgeableAttrs), - (defineUnforgeableMethods, properties.unforgeableMethods) + (defineUnforgeableMethods, properties.unforgeableMethods), ] for (template, array) in unforgeableMembers: if array.hasNonChromeOnly(): unforgeables.append(CGGeneric(template % array.variableName(False))) if array.hasChromeOnly(): unforgeables.append( - CGIfWrapper(CGGeneric(template % array.variableName(True)), - "nsContentUtils::ThreadsafeIsSystemCaller(aCx)")) + CGIfWrapper( + CGGeneric(template % array.variableName(True)), + "nsContentUtils::ThreadsafeIsSystemCaller(aCx)", + ) + ) if descriptor.interface.getExtendedAttribute("Unforgeable"): # We do our undefined toPrimitive here, not as a regular property # because we don't have a concept of value props anywhere in IDL. 
- unforgeables.append(CGGeneric(fill( - """ + unforgeables.append( + CGGeneric( + fill( + """ JS::RootedId toPrimitive(aCx, SYMBOL_TO_JSID(JS::GetWellKnownSymbol(aCx, JS::SymbolCode::toPrimitive))); if (!JS_DefinePropertyById(aCx, ${holderName}, toPrimitive, @@ -3856,8 +4404,11 @@ def InitUnforgeablePropertiesOnHolder(descriptor, properties, failureCode, $*{failureCode} } """, - failureCode=failureCode, - holderName=holderName))) + failureCode=failureCode, + holderName=holderName, + ) + ) + ) return CGWrapper(CGList(unforgeables), pre="\n") @@ -3867,47 +4418,60 @@ def CopyUnforgeablePropertiesToInstance(descriptor, failureCode): Copy the unforgeable properties from the unforgeable holder for this interface to the instance object we have. """ - assert not descriptor.isGlobal(); + assert not descriptor.isGlobal() if not descriptor.hasUnforgeableMembers: return "" copyCode = [ - CGGeneric(dedent( - """ + CGGeneric( + dedent( + """ // Important: do unforgeable property setup after we have handed // over ownership of the C++ object to obj as needed, so that if // we fail and it ends up GCed it won't have problems in the // finalizer trying to drop its ownership of the C++ object. - """)) + """ + ) + ) ] # For proxies, we want to define on the expando object, not directly on the # reflector, so we can make sure we don't get confused by named getters. if descriptor.proxy: - copyCode.append(CGGeneric(fill( - """ + copyCode.append( + CGGeneric( + fill( + """ JS::Rooted expando(aCx, DOMProxyHandler::EnsureExpandoObject(aCx, aReflector)); if (!expando) { $*{failureCode} } """, - failureCode=failureCode))) + failureCode=failureCode, + ) + ) + ) obj = "expando" else: obj = "aReflector" - copyCode.append(CGGeneric(fill( - """ + copyCode.append( + CGGeneric( + fill( + """ JS::Rooted unforgeableHolder(aCx, &JS::GetReservedSlot(canonicalProto, DOM_INTERFACE_PROTO_SLOTS_BASE).toObject()); if (!JS_InitializePropertiesFromCompatibleNativeObject(aCx, ${obj}, unforgeableHolder)) { $*{failureCode} } """, - obj=obj, - failureCode=failureCode))) + obj=obj, + failureCode=failureCode, + ) + ) + ) return CGWrapper(CGList(copyCode), pre="\n").define() @@ -3920,8 +4484,9 @@ def AssertInheritanceChain(descriptor): asserts += ( "MOZ_ASSERT(static_cast<%s*>(aObject) == \n" " reinterpret_cast<%s*>(aObject),\n" - " \"Multiple inheritance for %s is broken.\");\n" % - (desc.nativeType, desc.nativeType, desc.nativeType)) + ' "Multiple inheritance for %s is broken.");\n' + % (desc.nativeType, desc.nativeType, desc.nativeType) + ) iface = iface.parent asserts += "MOZ_ASSERT(ToSupportsIsCorrect(aObject));\n" return asserts @@ -3944,7 +4509,8 @@ def InitMemberSlots(descriptor, failureCode): $*{failureCode} } """, - failureCode=failureCode) + failureCode=failureCode, + ) def DeclareProto(descriptor): @@ -3958,7 +4524,8 @@ def DeclareProto(descriptor): return false; } JS::Rooted proto(aCx); - """) + """ + ) if descriptor.isMaybeCrossOriginObject(): return preamble + dedent( """ @@ -3967,7 +4534,8 @@ def DeclareProto(descriptor): // Set proto to canonicalProto to avoid preserving our wrapper if // we don't have to. proto = canonicalProto; - """) + """ + ) return preamble + dedent( """ @@ -3984,7 +4552,8 @@ def DeclareProto(descriptor): } else { proto = canonicalProto; } - """) + """ + ) class CGWrapWithCacheMethod(CGAbstractMethod): @@ -3993,14 +4562,17 @@ class CGWrapWithCacheMethod(CGAbstractMethod): properties should be a PropertyArrays instance. 
""" + def __init__(self, descriptor, properties): assert descriptor.interface.hasInterfacePrototypeObject() - args = [Argument('JSContext*', 'aCx'), - Argument(descriptor.nativeType + '*', 'aObject'), - Argument('nsWrapperCache*', 'aCache'), - Argument('JS::Handle', 'aGivenProto'), - Argument('JS::MutableHandle', 'aReflector')] - CGAbstractMethod.__init__(self, descriptor, 'Wrap', 'bool', args) + args = [ + Argument("JSContext*", "aCx"), + Argument(descriptor.nativeType + "*", "aObject"), + Argument("nsWrapperCache*", "aCache"), + Argument("JS::Handle", "aGivenProto"), + Argument("JS::MutableHandle", "aReflector"), + ] + CGAbstractMethod.__init__(self, descriptor, "Wrap", "bool", args) self.properties = properties def definition_body(self): @@ -4009,7 +4581,8 @@ def definition_body(self): aCache->ReleaseWrapper(aObject); aCache->ClearWrapper(); return false; - """) + """ + ) if self.descriptor.proxy: finalize = "DOMProxyHandler::getInstance()->finalize" @@ -4085,27 +4658,40 @@ def definition_body(self): assertInheritance=AssertInheritanceChain(self.descriptor), declareProto=DeclareProto(self.descriptor), createObject=CreateBindingJSObject(self.descriptor, self.properties), - unforgeable=CopyUnforgeablePropertiesToInstance(self.descriptor, - failureCode), + unforgeable=CopyUnforgeablePropertiesToInstance( + self.descriptor, failureCode + ), slots=InitMemberSlots(self.descriptor, failureCode), - finalize=finalize) + finalize=finalize, + ) class CGWrapMethod(CGAbstractMethod): def __init__(self, descriptor): # XXX can we wrap if we don't have an interface prototype object? assert descriptor.interface.hasInterfacePrototypeObject() - args = [Argument('JSContext*', 'aCx'), - Argument('T*', 'aObject'), - Argument('JS::Handle', 'aGivenProto')] - CGAbstractMethod.__init__(self, descriptor, 'Wrap', 'JSObject*', args, - inline=True, templateArgs=["class T"]) + args = [ + Argument("JSContext*", "aCx"), + Argument("T*", "aObject"), + Argument("JS::Handle", "aGivenProto"), + ] + CGAbstractMethod.__init__( + self, + descriptor, + "Wrap", + "JSObject*", + args, + inline=True, + templateArgs=["class T"], + ) def definition_body(self): - return dedent(""" + return dedent( + """ JS::Rooted reflector(aCx); return Wrap(aCx, aObject, aObject, aGivenProto, &reflector) ? reflector.get() : nullptr; - """) + """ + ) class CGWrapNonWrapperCacheMethod(CGAbstractMethod): @@ -4115,14 +4701,17 @@ class CGWrapNonWrapperCacheMethod(CGAbstractMethod): properties should be a PropertyArrays instance. """ + def __init__(self, descriptor, properties): # XXX can we wrap if we don't have an interface prototype object? 
assert descriptor.interface.hasInterfacePrototypeObject() - args = [Argument('JSContext*', 'aCx'), - Argument(descriptor.nativeType + '*', 'aObject'), - Argument('JS::Handle', 'aGivenProto'), - Argument('JS::MutableHandle', 'aReflector')] - CGAbstractMethod.__init__(self, descriptor, 'Wrap', 'bool', args) + args = [ + Argument("JSContext*", "aCx"), + Argument(descriptor.nativeType + "*", "aObject"), + Argument("JS::Handle", "aGivenProto"), + Argument("JS::MutableHandle", "aReflector"), + ] + CGAbstractMethod.__init__(self, descriptor, "Wrap", "bool", args) self.properties = properties def definition_body(self): @@ -4148,9 +4737,11 @@ def definition_body(self): assertions=AssertInheritanceChain(self.descriptor), declareProto=DeclareProto(self.descriptor), createObject=CreateBindingJSObject(self.descriptor, self.properties), - unforgeable=CopyUnforgeablePropertiesToInstance(self.descriptor, - failureCode), - slots=InitMemberSlots(self.descriptor, failureCode)) + unforgeable=CopyUnforgeablePropertiesToInstance( + self.descriptor, failureCode + ), + slots=InitMemberSlots(self.descriptor, failureCode), + ) class CGWrapGlobalMethod(CGAbstractMethod): @@ -4160,16 +4751,19 @@ class CGWrapGlobalMethod(CGAbstractMethod): properties should be a PropertyArrays instance. """ + def __init__(self, descriptor, properties): assert descriptor.interface.hasInterfacePrototypeObject() - args = [Argument('JSContext*', 'aCx'), - Argument(descriptor.nativeType + '*', 'aObject'), - Argument('nsWrapperCache*', 'aCache'), - Argument('JS::RealmOptions&', 'aOptions'), - Argument('JSPrincipals*', 'aPrincipal'), - Argument('bool', 'aInitStandardClasses'), - Argument('JS::MutableHandle', 'aReflector')] - CGAbstractMethod.__init__(self, descriptor, 'Wrap', 'bool', args) + args = [ + Argument("JSContext*", "aCx"), + Argument(descriptor.nativeType + "*", "aObject"), + Argument("nsWrapperCache*", "aCache"), + Argument("JS::RealmOptions&", "aOptions"), + Argument("JSPrincipals*", "aPrincipal"), + Argument("bool", "aInitStandardClasses"), + Argument("JS::MutableHandle", "aReflector"), + ] + CGAbstractMethod.__init__(self, descriptor, "Wrap", "bool", args) self.descriptor = descriptor self.properties = properties @@ -4188,12 +4782,13 @@ def definition_body(self): aCache->ReleaseWrapper(aObject); aCache->ClearWrapper(); return false; - """); + """ + ) if self.descriptor.hasUnforgeableMembers: unforgeable = InitUnforgeablePropertiesOnHolder( - self.descriptor, self.properties, failureCode, - "aReflector").define(); + self.descriptor, self.properties, failureCode, "aReflector" + ).define() else: unforgeable = "" @@ -4233,27 +4828,32 @@ def definition_body(self): chromeProperties=chromeProperties, failureCode=failureCode, unforgeable=unforgeable, - slots=InitMemberSlots(self.descriptor, failureCode)) + slots=InitMemberSlots(self.descriptor, failureCode), + ) class CGUpdateMemberSlotsMethod(CGAbstractStaticMethod): def __init__(self, descriptor): - args = [Argument('JSContext*', 'aCx'), - Argument('JS::Handle', 'aWrapper'), - Argument(descriptor.nativeType + '*', 'aObject')] - CGAbstractStaticMethod.__init__(self, descriptor, 'UpdateMemberSlots', 'bool', args) + args = [ + Argument("JSContext*", "aCx"), + Argument("JS::Handle", "aWrapper"), + Argument(descriptor.nativeType + "*", "aObject"), + ] + CGAbstractStaticMethod.__init__( + self, descriptor, "UpdateMemberSlots", "bool", args + ) def definition_body(self): - body = ("JS::Rooted temp(aCx);\n" - "JSJitGetterCallArgs args(&temp);\n") + body = "JS::Rooted temp(aCx);\n" 
"JSJitGetterCallArgs args(&temp);\n" for m in self.descriptor.interface.members: if m.isAttr() and m.getExtendedAttribute("StoreInSlot"): # Skip doing this for the "window" and "self" attributes on the # Window interface, because those can't be gotten safely until # we have hooked it up correctly to the outer window. The # window code handles doing the get itself. - if (self.descriptor.interface.identifier.name == "Window" and - (m.identifier.name == "window" or m.identifier.name == "self")): + if self.descriptor.interface.identifier.name == "Window" and ( + m.identifier.name == "window" or m.identifier.name == "self" + ): continue body += fill( """ @@ -4267,7 +4867,8 @@ def definition_body(self): """, slot=memberReservedSlot(m, self.descriptor), interface=self.descriptor.interface.identifier.name, - member=m.identifier.name) + member=m.identifier.name, + ) body += "\nreturn true;\n" return body @@ -4278,12 +4879,12 @@ def __init__(self, descriptor, member): self.member = member # If we're StoreInSlot, we'll need to call the getter if member.getExtendedAttribute("StoreInSlot"): - args = [Argument('JSContext*', 'aCx')] - returnType = 'bool' + args = [Argument("JSContext*", "aCx")] + returnType = "bool" else: args = [] - returnType = 'void' - args.append(Argument(descriptor.nativeType + '*', 'aObject')) + returnType = "void" + args.append(Argument(descriptor.nativeType + "*", "aObject")) name = MakeClearCachedValueNativeName(member) CGAbstractMethod.__init__(self, descriptor, name, returnType, args) @@ -4295,8 +4896,9 @@ def definition_body(self): declObj = "JS::Rooted obj(aCx);\n" noopRetval = " true" saveMember = ( - "JS::Rooted oldValue(aCx, JS::GetReservedSlot(obj, %s));\n" % - slotIndex) + "JS::Rooted oldValue(aCx, JS::GetReservedSlot(obj, %s));\n" + % slotIndex + ) regetMember = fill( """ JS::Rooted temp(aCx); @@ -4309,7 +4911,8 @@ def definition_body(self): return true; """, name=self.member.identifier.name, - slotIndex=slotIndex) + slotIndex=slotIndex, + ) else: declObj = "JSObject* obj;\n" noopRetval = "" @@ -4321,9 +4924,11 @@ def definition_body(self): """ xpc::ClearXrayExpandoSlots(obj, ${xraySlotIndex}); """, - xraySlotIndex=memberXrayExpandoReservedSlot(self.member, - self.descriptor)) - else : + xraySlotIndex=memberXrayExpandoReservedSlot( + self.member, self.descriptor + ), + ) + else: clearXrayExpandoSlots = "" return fill( @@ -4343,7 +4948,8 @@ def definition_body(self): saveMember=saveMember, slotIndex=slotIndex, clearXrayExpandoSlots=clearXrayExpandoSlots, - regetMember=regetMember) + regetMember=regetMember, + ) class CGCrossOriginProperties(CGThing): @@ -4353,66 +4959,115 @@ def __init__(self, descriptor): methods = [] chromeOnlyMethods = [] for m in descriptor.interface.members: - if m.isAttr() and (m.getExtendedAttribute("CrossOriginReadable") or - m.getExtendedAttribute("CrossOriginWritable")): + if m.isAttr() and ( + m.getExtendedAttribute("CrossOriginReadable") + or m.getExtendedAttribute("CrossOriginWritable") + ): if m.isStatic(): - raise TypeError("Don't know how to deal with static method %s" % - m.identifier.name) - if PropertyDefiner.getControllingCondition(m, descriptor).hasDisablers(): - raise TypeError("Don't know how to deal with disabler for %s" % - m.identifier.name) + raise TypeError( + "Don't know how to deal with static method %s" + % m.identifier.name + ) + if PropertyDefiner.getControllingCondition( + m, descriptor + ).hasDisablers(): + raise TypeError( + "Don't know how to deal with disabler for %s" + % m.identifier.name + ) if 
len(m.bindingAliases) > 0: - raise TypeError("Don't know how to deal with aliases for %s" % - m.identifier.name) + raise TypeError( + "Don't know how to deal with aliases for %s" % m.identifier.name + ) if m.getExtendedAttribute("ChromeOnly") is not None: chromeOnlyAttrs.extend(AttrDefiner.attrData(m, overrideFlags="0")) else: attrs.extend(AttrDefiner.attrData(m, overrideFlags="0")) elif m.isMethod() and m.getExtendedAttribute("CrossOriginCallable"): if m.isStatic(): - raise TypeError("Don't know how to deal with static method %s" % - m.identifier.name) - if PropertyDefiner.getControllingCondition(m, descriptor).hasDisablers(): - raise TypeError("Don't know how to deal with disabler for %s" % - m.identifier.name) + raise TypeError( + "Don't know how to deal with static method %s" + % m.identifier.name + ) + if PropertyDefiner.getControllingCondition( + m, descriptor + ).hasDisablers(): + raise TypeError( + "Don't know how to deal with disabler for %s" + % m.identifier.name + ) if len(m.aliases) > 0: - raise TypeError("Don't know how to deal with aliases for %s" % - m.identifier.name) + raise TypeError( + "Don't know how to deal with aliases for %s" % m.identifier.name + ) if m.getExtendedAttribute("ChromeOnly") is not None: - chromeOnlyMethods.append(MethodDefiner.methodData(m, descriptor, overrideFlags="JSPROP_READONLY")) + chromeOnlyMethods.append( + MethodDefiner.methodData( + m, descriptor, overrideFlags="JSPROP_READONLY" + ) + ) else: - methods.append(MethodDefiner.methodData(m, descriptor, overrideFlags="JSPROP_READONLY")) + methods.append( + MethodDefiner.methodData( + m, descriptor, overrideFlags="JSPROP_READONLY" + ) + ) if len(attrs) > 0: self.attributeSpecs, _ = PropertyDefiner.generatePrefableArrayValues( - attrs, descriptor, AttrDefiner.formatSpec, ' JS_PS_END\n', - AttrDefiner.condition, functools.partial(AttrDefiner.specData, crossOriginOnly=True)) + attrs, + descriptor, + AttrDefiner.formatSpec, + " JS_PS_END\n", + AttrDefiner.condition, + functools.partial(AttrDefiner.specData, crossOriginOnly=True), + ) else: - self.attributeSpecs = [' JS_PS_END\n'] + self.attributeSpecs = [" JS_PS_END\n"] if len(methods) > 0: self.methodSpecs, _ = PropertyDefiner.generatePrefableArrayValues( - methods, descriptor, MethodDefiner.formatSpec, ' JS_FS_END\n', - MethodDefiner.condition, MethodDefiner.specData) + methods, + descriptor, + MethodDefiner.formatSpec, + " JS_FS_END\n", + MethodDefiner.condition, + MethodDefiner.specData, + ) else: - self.methodSpecs = [' JS_FS_END\n'] + self.methodSpecs = [" JS_FS_END\n"] if len(chromeOnlyAttrs) > 0: - self.chromeOnlyAttributeSpecs, _ = PropertyDefiner.generatePrefableArrayValues( - chromeOnlyAttrs, descriptor, AttrDefiner.formatSpec, ' JS_PS_END\n', - AttrDefiner.condition, functools.partial(AttrDefiner.specData, crossOriginOnly=True)) + ( + self.chromeOnlyAttributeSpecs, + _, + ) = PropertyDefiner.generatePrefableArrayValues( + chromeOnlyAttrs, + descriptor, + AttrDefiner.formatSpec, + " JS_PS_END\n", + AttrDefiner.condition, + functools.partial(AttrDefiner.specData, crossOriginOnly=True), + ) else: self.chromeOnlyAttributeSpecs = [] if len(chromeOnlyMethods) > 0: self.chromeOnlyMethodSpecs, _ = PropertyDefiner.generatePrefableArrayValues( - chromeOnlyMethods, descriptor, MethodDefiner.formatSpec, ' JS_FS_END\n', - MethodDefiner.condition, MethodDefiner.specData) + chromeOnlyMethods, + descriptor, + MethodDefiner.formatSpec, + " JS_FS_END\n", + MethodDefiner.condition, + MethodDefiner.specData, + ) else: self.chromeOnlyMethodSpecs = [] def 
declare(self): - return dedent(""" + return dedent( + """ extern const CrossOriginProperties sCrossOriginProperties; - """) + """ + ) def define(self): def defineChromeOnly(name, specs, specType): @@ -4427,11 +5082,16 @@ def defineChromeOnly(name, specs, specType): """, specType=specType, name=name, - specs=",\n".join(specs)) + specs=",\n".join(specs), + ) return (define, name) - chromeOnlyAttributes = defineChromeOnly("Attributes", self.chromeOnlyAttributeSpecs, "JSPropertySpec") - chromeOnlyMethods = defineChromeOnly("Methods", self.chromeOnlyMethodSpecs, "JSFunctionSpec") + chromeOnlyAttributes = defineChromeOnly( + "Attributes", self.chromeOnlyAttributeSpecs, "JSPropertySpec" + ) + chromeOnlyMethods = defineChromeOnly( + "Methods", self.chromeOnlyMethodSpecs, "JSFunctionSpec" + ) return fill( """ // We deliberately use brace-elision to make Visual Studio produce better initalization code. @@ -4455,36 +5115,47 @@ def defineChromeOnly(name, specs, specType): chromeOnlyAttributeSpecs=chromeOnlyAttributes[0], chromeOnlyMethodSpecs=chromeOnlyMethods[0], chromeOnlyAttributes=chromeOnlyAttributes[1], - chromeOnlyMethods=chromeOnlyMethods[1]) + chromeOnlyMethods=chromeOnlyMethods[1], + ) class CGCycleCollectionTraverseForOwningUnionMethod(CGAbstractMethod): """ ImplCycleCollectionUnlink for owning union type. """ + def __init__(self, type): self.type = type - args = [Argument("nsCycleCollectionTraversalCallback&", "aCallback"), - Argument("%s&" % CGUnionStruct.unionTypeName(type, True), "aUnion"), - Argument("const char*", "aName"), - Argument("uint32_t", "aFlags", "0")] - CGAbstractMethod.__init__(self, None, "ImplCycleCollectionTraverse", "void", args) + args = [ + Argument("nsCycleCollectionTraversalCallback&", "aCallback"), + Argument("%s&" % CGUnionStruct.unionTypeName(type, True), "aUnion"), + Argument("const char*", "aName"), + Argument("uint32_t", "aFlags", "0"), + ] + CGAbstractMethod.__init__( + self, None, "ImplCycleCollectionTraverse", "void", args + ) def deps(self): return self.type.getDeps() def definition_body(self): - memberNames = [getUnionMemberName(t) - for t in self.type.flatMemberTypes - if idlTypeNeedsCycleCollection(t)] + memberNames = [ + getUnionMemberName(t) + for t in self.type.flatMemberTypes + if idlTypeNeedsCycleCollection(t) + ] assert memberNames - conditionTemplate = 'aUnion.Is%s()' - functionCallTemplate = 'ImplCycleCollectionTraverse(aCallback, aUnion.GetAs%s(), "m%s", aFlags);\n' + conditionTemplate = "aUnion.Is%s()" + functionCallTemplate = ( + 'ImplCycleCollectionTraverse(aCallback, aUnion.GetAs%s(), "m%s", aFlags);\n' + ) - ifStaments = (CGIfWrapper(CGGeneric(functionCallTemplate % (m, m)), - conditionTemplate % m) - for m in memberNames) + ifStaments = ( + CGIfWrapper(CGGeneric(functionCallTemplate % (m, m)), conditionTemplate % m) + for m in memberNames + ) return CGElseChain(ifStaments).define() @@ -4493,6 +5164,7 @@ class CGCycleCollectionUnlinkForOwningUnionMethod(CGAbstractMethod): """ ImplCycleCollectionUnlink for owning union type. 
""" + def __init__(self, type): self.type = type args = [Argument("%s&" % CGUnionStruct.unionTypeName(type, True), "aUnion")] @@ -4506,40 +5178,39 @@ def definition_body(self): builtinNames = { - IDLType.Tags.bool: 'bool', - IDLType.Tags.int8: 'int8_t', - IDLType.Tags.int16: 'int16_t', - IDLType.Tags.int32: 'int32_t', - IDLType.Tags.int64: 'int64_t', - IDLType.Tags.uint8: 'uint8_t', - IDLType.Tags.uint16: 'uint16_t', - IDLType.Tags.uint32: 'uint32_t', - IDLType.Tags.uint64: 'uint64_t', - IDLType.Tags.unrestricted_float: 'float', - IDLType.Tags.float: 'float', - IDLType.Tags.unrestricted_double: 'double', - IDLType.Tags.double: 'double' + IDLType.Tags.bool: "bool", + IDLType.Tags.int8: "int8_t", + IDLType.Tags.int16: "int16_t", + IDLType.Tags.int32: "int32_t", + IDLType.Tags.int64: "int64_t", + IDLType.Tags.uint8: "uint8_t", + IDLType.Tags.uint16: "uint16_t", + IDLType.Tags.uint32: "uint32_t", + IDLType.Tags.uint64: "uint64_t", + IDLType.Tags.unrestricted_float: "float", + IDLType.Tags.float: "float", + IDLType.Tags.unrestricted_double: "double", + IDLType.Tags.double: "double", } numericSuffixes = { - IDLType.Tags.int8: '', - IDLType.Tags.uint8: '', - IDLType.Tags.int16: '', - IDLType.Tags.uint16: '', - IDLType.Tags.int32: '', - IDLType.Tags.uint32: 'U', - IDLType.Tags.int64: 'LL', - IDLType.Tags.uint64: 'ULL', - IDLType.Tags.unrestricted_float: 'F', - IDLType.Tags.float: 'F', - IDLType.Tags.unrestricted_double: '', - IDLType.Tags.double: '' + IDLType.Tags.int8: "", + IDLType.Tags.uint8: "", + IDLType.Tags.int16: "", + IDLType.Tags.uint16: "", + IDLType.Tags.int32: "", + IDLType.Tags.uint32: "U", + IDLType.Tags.int64: "LL", + IDLType.Tags.uint64: "ULL", + IDLType.Tags.unrestricted_float: "F", + IDLType.Tags.float: "F", + IDLType.Tags.unrestricted_double: "", + IDLType.Tags.double: "", } def numericValue(t, v): - if (t == IDLType.Tags.unrestricted_double or - t == IDLType.Tags.unrestricted_float): + if t == IDLType.Tags.unrestricted_double or t == IDLType.Tags.unrestricted_float: typeName = builtinNames[t] if v == float("inf"): return "mozilla::PositiveInfinity<%s>()" % typeName @@ -4550,7 +5221,7 @@ def numericValue(t, v): return "%s%s" % (v, numericSuffixes[t]) -class CastableObjectUnwrapper(): +class CastableObjectUnwrapper: """ A class for unwrapping an object stored in a JS Value (or MutableHandle or Handle) named by the "source" and @@ -4565,8 +5236,17 @@ class CastableObjectUnwrapper(): JS-implemented, fall back to just creating the right object if what we have isn't one already. 
""" - def __init__(self, descriptor, source, mutableSource, target, codeOnFailure, - exceptionCode=None, isCallbackReturnValue=False): + + def __init__( + self, + descriptor, + source, + mutableSource, + target, + codeOnFailure, + exceptionCode=None, + isCallbackReturnValue=False, + ): self.substitution = { "type": descriptor.nativeType, "protoID": "prototypes::id::" + descriptor.name, @@ -4576,8 +5256,7 @@ def __init__(self, descriptor, source, mutableSource, target, codeOnFailure, "mutableSource": mutableSource, } - if (isCallbackReturnValue == "JSImpl" and - descriptor.interface.isJSImplemented()): + if isCallbackReturnValue == "JSImpl" and descriptor.interface.isJSImplemented(): exceptionCode = exceptionCode or codeOnFailure self.substitution["codeOnFailure"] = fill( """ @@ -4602,14 +5281,15 @@ def __init__(self, descriptor, source, mutableSource, target, codeOnFailure, } """, exceptionCode=exceptionCode, - **self.substitution) + **self.substitution + ) else: self.substitution["codeOnFailure"] = codeOnFailure def __str__(self): substitution = self.substitution.copy() substitution["codeOnFailure"] %= { - 'securityError': 'rv == NS_ERROR_XPC_SECURITY_MANAGER_VETO' + "securityError": "rv == NS_ERROR_XPC_SECURITY_MANAGER_VETO" } return fill( """ @@ -4621,26 +5301,42 @@ def __str__(self): } } """, - **substitution) + **substitution + ) class FailureFatalCastableObjectUnwrapper(CastableObjectUnwrapper): """ As CastableObjectUnwrapper, but defaulting to throwing if unwrapping fails """ - def __init__(self, descriptor, source, mutableSource, target, exceptionCode, - isCallbackReturnValue, sourceDescription): + + def __init__( + self, + descriptor, + source, + mutableSource, + target, + exceptionCode, + isCallbackReturnValue, + sourceDescription, + ): CastableObjectUnwrapper.__init__( - self, descriptor, source, mutableSource, target, + self, + descriptor, + source, + mutableSource, + target, 'cx.ThrowErrorMessage("%s", "%s");\n' - '%s' % (sourceDescription, descriptor.interface.identifier.name, - exceptionCode), + "%s" + % (sourceDescription, descriptor.interface.identifier.name, exceptionCode), exceptionCode, - isCallbackReturnValue) + isCallbackReturnValue, + ) -def getCallbackConversionInfo(type, idlObject, isMember, isCallbackReturnValue, - isOptional): +def getCallbackConversionInfo( + type, idlObject, isMember, isCallbackReturnValue, isOptional +): """ Returns a tuple containing the declType, declArgs, and basic conversion for the given callback type, with the given callback @@ -4650,8 +5346,7 @@ def getCallbackConversionInfo(type, idlObject, isMember, isCallbackReturnValue, # We can't use fast callbacks if isOptional because then we get an # Optional thing, which is not transparent to consumers. 
- useFastCallback = (not isMember and not isCallbackReturnValue and - not isOptional) + useFastCallback = not isMember and not isCallbackReturnValue and not isOptional if useFastCallback: name = "binding_detail::Fast%s" % name rootArgs = "" @@ -4661,7 +5356,8 @@ def getCallbackConversionInfo(type, idlObject, isMember, isCallbackReturnValue, """ JS::Rooted tempRoot(cx, &${val}.toObject()); JS::Rooted tempGlobalRoot(cx, JS::CurrentGlobalOrNull(cx)); - """) + """ + ) args = "cx, tempRoot, tempGlobalRoot, GetIncumbentGlobal()" if type.nullable() or isCallbackReturnValue: @@ -4684,17 +5380,25 @@ def getCallbackConversionInfo(type, idlObject, isMember, isCallbackReturnValue, """, rootArgs=rootArgs, name=name, - args=args) + args=args, + ) return (declType, declArgs, conversion) -class JSToNativeConversionInfo(): +class JSToNativeConversionInfo: """ An object representing information about a JS-to-native conversion. """ - def __init__(self, template, declType=None, holderType=None, - dealWithOptional=False, declArgs=None, - holderArgs=None): + + def __init__( + self, + template, + declType=None, + holderType=None, + dealWithOptional=False, + declArgs=None, + holderArgs=None, + ): """ template: A string representing the conversion code. This will have template substitution performed on it as follows: @@ -4776,14 +5480,16 @@ def handleDefaultStringValue(defaultValue, method): pair that sets this string default value. This string is suitable for passing as the second argument of handleDefault. """ - assert (defaultValue.type.isDOMString() or - defaultValue.type.isUSVString() or - defaultValue.type.isUTF8String() or - defaultValue.type.isByteString()) + assert ( + defaultValue.type.isDOMString() + or defaultValue.type.isUSVString() + or defaultValue.type.isUTF8String() + or defaultValue.type.isByteString() + ) # There shouldn't be any non-ASCII or embedded nulls in here; if # it ever sneaks in we will need to think about how to properly # represent that in the C++. - assert(all(ord(c) < 128 and ord(c) > 0 for c in defaultValue.value)) + assert all(ord(c) < 128 and ord(c) > 0 for c in defaultValue.value) if defaultValue.type.isByteString() or defaultValue.type.isUTF8String(): prefix = "" else: @@ -4794,7 +5500,8 @@ def handleDefaultStringValue(defaultValue, method): """, method=method, prefix=prefix, - value=defaultValue.value) + value=defaultValue.value, + ) def recordKeyType(recordType): @@ -4828,20 +5535,24 @@ def initializerForType(type): # CGNativeMember.getRetvalInfo accordingly. The latter cares about the decltype # and holdertype we end up using, because it needs to be able to return the code # that will convert those to the actual return value of the callback function. 
-def getJSToNativeConversionInfo(type, descriptorProvider, failureCode=None, - isDefinitelyObject=False, - isMember=False, - isOptional=False, - invalidEnumValueFatal=True, - defaultValue=None, - isNullOrUndefined=False, - isKnownMissing=False, - exceptionCode=None, - lenientFloatCode=None, - allowTreatNonCallableAsNull=False, - isCallbackReturnValue=False, - sourceDescription="value", - nestingLevel=""): +def getJSToNativeConversionInfo( + type, + descriptorProvider, + failureCode=None, + isDefinitelyObject=False, + isMember=False, + isOptional=False, + invalidEnumValueFatal=True, + defaultValue=None, + isNullOrUndefined=False, + isKnownMissing=False, + exceptionCode=None, + lenientFloatCode=None, + allowTreatNonCallableAsNull=False, + isCallbackReturnValue=False, + sourceDescription="value", + nestingLevel="", +): """ Get a template for converting a JS value to a native object based on the given type and descriptor. If failureCode is given, then we're actually @@ -4941,15 +5652,21 @@ def firstCap(string): # wrong type of value def onFailureNotAnObject(failureCode): return CGGeneric( - failureCode or - ('cx.ThrowErrorMessage("%s");\n' - '%s' % (firstCap(sourceDescription), exceptionCode))) + failureCode + or ( + 'cx.ThrowErrorMessage("%s");\n' + "%s" % (firstCap(sourceDescription), exceptionCode) + ) + ) def onFailureBadType(failureCode, typeName): return CGGeneric( - failureCode or - ('cx.ThrowErrorMessage("%s", "%s");\n' - '%s' % (firstCap(sourceDescription), typeName, exceptionCode))) + failureCode + or ( + 'cx.ThrowErrorMessage("%s", "%s");\n' + "%s" % (firstCap(sourceDescription), typeName, exceptionCode) + ) + ) # It's a failure in the committed-to conversion, not a failure to match up # to a type, so we don't want to use failureCode in here. We want to just @@ -4957,13 +5674,17 @@ def onFailureBadType(failureCode, typeName): def onFailureIsShared(): return CGGeneric( 'cx.ThrowErrorMessage("%s");\n' - '%s' % (firstCap(sourceDescription), exceptionCode)) + "%s" % (firstCap(sourceDescription), exceptionCode) + ) def onFailureNotCallable(failureCode): return CGGeneric( - failureCode or - ('cx.ThrowErrorMessage("%s");\n' - '%s' % (firstCap(sourceDescription), exceptionCode))) + failureCode + or ( + 'cx.ThrowErrorMessage("%s");\n' + "%s" % (firstCap(sourceDescription), exceptionCode) + ) + ) # A helper function for handling default values. Takes a template # body and the C++ code to set the default value and wraps the @@ -4979,7 +5700,8 @@ def handleDefault(template, setDefault): $*{setDefault} } """, - setDefault=setDefault) + setDefault=setDefault, + ) return fill( """ if ($${haveValue}) { @@ -4989,7 +5711,8 @@ def handleDefault(template, setDefault): } """, templateBody=template, - setDefault=setDefault) + setDefault=setDefault, + ) # A helper function for wrapping up the template body for # possibly-nullable objecty stuff @@ -5025,7 +5748,8 @@ def wrapObjectTemplate(templateBody, type, codeToSetNull, failureCode=None): templateBody=templateBody, elifLine=elifLine, elifBody=elifBody, - failureBody=onFailureNotAnObject(failureCode).define()) + failureBody=onFailureNotAnObject(failureCode).define(), + ) if isinstance(defaultValue, IDLNullValue): assert type.nullable() # Parser should enforce this @@ -5039,7 +5763,9 @@ def wrapObjectTemplate(templateBody, type, codeToSetNull, failureCode=None): return templateBody # A helper function for converting things that look like a JSObject*. 
- def handleJSObjectType(type, isMember, failureCode, exceptionCode, sourceDescription): + def handleJSObjectType( + type, isMember, failureCode, exceptionCode, sourceDescription + ): if not isMember: if isOptional: # We have a specialization of Optional that will use a @@ -5049,8 +5775,13 @@ def handleJSObjectType(type, isMember, failureCode, exceptionCode, sourceDescrip declType = CGGeneric("JS::Rooted") declArgs = "cx" else: - assert (isMember in - ("Sequence", "Variadic", "Dictionary", "OwningUnion", "Record")) + assert isMember in ( + "Sequence", + "Variadic", + "Dictionary", + "OwningUnion", + "Record", + ) # We'll get traced by the sequence or dictionary or union tracer declType = CGGeneric("JSObject*") declArgs = None @@ -5062,9 +5793,13 @@ def handleJSObjectType(type, isMember, failureCode, exceptionCode, sourceDescrip # $${passedToJSImpl} is the literal `false`. But Apple is shipping a # buggy clang (clang 3.9) in Xcode 8.3, so there even the parens are not # enough. So we manually disable some warnings in clang. - if not isinstance(descriptorProvider, Descriptor) or descriptorProvider.interface.isJSImplemented(): - templateBody = fill( - """ + if ( + not isinstance(descriptorProvider, Descriptor) + or descriptorProvider.interface.isJSImplemented() + ): + templateBody = ( + fill( + """ #ifdef __clang__ #pragma clang diagnostic push #pragma clang diagnostic ignored "-Wunreachable-code" @@ -5078,15 +5813,17 @@ def handleJSObjectType(type, isMember, failureCode, exceptionCode, sourceDescrip #pragma clang diagnostic pop #endif // __clang__ """, - sourceDescription=sourceDescription, - exceptionCode=exceptionCode) + templateBody + sourceDescription=sourceDescription, + exceptionCode=exceptionCode, + ) + + templateBody + ) setToNullCode = "${declName} = nullptr;\n" - template = wrapObjectTemplate(templateBody, type, setToNullCode, - failureCode) - return JSToNativeConversionInfo(template, declType=declType, - dealWithOptional=isOptional, - declArgs=declArgs) + template = wrapObjectTemplate(templateBody, type, setToNullCode, failureCode) + return JSToNativeConversionInfo( + template, declType=declType, dealWithOptional=isOptional, declArgs=declArgs + ) def incrementNestingLevel(): if nestingLevel == "": @@ -5099,8 +5836,10 @@ def incrementNestingLevel(): assert not isEnforceRange and not isClamp and not isAllowShared if failureCode is None: - notSequence = ('cx.ThrowErrorMessage("%s");\n' - "%s" % (firstCap(sourceDescription), exceptionCode)) + notSequence = 'cx.ThrowErrorMessage("%s");\n' "%s" % ( + firstCap(sourceDescription), + exceptionCode, + ) else: notSequence = failureCode @@ -5134,11 +5873,15 @@ def incrementNestingLevel(): # XXXbz we can't include the index in the sourceDescription, because # we don't really have a way to pass one in dynamically at runtime... 
elementInfo = getJSToNativeConversionInfo( - elementType, descriptorProvider, isMember="Sequence", - exceptionCode=exceptionCode, lenientFloatCode=lenientFloatCode, + elementType, + descriptorProvider, + isMember="Sequence", + exceptionCode=exceptionCode, + lenientFloatCode=lenientFloatCode, isCallbackReturnValue=isCallbackReturnValue, sourceDescription="element of %s" % sourceDescription, - nestingLevel=incrementNestingLevel()) + nestingLevel=incrementNestingLevel(), + ) if elementInfo.dealWithOptional: raise TypeError("Shouldn't have optional things in sequences") if elementInfo.holderType is not None: @@ -5152,16 +5895,18 @@ def incrementNestingLevel(): else: arrayRef = "${declName}" - elementConversion = string.Template(elementInfo.template).substitute({ - "val": "temp" + str(nestingLevel), - "maybeMutableVal": "&temp" + str(nestingLevel), - "declName": "slot" + str(nestingLevel), - # We only need holderName here to handle isExternal() - # interfaces, which use an internal holder for the - # conversion even when forceOwningType ends up true. - "holderName": "tempHolder" + str(nestingLevel), - "passedToJSImpl": "${passedToJSImpl}" - }) + elementConversion = string.Template(elementInfo.template).substitute( + { + "val": "temp" + str(nestingLevel), + "maybeMutableVal": "&temp" + str(nestingLevel), + "declName": "slot" + str(nestingLevel), + # We only need holderName here to handle isExternal() + # interfaces, which use an internal holder for the + # conversion even when forceOwningType ends up true. + "holderName": "tempHolder" + str(nestingLevel), + "passedToJSImpl": "${passedToJSImpl}", + } + ) elementInitializer = initializerForType(elementType) if elementInitializer is None: @@ -5205,15 +5950,19 @@ def incrementNestingLevel(): elementType=elementInfo.declType.define(), elementConversion=elementConversion, elementInitializer=elementInitializer, - nestingLevel=str(nestingLevel)) + nestingLevel=str(nestingLevel), + ) - templateBody = wrapObjectTemplate(templateBody, type, - "${declName}.SetNull();\n", notSequence) + templateBody = wrapObjectTemplate( + templateBody, type, "${declName}.SetNull();\n", notSequence + ) if isinstance(defaultValue, IDLEmptySequenceValue): if type.nullable(): codeToSetEmpty = "${declName}.SetValue();\n" else: - codeToSetEmpty = "/* ${declName} array is already empty; nothing to do */\n" + codeToSetEmpty = ( + "/* ${declName} array is already empty; nothing to do */\n" + ) templateBody = handleDefault(templateBody, codeToSetEmpty) # Sequence arguments that might contain traceable things need @@ -5228,16 +5977,21 @@ def incrementNestingLevel(): holderType = None holderArgs = None - return JSToNativeConversionInfo(templateBody, declType=typeName, - holderType=holderType, - dealWithOptional=isOptional, - holderArgs=holderArgs) + return JSToNativeConversionInfo( + templateBody, + declType=typeName, + holderType=holderType, + dealWithOptional=isOptional, + holderArgs=holderArgs, + ) if type.isRecord(): assert not isEnforceRange and not isClamp and not isAllowShared if failureCode is None: - notRecord = ('cx.ThrowErrorMessage("%s");\n' - "%s" % (firstCap(sourceDescription), exceptionCode)) + notRecord = 'cx.ThrowErrorMessage("%s");\n' "%s" % ( + firstCap(sourceDescription), + exceptionCode, + ) else: notRecord = failureCode @@ -5250,18 +6004,23 @@ def incrementNestingLevel(): valueType = recordType.inner valueInfo = getJSToNativeConversionInfo( - valueType, descriptorProvider, isMember="Record", - exceptionCode=exceptionCode, lenientFloatCode=lenientFloatCode, + 
valueType, + descriptorProvider, + isMember="Record", + exceptionCode=exceptionCode, + lenientFloatCode=lenientFloatCode, isCallbackReturnValue=isCallbackReturnValue, sourceDescription="value in %s" % sourceDescription, - nestingLevel=incrementNestingLevel()) + nestingLevel=incrementNestingLevel(), + ) if valueInfo.dealWithOptional: raise TypeError("Shouldn't have optional things in record") if valueInfo.holderType is not None: raise TypeError("Shouldn't need holders for record") - declType = CGTemplatedType("Record", [recordKeyDeclType(recordType), - valueInfo.declType]) + declType = CGTemplatedType( + "Record", [recordKeyDeclType(recordType), valueInfo.declType] + ) typeName = declType.define() if nullable: declType = CGTemplatedType("Nullable", declType) @@ -5269,20 +6028,24 @@ def incrementNestingLevel(): else: recordRef = "${declName}" - valueConversion = string.Template(valueInfo.template).substitute({ - "val": "temp", - "maybeMutableVal": "&temp", - "declName": "slot", - # We only need holderName here to handle isExternal() - # interfaces, which use an internal holder for the - # conversion even when forceOwningType ends up true. - "holderName": "tempHolder", - "passedToJSImpl": "${passedToJSImpl}" - }) + valueConversion = string.Template(valueInfo.template).substitute( + { + "val": "temp", + "maybeMutableVal": "&temp", + "declName": "slot", + # We only need holderName here to handle isExternal() + # interfaces, which use an internal holder for the + # conversion even when forceOwningType ends up true. + "holderName": "tempHolder", + "passedToJSImpl": "${passedToJSImpl}", + } + ) keyType = recordKeyType(recordType) if recordType.keyType.isJSString(): - raise TypeError("Have do deal with JSString record type, but don't know how") + raise TypeError( + "Have do deal with JSString record type, but don't know how" + ) if recordType.keyType.isByteString() or recordType.keyType.isUTF8String(): hashKeyType = "nsCStringHashKey" if recordType.keyType.isByteString(): @@ -5379,11 +6142,12 @@ def incrementNestingLevel(): sourceDescription=sourceDescription, typeName=typeName, valueType=valueInfo.declType.define(), - valueConversion=valueConversion) + valueConversion=valueConversion, + ) - templateBody = wrapObjectTemplate(templateBody, type, - "${declName}.SetNull();\n", - notRecord) + templateBody = wrapObjectTemplate( + templateBody, type, "${declName}.SetNull();\n", notRecord + ) declArgs = None holderType = None @@ -5395,19 +6159,22 @@ def incrementNestingLevel(): declType = CGWrapper(declType, post="&") declArgs = "aRetVal" elif not isMember and typeNeedsRooting(valueType): - holderType = CGTemplatedType("RecordRooter", - [recordKeyDeclType(recordType), - valueInfo.declType]) + holderType = CGTemplatedType( + "RecordRooter", [recordKeyDeclType(recordType), valueInfo.declType] + ) # If our record is nullable, this will set the Nullable to be # not-null, but that's ok because we make an explicit SetNull() call # on it as needed if our JS value is actually null. 
holderArgs = "cx, &%s" % recordRef - return JSToNativeConversionInfo(templateBody, declType=declType, - declArgs=declArgs, - holderType=holderType, - dealWithOptional=isOptional, - holderArgs=holderArgs) + return JSToNativeConversionInfo( + templateBody, + declType=declType, + declArgs=declArgs, + holderType=holderType, + dealWithOptional=isOptional, + holderArgs=holderArgs, + ) if type.isUnion(): nullable = type.nullable() @@ -5424,18 +6191,24 @@ def incrementNestingLevel(): memberTypes = type.flatMemberTypes prettyNames = [] - interfaceMemberTypes = [ - t for t in memberTypes if t.isNonCallbackInterface()] + interfaceMemberTypes = [t for t in memberTypes if t.isNonCallbackInterface()] if len(interfaceMemberTypes) > 0: interfaceObject = [] for memberType in interfaceMemberTypes: name = getUnionMemberName(memberType) interfaceObject.append( - CGGeneric("(failed = !%s.TrySetTo%s(cx, ${val}, tryNext, ${passedToJSImpl})) || !tryNext" % - (unionArgumentObj, name))) + CGGeneric( + "(failed = !%s.TrySetTo%s(cx, ${val}, tryNext, ${passedToJSImpl})) || !tryNext" + % (unionArgumentObj, name) + ) + ) prettyNames.append(memberType.prettyName()) - interfaceObject = CGWrapper(CGList(interfaceObject, " ||\n"), - pre="done = ", post=";\n\n", reindent=True) + interfaceObject = CGWrapper( + CGList(interfaceObject, " ||\n"), + pre="done = ", + post=";\n\n", + reindent=True, + ) else: interfaceObject = None @@ -5445,21 +6218,24 @@ def incrementNestingLevel(): memberType = sequenceObjectMemberTypes[0] name = getUnionMemberName(memberType) sequenceObject = CGGeneric( - "done = (failed = !%s.TrySetTo%s(cx, ${val}, tryNext, ${passedToJSImpl})) || !tryNext;\n" % - (unionArgumentObj, name)) + "done = (failed = !%s.TrySetTo%s(cx, ${val}, tryNext, ${passedToJSImpl})) || !tryNext;\n" + % (unionArgumentObj, name) + ) prettyNames.append(memberType.prettyName()) else: sequenceObject = None callbackMemberTypes = [ - t for t in memberTypes if t.isCallback() or t.isCallbackInterface()] + t for t in memberTypes if t.isCallback() or t.isCallbackInterface() + ] if len(callbackMemberTypes) > 0: assert len(callbackMemberTypes) == 1 memberType = callbackMemberTypes[0] name = getUnionMemberName(memberType) callbackObject = CGGeneric( - "done = (failed = !%s.TrySetTo%s(cx, ${val}, tryNext, ${passedToJSImpl})) || !tryNext;\n" % - (unionArgumentObj, name)) + "done = (failed = !%s.TrySetTo%s(cx, ${val}, tryNext, ${passedToJSImpl})) || !tryNext;\n" + % (unionArgumentObj, name) + ) prettyNames.append(memberType.prettyName()) else: callbackObject = None @@ -5470,8 +6246,9 @@ def incrementNestingLevel(): memberType = dictionaryMemberTypes[0] name = getUnionMemberName(memberType) setDictionary = CGGeneric( - "done = (failed = !%s.TrySetTo%s(cx, ${val}, tryNext, ${passedToJSImpl})) || !tryNext;\n" % - (unionArgumentObj, name)) + "done = (failed = !%s.TrySetTo%s(cx, ${val}, tryNext, ${passedToJSImpl})) || !tryNext;\n" + % (unionArgumentObj, name) + ) prettyNames.append(memberType.prettyName()) else: setDictionary = None @@ -5482,8 +6259,9 @@ def incrementNestingLevel(): memberType = recordMemberTypes[0] name = getUnionMemberName(memberType) recordObject = CGGeneric( - "done = (failed = !%s.TrySetTo%s(cx, ${val}, tryNext, ${passedToJSImpl})) || !tryNext;\n" % - (unionArgumentObj, name)) + "done = (failed = !%s.TrySetTo%s(cx, ${val}, tryNext, ${passedToJSImpl})) || !tryNext;\n" + % (unionArgumentObj, name) + ) prettyNames.append(memberType.prettyName()) else: recordObject = None @@ -5494,18 +6272,28 @@ def incrementNestingLevel(): # Very 
important to NOT construct a temporary Rooted here, since the # SetToObject call can call a Rooted constructor and we need to keep # stack discipline for Rooted. - object = CGGeneric("if (!%s.SetToObject(cx, &${val}.toObject(), ${passedToJSImpl})) {\n" - "%s" - "}\n" - "done = true;\n" % (unionArgumentObj, indent(exceptionCode))) + object = CGGeneric( + "if (!%s.SetToObject(cx, &${val}.toObject(), ${passedToJSImpl})) {\n" + "%s" + "}\n" + "done = true;\n" % (unionArgumentObj, indent(exceptionCode)) + ) prettyNames.append(objectMemberTypes[0].prettyName()) else: object = None - hasObjectTypes = interfaceObject or sequenceObject or callbackObject or object or recordObject + hasObjectTypes = ( + interfaceObject + or sequenceObject + or callbackObject + or object + or recordObject + ) if hasObjectTypes: # "object" is not distinguishable from other types - assert not object or not (interfaceObject or sequenceObject or callbackObject or recordObject) + assert not object or not ( + interfaceObject or sequenceObject or callbackObject or recordObject + ) if sequenceObject or callbackObject: # An object can be both an sequence object and a callback or # dictionary, but we shouldn't have both in the union's members @@ -5523,8 +6311,9 @@ def incrementNestingLevel(): templateBody = CGList([templateBody, object]) if recordObject: - templateBody = CGList([templateBody, - CGIfWrapper(recordObject, "!done")]) + templateBody = CGList( + [templateBody, CGIfWrapper(recordObject, "!done")] + ) templateBody = CGIfWrapper(templateBody, "${val}.isObject()") else: @@ -5532,8 +6321,7 @@ def incrementNestingLevel(): if setDictionary: assert not object - templateBody = CGList([templateBody, - CGIfWrapper(setDictionary, "!done")]) + templateBody = CGList([templateBody, CGIfWrapper(setDictionary, "!done")]) stringTypes = [t for t in memberTypes if t.isString() or t.isEnum()] numericTypes = [t for t in memberTypes if t.isNumeric()] @@ -5547,30 +6335,40 @@ def incrementNestingLevel(): # can use "break" for flow control. 
def getStringOrPrimitiveConversion(memberType): name = getUnionMemberName(memberType) - return CGGeneric("done = (failed = !%s.TrySetTo%s(cx, ${val}, tryNext)) || !tryNext;\n" - "break;\n" % (unionArgumentObj, name)) + return CGGeneric( + "done = (failed = !%s.TrySetTo%s(cx, ${val}, tryNext)) || !tryNext;\n" + "break;\n" % (unionArgumentObj, name) + ) + other = CGList([]) stringConversion = [getStringOrPrimitiveConversion(t) for t in stringTypes] - numericConversion = [getStringOrPrimitiveConversion(t) for t in numericTypes] - booleanConversion = [getStringOrPrimitiveConversion(t) for t in booleanTypes] + numericConversion = [ + getStringOrPrimitiveConversion(t) for t in numericTypes + ] + booleanConversion = [ + getStringOrPrimitiveConversion(t) for t in booleanTypes + ] if stringConversion: if booleanConversion: - other.append(CGIfWrapper(booleanConversion[0], - "${val}.isBoolean()")) + other.append( + CGIfWrapper(booleanConversion[0], "${val}.isBoolean()") + ) if numericConversion: - other.append(CGIfWrapper(numericConversion[0], - "${val}.isNumber()")) + other.append(CGIfWrapper(numericConversion[0], "${val}.isNumber()")) other.append(stringConversion[0]) elif numericConversion: if booleanConversion: - other.append(CGIfWrapper(booleanConversion[0], - "${val}.isBoolean()")) + other.append( + CGIfWrapper(booleanConversion[0], "${val}.isBoolean()") + ) other.append(numericConversion[0]) else: assert booleanConversion other.append(booleanConversion[0]) - other = CGWrapper(CGIndenter(other), pre="do {\n", post="} while (false);\n") + other = CGWrapper( + CGIndenter(other), pre="do {\n", post="} while (false);\n" + ) if hasObjectTypes or setDictionary: other = CGWrapper(CGIndenter(other), "{\n", post="}\n") if object: @@ -5584,9 +6382,12 @@ def getStringOrPrimitiveConversion(memberType): else: other = None - templateBody = CGWrapper(templateBody, pre="bool done = false, failed = false, tryNext;\n") - throw = CGGeneric(fill( - """ + templateBody = CGWrapper( + templateBody, pre="bool done = false, failed = false, tryNext;\n" + ) + throw = CGGeneric( + fill( + """ if (failed) { $*{exceptionCode} } @@ -5595,11 +6396,15 @@ def getStringOrPrimitiveConversion(memberType): $*{exceptionCode} } """, - exceptionCode=exceptionCode, - desc=firstCap(sourceDescription), - names=", ".join(prettyNames))) + exceptionCode=exceptionCode, + desc=firstCap(sourceDescription), + names=", ".join(prettyNames), + ) + ) - templateBody = CGWrapper(CGIndenter(CGList([templateBody, throw])), pre="{\n", post="}\n") + templateBody = CGWrapper( + CGIndenter(CGList([templateBody, throw])), pre="{\n", post="}\n" + ) typeName = CGUnionStruct.unionTypeDecl(type, isOwningUnion) argumentTypeName = typeName + "Argument" @@ -5608,9 +6413,9 @@ def getStringOrPrimitiveConversion(memberType): def handleNull(templateBody, setToNullVar, extraConditionForNull=""): nullTest = "%s${val}.isNullOrUndefined()" % extraConditionForNull - return CGIfElseWrapper(nullTest, - CGGeneric("%s.SetNull();\n" % setToNullVar), - templateBody) + return CGIfElseWrapper( + nullTest, CGGeneric("%s.SetNull();\n" % setToNullVar), templateBody + ) if type.hasNullableType: assert not nullable @@ -5620,8 +6425,11 @@ def handleNull(templateBody, setToNullVar, extraConditionForNull=""): extraConditionForNull = "!(${haveValue}) || " else: extraConditionForNull = "" - templateBody = handleNull(templateBody, unionArgumentObj, - extraConditionForNull=extraConditionForNull) + templateBody = handleNull( + templateBody, + unionArgumentObj, + 
extraConditionForNull=extraConditionForNull, + ) declType = CGGeneric(typeName) if isOwningUnion: @@ -5662,9 +6470,11 @@ def handleNull(templateBody, setToNullVar, extraConditionForNull=""): else: declArgs = None - if (defaultValue and - not isinstance(defaultValue, IDLNullValue) and - not isinstance(defaultValue, IDLDefaultDictionaryValue)): + if ( + defaultValue + and not isinstance(defaultValue, IDLNullValue) + and not isinstance(defaultValue, IDLDefaultDictionaryValue) + ): tag = defaultValue.type.tag() if tag in numericSuffixes or tag is IDLType.Tags.bool: @@ -5672,29 +6482,35 @@ def handleNull(templateBody, setToNullVar, extraConditionForNull=""): # Make sure we actually construct the thing inside the nullable. value = declLoc + (".SetValue()" if nullable else "") name = getUnionMemberName(defaultValue.type) - default = CGGeneric("%s.RawSetAs%s() = %s;\n" % - (value, name, defaultStr)) + default = CGGeneric( + "%s.RawSetAs%s() = %s;\n" % (value, name, defaultStr) + ) elif isinstance(defaultValue, IDLEmptySequenceValue): name = getUnionMemberName(defaultValue.type) # Make sure we actually construct the thing inside the nullable. value = declLoc + (".SetValue()" if nullable else "") # It's enough to set us to the right type; that will # create an empty array, which is all we need here. - default = CGGeneric("%s.RawSetAs%s();\n" % - (value, name)) + default = CGGeneric("%s.RawSetAs%s();\n" % (value, name)) elif defaultValue.type.isEnum(): name = getUnionMemberName(defaultValue.type) # Make sure we actually construct the thing inside the nullable. value = declLoc + (".SetValue()" if nullable else "") default = CGGeneric( - "%s.RawSetAs%s() = %s::%s;\n" % - (value, name, - defaultValue.type.inner.identifier.name, - getEnumValueName(defaultValue.value))) + "%s.RawSetAs%s() = %s::%s;\n" + % ( + value, + name, + defaultValue.type.inner.identifier.name, + getEnumValueName(defaultValue.value), + ) + ) else: default = CGGeneric( handleDefaultStringValue( - defaultValue, "%s.SetStringLiteral" % unionArgumentObj)) + defaultValue, "%s.SetStringLiteral" % unionArgumentObj + ) + ) templateBody = CGIfElseWrapper("!(${haveValue})", default, templateBody) @@ -5708,10 +6524,14 @@ def handleNull(templateBody, setToNullVar, extraConditionForNull=""): extraConditionForNull = "(${haveValue}) && " else: extraConditionForNull = "" - templateBody = handleNull(templateBody, declLoc, - extraConditionForNull=extraConditionForNull) - elif (not type.hasNullableType and defaultValue and - isinstance(defaultValue, IDLDefaultDictionaryValue)): + templateBody = handleNull( + templateBody, declLoc, extraConditionForNull=extraConditionForNull + ) + elif ( + not type.hasNullableType + and defaultValue + and isinstance(defaultValue, IDLDefaultDictionaryValue) + ): assert type.hasDictionaryType() assert defaultValue.type.isDictionary() if not isOwningUnion and typeNeedsRooting(defaultValue.type): @@ -5720,21 +6540,30 @@ def handleNull(templateBody, setToNullVar, extraConditionForNull=""): ctorArgs = "" initDictionaryWithNull = CGIfWrapper( CGGeneric("return false;\n"), - ('!%s.RawSetAs%s(%s).Init(cx, JS::NullHandleValue, "Member of %s")' - % (declLoc, getUnionMemberName(defaultValue.type), - ctorArgs, type.prettyName()))) - templateBody = CGIfElseWrapper("!(${haveValue})", - initDictionaryWithNull, - templateBody) + ( + '!%s.RawSetAs%s(%s).Init(cx, JS::NullHandleValue, "Member of %s")' + % ( + declLoc, + getUnionMemberName(defaultValue.type), + ctorArgs, + type.prettyName(), + ) + ), + ) + templateBody = CGIfElseWrapper( + 
"!(${haveValue})", initDictionaryWithNull, templateBody + ) templateBody = CGList([constructDecl, templateBody]) - return JSToNativeConversionInfo(templateBody.define(), - declType=declType, - declArgs=declArgs, - holderType=holderType, - holderArgs=holderArgs, - dealWithOptional=isOptional and (not nullable or isOwningUnion)) + return JSToNativeConversionInfo( + templateBody.define(), + declType=declType, + declArgs=declArgs, + holderType=holderType, + holderArgs=holderArgs, + dealWithOptional=isOptional and (not nullable or isOwningUnion), + ) if type.isPromise(): assert not type.nullable() @@ -5818,7 +6647,8 @@ def handleNull(templateBody, setToNullVar, extraConditionForNull=""): } globalObj = JS::GetNonCCWObjectGlobal(unwrappedVal); """, - sourceDescription=sourceDescription) + sourceDescription=sourceDescription, + ) elif isCallbackReturnValue == "Callback": getPromiseGlobal = dedent( """ @@ -5826,12 +6656,14 @@ def handleNull(templateBody, setToNullVar, extraConditionForNull=""): // and use GetEntryGlobal() to get it, with whatever // principal-clamping it ends up doing. globalObj = GetEntryGlobal()->GetGlobalJSObject(); - """) + """ + ) else: getPromiseGlobal = dedent( """ globalObj = JS::CurrentGlobalOrNull(cx); - """) + """ + ) templateBody = fill( """ @@ -5866,32 +6698,35 @@ def handleNull(templateBody, setToNullVar, extraConditionForNull=""): } """, getPromiseGlobal=getPromiseGlobal, - exceptionCode=exceptionCode) + exceptionCode=exceptionCode, + ) - return JSToNativeConversionInfo(templateBody, - declType=declType, - dealWithOptional=isOptional) + return JSToNativeConversionInfo( + templateBody, declType=declType, dealWithOptional=isOptional + ) if type.isGeckoInterface(): assert not isEnforceRange and not isClamp and not isAllowShared descriptor = descriptorProvider.getDescriptor( - type.unroll().inner.identifier.name) + type.unroll().inner.identifier.name + ) - assert descriptor.nativeType != 'JSObject' + assert descriptor.nativeType != "JSObject" if descriptor.interface.isCallback(): - (declType, declArgs, - conversion) = getCallbackConversionInfo(type, descriptor.interface, - isMember, - isCallbackReturnValue, - isOptional) - template = wrapObjectTemplate(conversion, type, - "${declName} = nullptr;\n", - failureCode) - return JSToNativeConversionInfo(template, declType=declType, - declArgs=declArgs, - dealWithOptional=isOptional) + (declType, declArgs, conversion) = getCallbackConversionInfo( + type, descriptor.interface, isMember, isCallbackReturnValue, isOptional + ) + template = wrapObjectTemplate( + conversion, type, "${declName} = nullptr;\n", failureCode + ) + return JSToNativeConversionInfo( + template, + declType=declType, + declArgs=declArgs, + dealWithOptional=isOptional, + ) if descriptor.interface.identifier.name == "WindowProxy": declType = CGGeneric("mozilla::dom::WindowProxyHolder") @@ -5901,19 +6736,25 @@ def handleNull(templateBody, setToNullVar, extraConditionForNull=""): else: windowProxyHolderRef = "${declName}" - failureCode = onFailureBadType(failureCode, descriptor.interface.identifier.name).define() - templateBody = fill(""" + failureCode = onFailureBadType( + failureCode, descriptor.interface.identifier.name + ).define() + templateBody = fill( + """ JS::Rooted source(cx, &$${val}.toObject()); if (NS_FAILED(UnwrapWindowProxyArg(cx, source, ${windowProxyHolderRef}))) { $*{onFailure} } """, windowProxyHolderRef=windowProxyHolderRef, - onFailure=failureCode) - templateBody = wrapObjectTemplate(templateBody, type, - "${declName}.SetNull();\n", 
failureCode) - return JSToNativeConversionInfo(templateBody, declType=declType, - dealWithOptional=isOptional) + onFailure=failureCode, + ) + templateBody = wrapObjectTemplate( + templateBody, type, "${declName}.SetNull();\n", failureCode + ) + return JSToNativeConversionInfo( + templateBody, declType=declType, dealWithOptional=isOptional + ) # This is an interface that we implement as a concrete class # or an XPCOM interface. @@ -5921,8 +6762,9 @@ def handleNull(templateBody, setToNullVar, extraConditionForNull=""): # Allow null pointers for nullable types and old-binding classes, and # use an RefPtr or raw pointer for callback return values to make # them easier to return. - argIsPointer = (type.nullable() or type.unroll().inner.isExternal() or - isCallbackReturnValue) + argIsPointer = ( + type.nullable() or type.unroll().inner.isExternal() or isCallbackReturnValue + ) # Sequence and dictionary members, as well as owning unions (which can # appear here as return values in JS-implemented interfaces) have to @@ -5963,25 +6805,32 @@ def handleNull(templateBody, setToNullVar, extraConditionForNull=""): """ static_assert(IsRefcounted<${typeName}>::value, "We can only store refcounted classes."); """, - typeName=typeName) + typeName=typeName, + ) - if (not descriptor.interface.isExternal()): + if not descriptor.interface.isExternal(): if failureCode is not None: - templateBody += str(CastableObjectUnwrapper( - descriptor, - "${val}", - "${maybeMutableVal}", - "${declName}", - failureCode)) + templateBody += str( + CastableObjectUnwrapper( + descriptor, + "${val}", + "${maybeMutableVal}", + "${declName}", + failureCode, + ) + ) else: - templateBody += str(FailureFatalCastableObjectUnwrapper( - descriptor, - "${val}", - "${maybeMutableVal}", - "${declName}", - exceptionCode, - isCallbackReturnValue, - firstCap(sourceDescription))) + templateBody += str( + FailureFatalCastableObjectUnwrapper( + descriptor, + "${val}", + "${maybeMutableVal}", + "${declName}", + exceptionCode, + isCallbackReturnValue, + firstCap(sourceDescription), + ) + ) else: # External interface. We always have a holder for these, because we # don't actually know whether we have to addref when unwrapping or not. @@ -5994,12 +6843,15 @@ def handleNull(templateBody, setToNullVar, extraConditionForNull=""): else: holderType = "RefPtr<" + typeName + ">" templateBody += ( - "JS::Rooted source(cx, &${val}.toObject());\n" + - "if (NS_FAILED(UnwrapArg<" + typeName + ">(cx, source, getter_AddRefs(${holderName})))) {\n") - templateBody += CGIndenter(onFailureBadType(failureCode, - descriptor.interface.identifier.name)).define() - templateBody += ("}\n" - "MOZ_ASSERT(${holderName});\n") + "JS::Rooted source(cx, &${val}.toObject());\n" + + "if (NS_FAILED(UnwrapArg<" + + typeName + + ">(cx, source, getter_AddRefs(${holderName})))) {\n" + ) + templateBody += CGIndenter( + onFailureBadType(failureCode, descriptor.interface.identifier.name) + ).define() + templateBody += "}\n" "MOZ_ASSERT(${holderName});\n" # And store our value in ${declName} templateBody += "${declName} = ${holderName};\n" @@ -6007,17 +6859,19 @@ def handleNull(templateBody, setToNullVar, extraConditionForNull=""): # Just pass failureCode, not onFailureBadType, here, so we'll report # the thing as not an object as opposed to not implementing whatever # our interface is. 
- templateBody = wrapObjectTemplate(templateBody, type, - "${declName} = nullptr;\n", - failureCode) + templateBody = wrapObjectTemplate( + templateBody, type, "${declName} = nullptr;\n", failureCode + ) declType = CGGeneric(declType) if holderType is not None: holderType = CGGeneric(holderType) - return JSToNativeConversionInfo(templateBody, - declType=declType, - holderType=holderType, - dealWithOptional=isOptional) + return JSToNativeConversionInfo( + templateBody, + declType=declType, + holderType=holderType, + dealWithOptional=isOptional, + ) if type.isSpiderMonkeyInterface(): assert not isEnforceRange and not isClamp @@ -6040,7 +6894,8 @@ def handleNull(templateBody, setToNullVar, extraConditionForNull=""): } """, objRef=objRef, - badType=onFailureBadType(failureCode, type.name).define()) + badType=onFailureBadType(failureCode, type.name).define(), + ) if not isAllowShared and type.isBufferSource(): if type.isArrayBuffer(): isSharedMethod = "JS::IsSharedArrayBufferObject" @@ -6055,9 +6910,11 @@ def handleNull(templateBody, setToNullVar, extraConditionForNull=""): """, isSharedMethod=isSharedMethod, objRef=objRef, - badType=onFailureIsShared().define()) - template = wrapObjectTemplate(template, type, "${declName}.SetNull();\n", - failureCode) + badType=onFailureIsShared().define(), + ) + template = wrapObjectTemplate( + template, type, "${declName}.SetNull();\n", failureCode + ) if not isMember: # This is a bit annoying. In a union we don't want to have a # holder, since unions don't support that. But if we're optional we @@ -6066,7 +6923,9 @@ def handleNull(templateBody, setToNullVar, extraConditionForNull=""): # holder if we're optional and use a RootedSpiderMonkeyInterface # otherwise. if isOptional: - holderType = CGTemplatedType("SpiderMonkeyInterfaceRooter", interfaceType) + holderType = CGTemplatedType( + "SpiderMonkeyInterfaceRooter", interfaceType + ) # If our SpiderMonkey interface is nullable, this will set the # Nullable to be not-null, but that's ok because we make an # explicit SetNull() call on it as needed if our JS value is @@ -6084,17 +6943,19 @@ def handleNull(templateBody, setToNullVar, extraConditionForNull=""): holderType = None holderArgs = None declArgs = None - return JSToNativeConversionInfo(template, - declType=declType, - holderType=holderType, - dealWithOptional=isOptional, - declArgs=declArgs, - holderArgs=holderArgs) + return JSToNativeConversionInfo( + template, + declType=declType, + holderType=holderType, + dealWithOptional=isOptional, + declArgs=declArgs, + holderArgs=holderArgs, + ) if type.isJSString(): assert not isEnforceRange and not isClamp and not isAllowShared if type.nullable(): - raise TypeError("Nullable JSString not supported"); + raise TypeError("Nullable JSString not supported") declArgs = "cx" if isMember: @@ -6103,32 +6964,36 @@ def handleNull(templateBody, setToNullVar, extraConditionForNull=""): declType = "JS::Rooted" if isOptional: - raise TypeError("JSString not supported as optional"); - templateBody = fill(""" + raise TypeError("JSString not supported as optional") + templateBody = fill( + """ if (!($${declName} = ConvertJSValueToJSString(cx, $${val}))) { $*{exceptionCode} } - """ - , - exceptionCode=exceptionCode) + """, + exceptionCode=exceptionCode, + ) if defaultValue is not None: assert not isinstance(defaultValue, IDLNullValue) - defaultCode = fill(""" + defaultCode = fill( + """ static const char data[] = { ${data} }; $${declName} = JS_NewStringCopyN(cx, data, ArrayLength(data) - 1); if (!$${declName}) { 
$*{exceptionCode} } """, - data=", ".join(["'" + char + "'" for char in - defaultValue.value] + ["0"]), - exceptionCode=exceptionCode) + data=", ".join( + ["'" + char + "'" for char in defaultValue.value] + ["0"] + ), + exceptionCode=exceptionCode, + ) templateBody = handleDefault(templateBody, defaultCode) - return JSToNativeConversionInfo(templateBody, - declType=CGGeneric(declType), - declArgs=declArgs) + return JSToNativeConversionInfo( + templateBody, declType=CGGeneric(declType), declArgs=declArgs + ) if type.isDOMString() or type.isUSVString() or type.isUTF8String(): assert not isEnforceRange and not isClamp and not isAllowShared @@ -6162,30 +7027,33 @@ def getConversionCode(varName): } """, var=varName, - exceptionCode=exceptionCode) + exceptionCode=exceptionCode, + ) - conversionCode = fill(""" + conversionCode = fill( + """ if (!ConvertJSValueToString(cx, $${val}, ${nullBehavior}, ${undefinedBehavior}, ${varName})) { $*{exceptionCode} } $*{normalizeCode} - """ - , + """, nullBehavior=nullBehavior, undefinedBehavior=undefinedBehavior, varName=varName, exceptionCode=exceptionCode, - normalizeCode=normalizeCode) + normalizeCode=normalizeCode, + ) if defaultValue is None: return conversionCode if isinstance(defaultValue, IDLNullValue): - assert(type.nullable()) + assert type.nullable() defaultCode = "%s.SetIsVoid(true);\n" % varName else: defaultCode = handleDefaultStringValue( - defaultValue, "%s.AssignLiteral" % varName) + defaultValue, "%s.AssignLiteral" % varName + ) return handleDefault(conversionCode, defaultCode) if isMember: @@ -6197,7 +7065,8 @@ def getConversionCode(varName): return JSToNativeConversionInfo( getConversionCode("${declName}"), declType=CGGeneric(declType), - dealWithOptional=isOptional) + dealWithOptional=isOptional, + ) if isOptional: if type.isUTF8String(): @@ -6206,9 +7075,9 @@ def getConversionCode(varName): else: declType = "Optional" holderType = CGGeneric("binding_detail::FakeString") - conversionCode = ("%s" - "${declName} = &${holderName};\n" % - getConversionCode("${holderName}")) + conversionCode = "%s" "${declName} = &${holderName};\n" % getConversionCode( + "${holderName}" + ) else: if type.isUTF8String(): declType = "binding_detail::FakeString" @@ -6219,37 +7088,38 @@ def getConversionCode(varName): # No need to deal with optional here; we handled it already return JSToNativeConversionInfo( - conversionCode, - declType=CGGeneric(declType), - holderType=holderType) + conversionCode, declType=CGGeneric(declType), holderType=holderType + ) if type.isByteString(): assert not isEnforceRange and not isClamp and not isAllowShared nullable = toStringBool(type.nullable()) - conversionCode = fill(""" + conversionCode = fill( + """ if (!ConvertJSValueToByteString(cx, $${val}, ${nullable}, "${sourceDescription}", $${declName})) { $*{exceptionCode} } """, nullable=nullable, sourceDescription=sourceDescription, - exceptionCode=exceptionCode) + exceptionCode=exceptionCode, + ) if defaultValue is not None: if isinstance(defaultValue, IDLNullValue): - assert(type.nullable()) + assert type.nullable() defaultCode = "${declName}.SetIsVoid(true);\n" else: defaultCode = handleDefaultStringValue( - defaultValue, "${declName}.AssignLiteral") + defaultValue, "${declName}.AssignLiteral" + ) conversionCode = handleDefault(conversionCode, defaultCode) return JSToNativeConversionInfo( - conversionCode, - declType=CGGeneric("nsCString"), - dealWithOptional=isOptional) + conversionCode, declType=CGGeneric("nsCString"), dealWithOptional=isOptional + ) if type.isEnum(): assert 
not isEnforceRange and not isClamp and not isAllowShared @@ -6272,11 +7142,13 @@ def getConversionCode(varName): # conversion" code starts passing in an exceptionCode. At which # point we'll need to figure out what that even means. assert exceptionCode == "return false;\n" - handleInvalidEnumValueCode = dedent(""" + handleInvalidEnumValueCode = dedent( + """ if (index < 0) { return true; } - """) + """ + ) template = fill( """ @@ -6295,27 +7167,32 @@ def getConversionCode(varName): handleInvalidEnumValueCode=handleInvalidEnumValueCode, exceptionCode=exceptionCode, enumLoc=enumLoc, - sourceDescription=sourceDescription) + sourceDescription=sourceDescription, + ) setNull = "${declName}.SetNull();\n" if type.nullable(): - template = CGIfElseWrapper("${val}.isNullOrUndefined()", - CGGeneric(setNull), - CGGeneric(template)).define() + template = CGIfElseWrapper( + "${val}.isNullOrUndefined()", CGGeneric(setNull), CGGeneric(template) + ).define() if defaultValue is not None: if isinstance(defaultValue, IDLNullValue): assert type.nullable() template = handleDefault(template, setNull) else: - assert(defaultValue.type.tag() == IDLType.Tags.domstring) - template = handleDefault(template, - ("%s = %s::%s;\n" % - (enumLoc, enumName, - getEnumValueName(defaultValue.value)))) - return JSToNativeConversionInfo(template, declType=CGGeneric(declType), - dealWithOptional=isOptional) + assert defaultValue.type.tag() == IDLType.Tags.domstring + template = handleDefault( + template, + ( + "%s = %s::%s;\n" + % (enumLoc, enumName, getEnumValueName(defaultValue.value)) + ), + ) + return JSToNativeConversionInfo( + template, declType=CGGeneric(declType), dealWithOptional=isOptional + ) if type.isCallback(): assert not isEnforceRange and not isClamp and not isAllowShared @@ -6325,48 +7202,49 @@ def getConversionCode(varName): callback = type.unroll().callback name = callback.identifier.name - (declType, declArgs, - conversion) = getCallbackConversionInfo(type, callback, isMember, - isCallbackReturnValue, - isOptional) + (declType, declArgs, conversion) = getCallbackConversionInfo( + type, callback, isMember, isCallbackReturnValue, isOptional + ) if allowTreatNonCallableAsNull and type.treatNonCallableAsNull(): haveCallable = "JS::IsCallable(&${val}.toObject())" if not isDefinitelyObject: haveCallable = "${val}.isObject() && " + haveCallable if defaultValue is not None: - assert(isinstance(defaultValue, IDLNullValue)) + assert isinstance(defaultValue, IDLNullValue) haveCallable = "(${haveValue}) && " + haveCallable template = ( - ("if (%s) {\n" % haveCallable) + - conversion + - "} else {\n" + ("if (%s) {\n" % haveCallable) + conversion + "} else {\n" " ${declName} = nullptr;\n" - "}\n") + "}\n" + ) elif allowTreatNonCallableAsNull and type.treatNonObjectAsNull(): if not isDefinitelyObject: haveObject = "${val}.isObject()" if defaultValue is not None: - assert(isinstance(defaultValue, IDLNullValue)) + assert isinstance(defaultValue, IDLNullValue) haveObject = "(${haveValue}) && " + haveObject - template = CGIfElseWrapper(haveObject, - CGGeneric(conversion), - CGGeneric("${declName} = nullptr;\n")).define() + template = CGIfElseWrapper( + haveObject, + CGGeneric(conversion), + CGGeneric("${declName} = nullptr;\n"), + ).define() else: template = conversion else: template = wrapObjectTemplate( - "if (JS::IsCallable(&${val}.toObject())) {\n" + - conversion + - "} else {\n" + - indent(onFailureNotCallable(failureCode).define()) + - "}\n", + "if (JS::IsCallable(&${val}.toObject())) {\n" + + conversion + + "} else {\n" + + 
indent(onFailureNotCallable(failureCode).define()) + + "}\n", type, "${declName} = nullptr;\n", - failureCode) - return JSToNativeConversionInfo(template, declType=declType, - declArgs=declArgs, - dealWithOptional=isOptional) + failureCode, + ) + return JSToNativeConversionInfo( + template, declType=declType, declArgs=declArgs, dealWithOptional=isOptional + ) if type.isAny(): assert not isEnforceRange and not isClamp and not isAllowShared @@ -6389,9 +7267,13 @@ def getConversionCode(varName): # $${passedToJSImpl} is the literal `false`. But Apple is shipping a # buggy clang (clang 3.9) in Xcode 8.3, so there even the parens are not # enough. So we manually disable some warnings in clang. - if not isinstance(descriptorProvider, Descriptor) or descriptorProvider.interface.isJSImplemented(): - templateBody = fill( - """ + if ( + not isinstance(descriptorProvider, Descriptor) + or descriptorProvider.interface.isJSImplemented() + ): + templateBody = ( + fill( + """ #ifdef __clang__ #pragma clang diagnostic push #pragma clang diagnostic ignored "-Wunreachable-code" @@ -6405,8 +7287,11 @@ def getConversionCode(varName): #pragma clang diagnostic pop #endif // __clang__ """, - sourceDescription=sourceDescription, - exceptionCode=exceptionCode) + templateBody + sourceDescription=sourceDescription, + exceptionCode=exceptionCode, + ) + + templateBody + ) # We may not have a default value if we're being converted for # a setter, say. @@ -6417,19 +7302,24 @@ def getConversionCode(varName): assert isinstance(defaultValue, IDLUndefinedValue) defaultHandling = "${declName} = JS::UndefinedValue();\n" templateBody = handleDefault(templateBody, defaultHandling) - return JSToNativeConversionInfo(templateBody, - declType=CGGeneric(declType), - declArgs=declArgs) + return JSToNativeConversionInfo( + templateBody, declType=CGGeneric(declType), declArgs=declArgs + ) if type.isObject(): assert not isEnforceRange and not isClamp and not isAllowShared - return handleJSObjectType(type, isMember, failureCode, exceptionCode, sourceDescription) + return handleJSObjectType( + type, isMember, failureCode, exceptionCode, sourceDescription + ) if type.isDictionary(): # There are no nullable dictionary-typed arguments or dictionary-typed # dictionary members. - assert(not type.nullable() or isCallbackReturnValue or - (isMember and isMember != "Dictionary")) + assert ( + not type.nullable() + or isCallbackReturnValue + or (isMember and isMember != "Dictionary") + ) # All optional dictionary-typed arguments always have default values, # but dictionary-typed dictionary members can be optional. assert not isOptional or isMember == "Dictionary" @@ -6452,9 +7342,12 @@ def getConversionCode(varName): # NOTE: if isNullOrUndefined or isDefinitelyObject are true, # we know we have a value, so we don't have to worry about the # default value. - if (not isNullOrUndefined and not isDefinitelyObject and - defaultValue is not None): - assert(isinstance(defaultValue, IDLDefaultDictionaryValue)) + if ( + not isNullOrUndefined + and not isDefinitelyObject + and defaultValue is not None + ): + assert isinstance(defaultValue, IDLDefaultDictionaryValue) # Initializing from JS null does the right thing to give # us a default-initialized dictionary. val = "(${haveValue}) ? ${val} : JS::NullHandleValue" @@ -6472,7 +7365,8 @@ def getConversionCode(varName): # conversion from JS has a dictionary-typed member with a default # value of {}. 
args = "" - conversionCode = fill(""" + conversionCode = fill( + """ if (!${dictLoc}.Init(${args}"${desc}", $${passedToJSImpl})) { $*{exceptionCode} } @@ -6480,7 +7374,8 @@ def getConversionCode(varName): dictLoc=dictLoc, args=args, desc=firstCap(sourceDescription), - exceptionCode=exceptionCode) + exceptionCode=exceptionCode, + ) if failureCode is not None: # This means we're part of an overload or union conversion, and @@ -6500,15 +7395,18 @@ def getConversionCode(varName): """, val=val, failureCode=failureCode, - conversionCode=conversionCode) + conversionCode=conversionCode, + ) else: template = conversionCode if type.nullable(): declType = CGTemplatedType("Nullable", declType) - template = CGIfElseWrapper("${val}.isNullOrUndefined()", - CGGeneric("${declName}.SetNull();\n"), - CGGeneric(template)).define() + template = CGIfElseWrapper( + "${val}.isNullOrUndefined()", + CGGeneric("${declName}.SetNull();\n"), + CGGeneric(template), + ).define() # Dictionary arguments that might contain traceable things need to get # traced @@ -6522,9 +7420,9 @@ def getConversionCode(varName): else: declArgs = None - return JSToNativeConversionInfo(template, declType=declType, - declArgs=declArgs, - dealWithOptional=isOptional) + return JSToNativeConversionInfo( + template, declType=declType, declArgs=declArgs, dealWithOptional=isOptional + ) if type.isVoid(): assert not isOptional @@ -6558,7 +7456,8 @@ def getConversionCode(varName): template = dedent( """ ${declName}.SetNull(); - """) + """ + ) if not alwaysNull: template = fill( """ @@ -6573,13 +7472,14 @@ def getConversionCode(varName): conversionBehavior=conversionBehavior, sourceDescription=firstCap(sourceDescription), writeLoc=writeLoc, - exceptionCode=exceptionCode) + exceptionCode=exceptionCode, + ) else: - assert(defaultValue is None or - not isinstance(defaultValue, IDLNullValue)) + assert defaultValue is None or not isinstance(defaultValue, IDLNullValue) writeLoc = "${declName}" readLoc = writeLoc - template = fill(""" + template = fill( + """ if (!ValueToPrimitive<${typeName}, ${conversionBehavior}>(cx, $${val}, "${sourceDescription}", &${writeLoc})) { $*{exceptionCode} } @@ -6588,38 +7488,45 @@ def getConversionCode(varName): conversionBehavior=conversionBehavior, sourceDescription=firstCap(sourceDescription), writeLoc=writeLoc, - exceptionCode=exceptionCode) + exceptionCode=exceptionCode, + ) declType = CGGeneric(typeName) if type.isFloat() and not type.isUnrestricted() and not alwaysNull: if lenientFloatCode is not None: nonFiniteCode = lenientFloatCode else: - nonFiniteCode = ('cx.ThrowErrorMessage("%s");\n' - "%s" % (firstCap(sourceDescription), exceptionCode)) + nonFiniteCode = 'cx.ThrowErrorMessage("%s");\n' "%s" % ( + firstCap(sourceDescription), + exceptionCode, + ) # We're appending to an if-block brace, so strip trailing whitespace # and add an extra space before the else. 
template = template.rstrip() - template += fill(""" + template += fill( + """ else if (!mozilla::IsFinite(${readLoc})) { $*{nonFiniteCode} } """, readLoc=readLoc, - nonFiniteCode=nonFiniteCode) + nonFiniteCode=nonFiniteCode, + ) - if (defaultValue is not None and + if ( + defaultValue is not None + and # We already handled IDLNullValue, so just deal with the other ones - not isinstance(defaultValue, IDLNullValue)): + not isinstance(defaultValue, IDLNullValue) + ): tag = defaultValue.type.tag() defaultStr = getHandleDefault(defaultValue) - template = handleDefault( - template, - "%s = %s;\n" % (writeLoc, defaultStr)) + template = handleDefault(template, "%s = %s;\n" % (writeLoc, defaultStr)) - return JSToNativeConversionInfo(template, declType=declType, - dealWithOptional=isOptional) + return JSToNativeConversionInfo( + template, declType=declType, dealWithOptional=isOptional + ) def instantiateJSToNativeConversion(info, replacements, checkForValue=False): @@ -6632,13 +7539,19 @@ def instantiateJSToNativeConversion(info, replacements, checkForValue=False): a check for ${haveValue}. """ templateBody, declType, holderType, dealWithOptional = ( - info.template, info.declType, info.holderType, info.dealWithOptional) + info.template, + info.declType, + info.holderType, + info.dealWithOptional, + ) if dealWithOptional and not checkForValue: raise TypeError("Have to deal with optional things, but don't know how") if checkForValue and declType is None: - raise TypeError("Need to predeclare optional things, so they will be " - "outside the check for big enough arg count!") + raise TypeError( + "Need to predeclare optional things, so they will be " + "outside the check for big enough arg count!" + ) # We can't precompute our holder constructor arguments, since # those might depend on ${declName}, which we change below. 
Just @@ -6656,14 +7569,20 @@ def getArgsCGThing(args): declType = CGTemplatedType("Optional", declType) declCtorArgs = None elif info.declArgs is not None: - declCtorArgs = CGWrapper(getArgsCGThing(info.declArgs), - pre="(", post=")") + declCtorArgs = CGWrapper(getArgsCGThing(info.declArgs), pre="(", post=")") else: declCtorArgs = None result.append( - CGList([declType, CGGeneric(" "), + CGList( + [ + declType, + CGGeneric(" "), CGGeneric(originalDeclName), - declCtorArgs, CGGeneric(";\n")])) + declCtorArgs, + CGGeneric(";\n"), + ] + ) + ) originalHolderName = replacements["holderName"] if holderType is not None: @@ -6672,48 +7591,68 @@ def getArgsCGThing(args): holderType = CGTemplatedType("Maybe", holderType) holderCtorArgs = None elif info.holderArgs is not None: - holderCtorArgs = CGWrapper(getArgsCGThing(info.holderArgs), - pre="(", post=")") + holderCtorArgs = CGWrapper( + getArgsCGThing(info.holderArgs), pre="(", post=")" + ) else: holderCtorArgs = None result.append( - CGList([holderType, CGGeneric(" "), + CGList( + [ + holderType, + CGGeneric(" "), CGGeneric(originalHolderName), - holderCtorArgs, CGGeneric(";\n")])) + holderCtorArgs, + CGGeneric(";\n"), + ] + ) + ) if "maybeMutableVal" not in replacements: replacements["maybeMutableVal"] = replacements["val"] - conversion = CGGeneric( - string.Template(templateBody).substitute(replacements)) + conversion = CGGeneric(string.Template(templateBody).substitute(replacements)) if checkForValue: if dealWithOptional: declConstruct = CGIndenter( - CGGeneric("%s.Construct(%s);\n" % - (originalDeclName, - getArgsCGThing(info.declArgs).define() if - info.declArgs else ""))) + CGGeneric( + "%s.Construct(%s);\n" + % ( + originalDeclName, + getArgsCGThing(info.declArgs).define() if info.declArgs else "", + ) + ) + ) if holderType is not None: holderConstruct = CGIndenter( - CGGeneric("%s.emplace(%s);\n" % - (originalHolderName, - getArgsCGThing(info.holderArgs).define() if - info.holderArgs else ""))) + CGGeneric( + "%s.emplace(%s);\n" + % ( + originalHolderName, + getArgsCGThing(info.holderArgs).define() + if info.holderArgs + else "", + ) + ) + ) else: holderConstruct = None else: declConstruct = None holderConstruct = None - conversion = CGList([ - CGGeneric( - string.Template("if (${haveValue}) {\n").substitute(replacements)), - declConstruct, - holderConstruct, - CGIndenter(conversion), - CGGeneric("}\n") - ]) + conversion = CGList( + [ + CGGeneric( + string.Template("if (${haveValue}) {\n").substitute(replacements) + ), + declConstruct, + holderConstruct, + CGIndenter(conversion), + CGGeneric("}\n"), + ] + ) result.append(conversion) return result @@ -6725,8 +7664,13 @@ def convertConstIDLValueToJSVal(value): if isinstance(value, IDLUndefinedValue): return "JS::UndefinedValue()" tag = value.type.tag() - if tag in [IDLType.Tags.int8, IDLType.Tags.uint8, IDLType.Tags.int16, - IDLType.Tags.uint16, IDLType.Tags.int32]: + if tag in [ + IDLType.Tags.int8, + IDLType.Tags.uint8, + IDLType.Tags.int16, + IDLType.Tags.uint16, + IDLType.Tags.int32, + ]: return "JS::Int32Value(%s)" % (value.value) if tag == IDLType.Tags.uint32: return "JS::NumberValue(%sU)" % (value.value) @@ -6750,23 +7694,30 @@ class CGArgumentConverter(CGThing): sentence. If it ends up at the beginning of a sentence, its first character will be automatically uppercased. 
""" - def __init__(self, argument, index, descriptorProvider, - argDescription, member, - invalidEnumValueFatal=True, lenientFloatCode=None): + + def __init__( + self, + argument, + index, + descriptorProvider, + argDescription, + member, + invalidEnumValueFatal=True, + lenientFloatCode=None, + ): CGThing.__init__(self) self.argument = argument self.argDescription = argDescription - assert(not argument.defaultValue or argument.optional) + assert not argument.defaultValue or argument.optional - replacer = { - "index": index, - "argc": "args.length()" - } + replacer = {"index": index, "argc": "args.length()"} self.replacementVariables = { "declName": "arg%d" % index, "holderName": ("arg%d" % index) + "_holder", "obj": "obj", - "passedToJSImpl": toStringBool(isJSImplementedDescriptor(descriptorProvider)) + "passedToJSImpl": toStringBool( + isJSImplementedDescriptor(descriptorProvider) + ), } # If we have a method generated by the maplike/setlike portion of an # interface, arguments can possibly be undefined, but will need to be @@ -6776,14 +7727,18 @@ def __init__(self, argument, index, descriptorProvider, # don't take arguments. if member.isMethod() and member.isMaplikeOrSetlikeOrIterableMethod(): self.replacementVariables["val"] = string.Template( - "args.get(${index})").substitute(replacer) + "args.get(${index})" + ).substitute(replacer) self.replacementVariables["maybeMutableVal"] = string.Template( - "args[${index}]").substitute(replacer) + "args[${index}]" + ).substitute(replacer) else: self.replacementVariables["val"] = string.Template( - "args[${index}]").substitute(replacer) - haveValueCheck = string.Template( - "args.hasDefined(${index})").substitute(replacer) + "args[${index}]" + ).substitute(replacer) + haveValueCheck = string.Template("args.hasDefined(${index})").substitute( + replacer + ) self.replacementVariables["haveValue"] = haveValueCheck self.descriptorProvider = descriptorProvider if self.argument.canHaveMissingValue(): @@ -6797,20 +7752,19 @@ def define(self): typeConversion = getJSToNativeConversionInfo( self.argument.type, self.descriptorProvider, - isOptional=(self.argcAndIndex is not None and - not self.argument.variadic), + isOptional=(self.argcAndIndex is not None and not self.argument.variadic), invalidEnumValueFatal=self.invalidEnumValueFatal, defaultValue=self.argument.defaultValue, lenientFloatCode=self.lenientFloatCode, isMember="Variadic" if self.argument.variadic else False, allowTreatNonCallableAsNull=self.argument.allowTreatNonCallableAsNull(), - sourceDescription=self.argDescription) + sourceDescription=self.argDescription, + ) if not self.argument.variadic: return instantiateJSToNativeConversion( - typeConversion, - self.replacementVariables, - self.argcAndIndex is not None).define() + typeConversion, self.replacementVariables, self.argcAndIndex is not None + ).define() # Variadic arguments get turned into a sequence. 
if typeConversion.dealWithOptional: @@ -6819,11 +7773,14 @@ def define(self): raise TypeError("Shouldn't need holders for variadics") replacer = dict(self.argcAndIndex, **self.replacementVariables) - replacer["seqType"] = CGTemplatedType("AutoSequence", - typeConversion.declType).define() + replacer["seqType"] = CGTemplatedType( + "AutoSequence", typeConversion.declType + ).define() if typeNeedsRooting(self.argument.type): - rooterDecl = ("SequenceRooter<%s> ${holderName}(cx, &${declName});\n" % - typeConversion.declType.define()) + rooterDecl = ( + "SequenceRooter<%s> ${holderName}(cx, &${declName});\n" + % typeConversion.declType.define() + ) else: rooterDecl = "" replacer["elemType"] = typeConversion.declType.define() @@ -6832,9 +7789,10 @@ def define(self): # NOTE: Keep this in sync with sequence conversions as needed variadicConversion = string.Template( - "${seqType} ${declName};\n" + - rooterDecl + - dedent(""" + "${seqType} ${declName};\n" + + rooterDecl + + dedent( + """ if (${argc} > ${index}) { if (!${declName}.SetCapacity(${argc} - ${index}, mozilla::fallible)) { JS_ReportOutOfMemory(cx); @@ -6843,26 +7801,32 @@ def define(self): for (uint32_t variadicArg = ${index}; variadicArg < ${argc}; ++variadicArg) { // OK to do infallible append here, since we ensured capacity already. ${elemType}& slot = *${declName}.AppendElement(${elementInitializer}); - """) + """ + ) ).substitute(replacer) val = string.Template("args[variadicArg]").substitute(replacer) variadicConversion += indent( - string.Template(typeConversion.template).substitute({ - "val": val, - "maybeMutableVal": val, - "declName": "slot", - # We only need holderName here to handle isExternal() - # interfaces, which use an internal holder for the - # conversion even when forceOwningType ends up true. - "holderName": "tempHolder", - # Use the same ${obj} as for the variadic arg itself - "obj": replacer["obj"], - "passedToJSImpl": toStringBool(isJSImplementedDescriptor(self.descriptorProvider)) - }), 4) + string.Template(typeConversion.template).substitute( + { + "val": val, + "maybeMutableVal": val, + "declName": "slot", + # We only need holderName here to handle isExternal() + # interfaces, which use an internal holder for the + # conversion even when forceOwningType ends up true. + "holderName": "tempHolder", + # Use the same ${obj} as for the variadic arg itself + "obj": replacer["obj"], + "passedToJSImpl": toStringBool( + isJSImplementedDescriptor(self.descriptorProvider) + ), + } + ), + 4, + ) - variadicConversion += (" }\n" - "}\n") + variadicConversion += " }\n" "}\n" return variadicConversion @@ -6894,10 +7858,16 @@ def getMaybeWrapValueFuncForType(type): recordWrapLevel = 0 -def getWrapTemplateForType(type, descriptorProvider, result, successCode, - returnsNewObject, exceptionCode, - spiderMonkeyInterfacesAreStructs, - isConstructorRetval=False): +def getWrapTemplateForType( + type, + descriptorProvider, + result, + successCode, + returnsNewObject, + exceptionCode, + spiderMonkeyInterfacesAreStructs, + isConstructorRetval=False, +): """ Reflect a C++ value stored in "result", of IDL type "type" into JS. 
The "successCode" is the code to run once we have successfully done the @@ -6974,7 +7944,8 @@ def _setValue(value, wrapAsType=None, setter="set"): """, maybeWrap=getMaybeWrapValueFuncForType(wrapAsType), exceptionCode=exceptionCode, - successCode=successCode) + successCode=successCode, + ) return ("${jsvalRef}.%s(%s);\n" % (setter, value)) + tail def wrapAndSetPtr(wrapCall, failureCode=None): @@ -6993,17 +7964,23 @@ def wrapAndSetPtr(wrapCall, failureCode=None): """, wrapCall=wrapCall, failureCode=failureCode, - successCode=successCode) + successCode=successCode, + ) if type is None or type.isVoid(): return (setUndefined(), True) if (type.isSequence() or type.isRecord()) and type.nullable(): # These are both wrapped in Nullable<> - recTemplate, recInfall = getWrapTemplateForType(type.inner, descriptorProvider, - "%s.Value()" % result, successCode, - returnsNewObject, exceptionCode, - spiderMonkeyInterfacesAreStructs) + recTemplate, recInfall = getWrapTemplateForType( + type.inner, + descriptorProvider, + "%s.Value()" % result, + successCode, + returnsNewObject, + exceptionCode, + spiderMonkeyInterfacesAreStructs, + ) code = fill( """ @@ -7014,7 +7991,8 @@ def wrapAndSetPtr(wrapCall, failureCode=None): """, result=result, setNull=setNull(), - recTemplate=recTemplate) + recTemplate=recTemplate, + ) return code, recInfall if type.isSequence(): @@ -7026,17 +8004,19 @@ def wrapAndSetPtr(wrapCall, failureCode=None): index = "sequenceIdx%d" % sequenceWrapLevel sequenceWrapLevel += 1 innerTemplate = wrapForType( - type.inner, descriptorProvider, + type.inner, + descriptorProvider, { - 'result': "%s[%s]" % (result, index), - 'successCode': "break;\n", - 'jsvalRef': "tmp", - 'jsvalHandle': "&tmp", - 'returnsNewObject': returnsNewObject, - 'exceptionCode': exceptionCode, - 'obj': "returnArray", - 'spiderMonkeyInterfacesAreStructs': spiderMonkeyInterfacesAreStructs - }) + "result": "%s[%s]" % (result, index), + "successCode": "break;\n", + "jsvalRef": "tmp", + "jsvalHandle": "&tmp", + "returnsNewObject": returnsNewObject, + "exceptionCode": exceptionCode, + "obj": "returnArray", + "spiderMonkeyInterfacesAreStructs": spiderMonkeyInterfacesAreStructs, + }, + ) sequenceWrapLevel -= 1 code = fill( """ @@ -7067,7 +8047,8 @@ def wrapAndSetPtr(wrapCall, failureCode=None): exceptionCode=exceptionCode, index=index, innerTemplate=innerTemplate, - set=setObject("*returnArray")) + set=setObject("*returnArray"), + ) return (code, False) @@ -7080,17 +8061,19 @@ def wrapAndSetPtr(wrapCall, failureCode=None): valueName = "recordValue%d" % recordWrapLevel recordWrapLevel += 1 innerTemplate = wrapForType( - type.inner, descriptorProvider, + type.inner, + descriptorProvider, { - 'result': valueName, - 'successCode': "break;\n", - 'jsvalRef': "tmp", - 'jsvalHandle': "&tmp", - 'returnsNewObject': returnsNewObject, - 'exceptionCode': exceptionCode, - 'obj': "returnObj", - 'spiderMonkeyInterfacesAreStructs': spiderMonkeyInterfacesAreStructs - }) + "result": valueName, + "successCode": "break;\n", + "jsvalRef": "tmp", + "jsvalHandle": "&tmp", + "returnsNewObject": returnsNewObject, + "exceptionCode": exceptionCode, + "obj": "returnObj", + "spiderMonkeyInterfacesAreStructs": spiderMonkeyInterfacesAreStructs, + }, + ) recordWrapLevel -= 1 if type.keyType.isByteString(): # There is no length-taking JS_DefineProperty. 
So to keep @@ -7143,7 +8126,8 @@ def wrapAndSetPtr(wrapCall, failureCode=None): innerTemplate=innerTemplate, expandedKeyDecl=expandedKeyDecl, keyName=keyName, - set=setObject("*returnObj")) + set=setObject("*returnObj"), + ) return (code, False) @@ -7157,25 +8141,32 @@ def wrapAndSetPtr(wrapCall, failureCode=None): # conversion by hand here is pretty annoying because we have to handle # the various RefPtr, rawptr, NonNull, etc cases, which ToJSValue will # handle for us. So just eat the cost of the function call. - return (wrapAndSetPtr("ToJSValue(cx, %s, ${jsvalHandle})" % result), - False) + return (wrapAndSetPtr("ToJSValue(cx, %s, ${jsvalHandle})" % result), False) if type.isGeckoInterface() and not type.isCallbackInterface(): - descriptor = descriptorProvider.getDescriptor(type.unroll().inner.identifier.name) + descriptor = descriptorProvider.getDescriptor( + type.unroll().inner.identifier.name + ) if type.nullable(): if descriptor.interface.identifier.name == "WindowProxy": - template, infal = getWrapTemplateForType(type.inner, descriptorProvider, - "%s.Value()" % result, successCode, - returnsNewObject, exceptionCode, - spiderMonkeyInterfacesAreStructs) - return ("if (%s.IsNull()) {\n" % result + - indent(setNull()) + - "}\n" + - template, infal) - - wrappingCode = ("if (!%s) {\n" % (result) + - indent(setNull()) + - "}\n") + template, infal = getWrapTemplateForType( + type.inner, + descriptorProvider, + "%s.Value()" % result, + successCode, + returnsNewObject, + exceptionCode, + spiderMonkeyInterfacesAreStructs, + ) + return ( + "if (%s.IsNull()) {\n" % result + + indent(setNull()) + + "}\n" + + template, + infal, + ) + + wrappingCode = "if (!%s) {\n" % (result) + indent(setNull()) + "}\n" else: wrappingCode = "" @@ -7191,8 +8182,7 @@ def wrapAndSetPtr(wrapCall, failureCode=None): wrap = "%s(%s)" % (wrapMethod, wrapArgs) # Can only fail to wrap as a new-binding object if they already # threw an exception. 
- failed = ("MOZ_ASSERT(JS_IsExceptionPending(cx));\n" + - exceptionCode) + failed = "MOZ_ASSERT(JS_IsExceptionPending(cx));\n" + exceptionCode else: if descriptor.notflattened: getIID = "&NS_GET_IID(%s), " % descriptor.nativeType @@ -7209,21 +8199,45 @@ def wrapAndSetPtr(wrapCall, failureCode=None): if type.isDOMString() or type.isUSVString(): if type.nullable(): - return (wrapAndSetPtr("xpc::StringToJsval(cx, %s, ${jsvalHandle})" % result), False) + return ( + wrapAndSetPtr("xpc::StringToJsval(cx, %s, ${jsvalHandle})" % result), + False, + ) else: - return (wrapAndSetPtr("xpc::NonVoidStringToJsval(cx, %s, ${jsvalHandle})" % result), False) + return ( + wrapAndSetPtr( + "xpc::NonVoidStringToJsval(cx, %s, ${jsvalHandle})" % result + ), + False, + ) if type.isByteString(): if type.nullable(): - return (wrapAndSetPtr("ByteStringToJsval(cx, %s, ${jsvalHandle})" % result), False) + return ( + wrapAndSetPtr("ByteStringToJsval(cx, %s, ${jsvalHandle})" % result), + False, + ) else: - return (wrapAndSetPtr("NonVoidByteStringToJsval(cx, %s, ${jsvalHandle})" % result), False) + return ( + wrapAndSetPtr( + "NonVoidByteStringToJsval(cx, %s, ${jsvalHandle})" % result + ), + False, + ) if type.isUTF8String(): if type.nullable(): - return (wrapAndSetPtr("UTF8StringToJsval(cx, %s, ${jsvalHandle})" % result), False) + return ( + wrapAndSetPtr("UTF8StringToJsval(cx, %s, ${jsvalHandle})" % result), + False, + ) else: - return (wrapAndSetPtr("NonVoidUTF8StringToJsval(cx, %s, ${jsvalHandle})" % result), False) + return ( + wrapAndSetPtr( + "NonVoidUTF8StringToJsval(cx, %s, ${jsvalHandle})" % result + ), + False, + ) if type.isEnum(): if type.nullable(): @@ -7239,28 +8253,29 @@ def wrapAndSetPtr(wrapCall, failureCode=None): """, result=resultLoc, exceptionCode=exceptionCode, - successCode=successCode) + successCode=successCode, + ) if type.nullable(): conversion = CGIfElseWrapper( - "%s.IsNull()" % result, - CGGeneric(setNull()), - CGGeneric(conversion)).define() + "%s.IsNull()" % result, CGGeneric(setNull()), CGGeneric(conversion) + ).define() return conversion, False if type.isCallback() or type.isCallbackInterface(): # Callbacks can store null if we nuked the compartments their # objects lived in. wrapCode = setObjectOrNull( - "GetCallbackFromCallbackObject(cx, %(result)s)", - wrapAsType=type) + "GetCallbackFromCallbackObject(cx, %(result)s)", wrapAsType=type + ) if type.nullable(): wrapCode = ( - "if (%(result)s) {\n" + - indent(wrapCode) + - "} else {\n" + - indent(setNull()) + - "}\n") + "if (%(result)s) {\n" + + indent(wrapCode) + + "} else {\n" + + indent(setNull()) + + "}\n" + ) wrapCode = wrapCode % {"result": result} return wrapCode, False @@ -7271,8 +8286,9 @@ def wrapAndSetPtr(wrapCall, failureCode=None): head = "JS::ExposeValueToActiveJS(%s);\n" % result return (head + _setValue(result, wrapAsType=type), False) - if (type.isObject() or (type.isSpiderMonkeyInterface() and - not spiderMonkeyInterfacesAreStructs)): + if type.isObject() or ( + type.isSpiderMonkeyInterface() and not spiderMonkeyInterfacesAreStructs + ): # See comments in GetOrCreateDOMReflector explaining why we need # to wrap here. 
if type.nullable(): @@ -7281,7 +8297,10 @@ def wrapAndSetPtr(wrapCall, failureCode=None): head = """if (%s) { JS::ExposeObjectToActiveJS(%s); } - """ % (result, result) + """ % ( + result, + result, + ) else: toValue = "*%s" setter = setObject @@ -7289,45 +8308,64 @@ def wrapAndSetPtr(wrapCall, failureCode=None): # NB: setObject{,OrNull}(..., some-object-type) calls JS_WrapValue(), so is fallible return (head + setter(toValue % result, wrapAsType=type), False) - if not (type.isUnion() or type.isPrimitive() or type.isDictionary() or - (type.isSpiderMonkeyInterface() and spiderMonkeyInterfacesAreStructs)): + if not ( + type.isUnion() + or type.isPrimitive() + or type.isDictionary() + or (type.isSpiderMonkeyInterface() and spiderMonkeyInterfacesAreStructs) + ): raise TypeError("Need to learn to wrap %s" % type) if type.nullable(): - recTemplate, recInfal = getWrapTemplateForType(type.inner, descriptorProvider, - "%s.Value()" % result, successCode, - returnsNewObject, exceptionCode, - spiderMonkeyInterfacesAreStructs) - return ("if (%s.IsNull()) {\n" % result + - indent(setNull()) + - "}\n" + - recTemplate, recInfal) + recTemplate, recInfal = getWrapTemplateForType( + type.inner, + descriptorProvider, + "%s.Value()" % result, + successCode, + returnsNewObject, + exceptionCode, + spiderMonkeyInterfacesAreStructs, + ) + return ( + "if (%s.IsNull()) {\n" % result + indent(setNull()) + "}\n" + recTemplate, + recInfal, + ) if type.isSpiderMonkeyInterface(): assert spiderMonkeyInterfacesAreStructs # See comments in GetOrCreateDOMReflector explaining why we need # to wrap here. # NB: setObject(..., some-object-type) calls JS_WrapValue(), so is fallible - return (setObject("*%s.Obj()" % result, - wrapAsType=type), False) + return (setObject("*%s.Obj()" % result, wrapAsType=type), False) if type.isUnion(): - return (wrapAndSetPtr("%s.ToJSVal(cx, ${obj}, ${jsvalHandle})" % result), - False) + return (wrapAndSetPtr("%s.ToJSVal(cx, ${obj}, ${jsvalHandle})" % result), False) if type.isDictionary(): - return (wrapAndSetPtr("%s.ToObjectInternal(cx, ${jsvalHandle})" % result), - False) + return ( + wrapAndSetPtr("%s.ToObjectInternal(cx, ${jsvalHandle})" % result), + False, + ) tag = type.tag() - if tag in [IDLType.Tags.int8, IDLType.Tags.uint8, IDLType.Tags.int16, - IDLType.Tags.uint16, IDLType.Tags.int32]: + if tag in [ + IDLType.Tags.int8, + IDLType.Tags.uint8, + IDLType.Tags.int16, + IDLType.Tags.uint16, + IDLType.Tags.int32, + ]: return (setInt32("int32_t(%s)" % result), True) - elif tag in [IDLType.Tags.int64, IDLType.Tags.uint64, - IDLType.Tags.unrestricted_float, IDLType.Tags.float, - IDLType.Tags.unrestricted_double, IDLType.Tags.double]: + elif tag in [ + IDLType.Tags.int64, + IDLType.Tags.uint64, + IDLType.Tags.unrestricted_float, + IDLType.Tags.float, + IDLType.Tags.unrestricted_double, + IDLType.Tags.double, + ]: # XXXbz will cast to double do the "even significand" thing that webidl # calls for for 64-bit ints? Do we care? return (setDouble("double(%s)" % result), True) @@ -7377,15 +8415,17 @@ def wrapForType(type, descriptorProvider, templateValues): return value. 
""" wrap = getWrapTemplateForType( - type, descriptorProvider, - templateValues.get('result', 'result'), - templateValues.get('successCode', None), - templateValues.get('returnsNewObject', False), - templateValues.get('exceptionCode', "return false;\n"), - templateValues.get('spiderMonkeyInterfacesAreStructs', False), - isConstructorRetval=templateValues.get('isConstructorRetval', False))[0] - - defaultValues = {'obj': 'obj'} + type, + descriptorProvider, + templateValues.get("result", "result"), + templateValues.get("successCode", None), + templateValues.get("returnsNewObject", False), + templateValues.get("exceptionCode", "return false;\n"), + templateValues.get("spiderMonkeyInterfacesAreStructs", False), + isConstructorRetval=templateValues.get("isConstructorRetval", False), + )[0] + + defaultValues = {"obj": "obj"} return string.Template(wrap).substitute(defaultValues, **templateValues) @@ -7400,14 +8440,24 @@ def infallibleForMember(member, type, descriptorProvider): We assume that successCode for wrapping up return values cannot contain failure conditions. """ - return getWrapTemplateForType(type, descriptorProvider, 'result', None, - memberReturnsNewObject(member), "return false;\n", - False)[1] + return getWrapTemplateForType( + type, + descriptorProvider, + "result", + None, + memberReturnsNewObject(member), + "return false;\n", + False, + )[1] def leafTypeNeedsCx(type, retVal): - return (type.isAny() or type.isObject() or type.isJSString() or - (retVal and type.isSpiderMonkeyInterface())) + return ( + type.isAny() + or type.isObject() + or type.isJSString() + or (retVal and type.isSpiderMonkeyInterface()) + ) def leafTypeNeedsScopeObject(type, retVal): @@ -7419,18 +8469,15 @@ def leafTypeNeedsRooting(type): def typeNeedsRooting(type): - return typeMatchesLambda(type, - lambda t: leafTypeNeedsRooting(t)) + return typeMatchesLambda(type, lambda t: leafTypeNeedsRooting(t)) def typeNeedsCx(type, retVal=False): - return typeMatchesLambda(type, - lambda t: leafTypeNeedsCx(t, retVal)) + return typeMatchesLambda(type, lambda t: leafTypeNeedsCx(t, retVal)) def typeNeedsScopeObject(type, retVal=False): - return typeMatchesLambda(type, - lambda t: leafTypeNeedsScopeObject(t, retVal)) + return typeMatchesLambda(type, lambda t: leafTypeNeedsScopeObject(t, retVal)) def typeMatchesLambda(type, func): @@ -7441,22 +8488,21 @@ def typeMatchesLambda(type, func): if type.isSequence() or type.isRecord(): return typeMatchesLambda(type.inner, func) if type.isUnion(): - return any(typeMatchesLambda(t, func) for t in - type.unroll().flatMemberTypes) + return any(typeMatchesLambda(t, func) for t in type.unroll().flatMemberTypes) if type.isDictionary(): return dictionaryMatchesLambda(type.inner, func) return func(type) def dictionaryMatchesLambda(dictionary, func): - return (any(typeMatchesLambda(m.type, func) for m in dictionary.members) or - (dictionary.parent and dictionaryMatchesLambda(dictionary.parent, func))) + return any(typeMatchesLambda(m.type, func) for m in dictionary.members) or ( + dictionary.parent and dictionaryMatchesLambda(dictionary.parent, func) + ) # Whenever this is modified, please update CGNativeMember.getRetvalInfo as # needed to keep the types compatible. 
-def getRetvalDeclarationForType(returnType, descriptorProvider, - isMember=False): +def getRetvalDeclarationForType(returnType, descriptorProvider, isMember=False): """ Returns a tuple containing five things: @@ -7534,13 +8580,14 @@ def getRetvalDeclarationForType(returnType, descriptorProvider, nullable = returnType.nullable() if nullable: returnType = returnType.inner - result, _, _, _, _ = getRetvalDeclarationForType(returnType.inner, - descriptorProvider, - isMember="Sequence") + result, _, _, _, _ = getRetvalDeclarationForType( + returnType.inner, descriptorProvider, isMember="Sequence" + ) # While we have our inner type, set up our rooter, if needed if not isMember and typeNeedsRooting(returnType): - rooter = CGGeneric("SequenceRooter<%s > resultRooter(cx, &result);\n" % - result.define()) + rooter = CGGeneric( + "SequenceRooter<%s > resultRooter(cx, &result);\n" % result.define() + ) else: rooter = None result = CGTemplatedType("nsTArray", result) @@ -7551,17 +8598,18 @@ def getRetvalDeclarationForType(returnType, descriptorProvider, nullable = returnType.nullable() if nullable: returnType = returnType.inner - result, _, _, _, _ = getRetvalDeclarationForType(returnType.inner, - descriptorProvider, - isMember="Record") + result, _, _, _, _ = getRetvalDeclarationForType( + returnType.inner, descriptorProvider, isMember="Record" + ) # While we have our inner type, set up our rooter, if needed if not isMember and typeNeedsRooting(returnType): - rooter = CGGeneric("RecordRooter<%s> resultRooter(cx, &result);\n" % - ("nsString, " + result.define())) + rooter = CGGeneric( + "RecordRooter<%s> resultRooter(cx, &result);\n" + % ("nsString, " + result.define()) + ) else: rooter = None - result = CGTemplatedType("Record", [recordKeyDeclType(returnType), - result]) + result = CGTemplatedType("Record", [recordKeyDeclType(returnType), result]) if nullable: result = CGTemplatedType("Nullable", result) return result, "ref", rooter, None, None @@ -7593,27 +8641,35 @@ def getRetvalDeclarationForType(returnType, descriptorProvider, result = CGTemplatedType("Nullable", result) resultArgs = None return result, "ref", None, resultArgs, None - raise TypeError("Don't know how to declare return value for %s" % - returnType) + raise TypeError("Don't know how to declare return value for %s" % returnType) -def needCx(returnType, arguments, extendedAttributes, considerTypes, - static=False): - return (not static and considerTypes and - (typeNeedsCx(returnType, True) or - any(typeNeedsCx(a.type) for a in arguments)) or - 'implicitJSContext' in extendedAttributes) +def needCx(returnType, arguments, extendedAttributes, considerTypes, static=False): + return ( + not static + and considerTypes + and ( + typeNeedsCx(returnType, True) or any(typeNeedsCx(a.type) for a in arguments) + ) + or "implicitJSContext" in extendedAttributes + ) -def needScopeObject(returnType, arguments, extendedAttributes, - isWrapperCached, considerTypes, isMember): +def needScopeObject( + returnType, arguments, extendedAttributes, isWrapperCached, considerTypes, isMember +): """ isMember should be true if we're dealing with an attribute annotated as [StoreInSlot]. 
""" - return (considerTypes and not isWrapperCached and - ((not isMember and typeNeedsScopeObject(returnType, True)) or - any(typeNeedsScopeObject(a.type) for a in arguments))) + return ( + considerTypes + and not isWrapperCached + and ( + (not isMember and typeNeedsScopeObject(returnType, True)) + or any(typeNeedsScopeObject(a.type) for a in arguments) + ) + ) def callerTypeGetterForDescriptor(descriptor): @@ -7623,6 +8679,7 @@ def callerTypeGetterForDescriptor(descriptor): systemCallerGetter = "nsContentUtils::IsSystemCaller" return "%s(cx) ? CallerType::System : CallerType::NonSystem" % systemCallerGetter + class CGCallGenerator(CGThing): """ A class to generate an actual call to a C++ object. Assumes that the C++ @@ -7640,14 +8697,33 @@ class CGCallGenerator(CGThing): context: The context string to pass to MaybeSetPendingException. """ - def __init__(self, isFallible, needsCallerType, isChromeOnly, - arguments, argsPre, returnType, extendedAttributes, descriptor, - nativeMethodName, static, object="self", argsPost=[], - resultVar=None, context="nullptr"): + + def __init__( + self, + isFallible, + needsCallerType, + isChromeOnly, + arguments, + argsPre, + returnType, + extendedAttributes, + descriptor, + nativeMethodName, + static, + object="self", + argsPost=[], + resultVar=None, + context="nullptr", + ): CGThing.__init__(self) - result, resultOutParam, resultRooter, resultArgs, resultConversion = \ - getRetvalDeclarationForType(returnType, descriptor) + ( + result, + resultOutParam, + resultRooter, + resultArgs, + resultConversion, + ) = getRetvalDeclarationForType(returnType, descriptor) args = CGList([CGGeneric(arg) for arg in argsPre], ", ") for a, name in arguments: @@ -7679,13 +8755,14 @@ def needsConst(a): if a.type.isSpiderMonkeyInterface(): return True return False + if needsConst(a): arg = CGWrapper(arg, pre="Constify(", post=")") # And convert NonNull to T& - if (((a.type.isGeckoInterface() or a.type.isCallback() or - a.type.isPromise()) and - not a.type.nullable()) or - a.type.isDOMString()): + if ( + (a.type.isGeckoInterface() or a.type.isCallback() or a.type.isPromise()) + and not a.type.nullable() + ) or a.type.isDOMString(): arg = CGWrapper(arg, pre="NonNullHelper(", post=")") # If it's a refcounted object, let the static analysis know it's @@ -7714,14 +8791,16 @@ def needsConst(a): needsSubjectPrincipal = "needsSubjectPrincipal" in extendedAttributes if needsSubjectPrincipal: needsNonSystemPrincipal = ( - "needsNonSystemSubjectPrincipal" in extendedAttributes) + "needsNonSystemSubjectPrincipal" in extendedAttributes + ) if needsNonSystemPrincipal: checkPrincipal = dedent( """ if (principal->IsSystemPrincipal()) { principal = nullptr; } - """) + """ + ) else: checkPrincipal = "" @@ -7733,37 +8812,48 @@ def needsConst(a): nsIPrincipal* principal = nsJSPrincipals::get(principals); ${checkPrincipal} """, - checkPrincipal=checkPrincipal) + checkPrincipal=checkPrincipal, + ) if descriptor.interface.isExposedInAnyWorker(): - self.cgRoot.append(CGGeneric(fill( - """ + self.cgRoot.append( + CGGeneric( + fill( + """ Maybe subjectPrincipal; if (NS_IsMainThread()) { $*{getPrincipal} subjectPrincipal.emplace(principal); } """, - getPrincipal=getPrincipal))) + getPrincipal=getPrincipal, + ) + ) + ) subjectPrincipalArg = "subjectPrincipal" else: if needsNonSystemPrincipal: - principalType = "nsIPrincipal*"; + principalType = "nsIPrincipal*" subjectPrincipalArg = "subjectPrincipal" else: principalType = "NonNull" subjectPrincipalArg = "NonNullHelper(subjectPrincipal)" - 
self.cgRoot.append(CGGeneric(fill( - """ + self.cgRoot.append( + CGGeneric( + fill( + """ ${principalType} subjectPrincipal; { $*{getPrincipal} subjectPrincipal = principal; } """, - principalType=principalType, - getPrincipal=getPrincipal))) + principalType=principalType, + getPrincipal=getPrincipal, + ) + ) + ) args.append(CGGeneric("MOZ_KnownLive(%s)" % subjectPrincipalArg)) @@ -7784,16 +8874,22 @@ def needsConst(a): if not static: call = CGWrapper(call, pre="%s->" % object) call = CGList([call, CGWrapper(args, pre="(", post=")")]) - if (returnType is None or returnType.isVoid() or - resultOutParam is not None): + if returnType is None or returnType.isVoid() or resultOutParam is not None: assert resultConversion is None - call = CGList([ - CGWrapper( + call = CGList( + [ + CGWrapper( + call, + pre=( + "// NOTE: This assert does NOT call the function.\n" + "static_assert(std::is_void_v unwrappedObj(cx, js::CheckedUnwrapStatic(obj)); // Caller should have ensured that "obj" can be unwrapped already. MOZ_DIAGNOSTIC_ASSERT(unwrappedObj); - """))) + """ + ) + ) + ) argsPre.append("unwrappedObj") if needsUnwrap and needsUnwrappedVar: @@ -8158,19 +9328,28 @@ def __init__(self, returnType, arguments, nativeMethodName, static, # to do it, before we might start setting up Rooted things for our # arguments, so that we don't violate the stack discipline Rooted # depends on. - cgThings.append(CGGeneric( - "bool objIsXray = xpc::WrapperFactory::IsXrayWrapper(obj);\n")) + cgThings.append( + CGGeneric("bool objIsXray = xpc::WrapperFactory::IsXrayWrapper(obj);\n") + ) if needsUnwrappedVar: - cgThings.append(CGIfWrapper( - CGGeneric("unwrappedObj.emplace(cx, obj);\n"), - "objIsXray")) + cgThings.append( + CGIfWrapper( + CGGeneric("unwrappedObj.emplace(cx, obj);\n"), "objIsXray" + ) + ) for i in range(argConversionStartsAt, self.argCount): cgThings.append( - CGArgumentConverter(arguments[i], i, self.descriptor, - argDescription % {"index": i + 1}, - idlNode, invalidEnumValueFatal=not setter, - lenientFloatCode=lenientFloatCode)) + CGArgumentConverter( + arguments[i], + i, + self.descriptor, + argDescription % {"index": i + 1}, + idlNode, + invalidEnumValueFatal=not setter, + lenientFloatCode=lenientFloatCode, + ) + ) # Now that argument processing is done, enforce the LenientFloat stuff if lenientFloatCode: @@ -8182,14 +9361,20 @@ def __init__(self, returnType, arguments, nativeMethodName, static, """ args.rval().setUndefined(); return true; - """) - cgThings.append(CGGeneric(fill( - """ + """ + ) + cgThings.append( + CGGeneric( + fill( + """ if (foundNonFiniteFloat) { $*{returnSteps} } """, - returnSteps=foundNonFiniteFloatBehavior))) + returnSteps=foundNonFiniteFloatBehavior, + ) + ) + ) if needsUnwrap: # Something depends on having the unwrapped object, so unwrap it now. @@ -8197,8 +9382,9 @@ def __init__(self, returnType, arguments, nativeMethodName, static, # XXXkhuey we should be able to MOZ_ASSERT that ${obj} is # not null. xraySteps.append( - CGGeneric(fill( - """ + CGGeneric( + fill( + """ // Since our object is an Xray, we can just CheckedUnwrapStatic: // we know Xrays have no dynamic unwrap behavior. 
${obj} = js::CheckedUnwrapStatic(${obj}); @@ -8206,7 +9392,10 @@ def __init__(self, returnType, arguments, nativeMethodName, static, return false; } """, - obj=unwrappedVar))) + obj=unwrappedVar, + ) + ) + ) if isConstructor: # If we're called via an xray, we need to enter the underlying # object's compartment and then wrap up all of our arguments into @@ -8219,44 +9408,62 @@ def __init__(self, returnType, arguments, nativeMethodName, static, # original list of JS::Values. cgThings.append(CGGeneric("Maybe ar;\n")) xraySteps.append(CGGeneric("ar.emplace(cx, obj);\n")) - xraySteps.append(CGGeneric(dedent( - """ + xraySteps.append( + CGGeneric( + dedent( + """ if (!JS_WrapObject(cx, &desiredProto)) { return false; } - """))) + """ + ) + ) + ) xraySteps.extend( wrapArgIntoCurrentCompartment(arg, argname, isMember=False) - for arg, argname in self.getArguments()) + for arg, argname in self.getArguments() + ) - cgThings.append( - CGIfWrapper(CGList(xraySteps), - "objIsXray")) + cgThings.append(CGIfWrapper(CGList(xraySteps), "objIsXray")) - if (idlNode.getExtendedAttribute('CEReactions') is not None and - not getter): - cgThings.append(CGGeneric(dedent( - """ + if idlNode.getExtendedAttribute("CEReactions") is not None and not getter: + cgThings.append( + CGGeneric( + dedent( + """ Maybe ceReaction; DocGroup* docGroup = self->GetDocGroup(); if (docGroup) { ceReaction.emplace(docGroup->CustomElementReactionsStack(), cx); } - """))) + """ + ) + ) + ) # If this is a method that was generated by a maplike/setlike # interface, use the maplike/setlike generator to fill in the body. # Otherwise, use CGCallGenerator to call the native method. if idlNode.isMethod() and idlNode.isMaplikeOrSetlikeOrIterableMethod(): - if (idlNode.maplikeOrSetlikeOrIterable.isMaplike() or - idlNode.maplikeOrSetlikeOrIterable.isSetlike()): - cgThings.append(CGMaplikeOrSetlikeMethodGenerator(descriptor, - idlNode.maplikeOrSetlikeOrIterable, - idlNode.identifier.name)) + if ( + idlNode.maplikeOrSetlikeOrIterable.isMaplike() + or idlNode.maplikeOrSetlikeOrIterable.isSetlike() + ): + cgThings.append( + CGMaplikeOrSetlikeMethodGenerator( + descriptor, + idlNode.maplikeOrSetlikeOrIterable, + idlNode.identifier.name, + ) + ) else: - cgThings.append(CGIterableMethodGenerator(descriptor, - idlNode.maplikeOrSetlikeOrIterable, - idlNode.identifier.name)) + cgThings.append( + CGIterableMethodGenerator( + descriptor, + idlNode.maplikeOrSetlikeOrIterable, + idlNode.identifier.name, + ) + ) else: context = GetLabelForErrorReporting(descriptor, idlNode, isConstructor) if getter: @@ -8266,20 +9473,28 @@ def __init__(self, returnType, arguments, nativeMethodName, static, # Callee expects a quoted string for the context if # there's a context. context = '"%s"' % context - cgThings.append(CGCallGenerator( - self.isFallible(), - needsCallerType(idlNode), - isChromeOnly(idlNode), - self.getArguments(), argsPre, returnType, - self.extendedAttributes, descriptor, - nativeMethodName, - static, - # We know our "self" must be being kept alive; otherwise we have - # a serious problem. In common cases it's just an argument and - # we're MOZ_CAN_RUN_SCRIPT, but in some cases it's on the stack - # and being kept alive via references from JS. 
- object="MOZ_KnownLive(self)", - argsPost=argsPost, resultVar=resultVar, context=context)) + cgThings.append( + CGCallGenerator( + self.isFallible(), + needsCallerType(idlNode), + isChromeOnly(idlNode), + self.getArguments(), + argsPre, + returnType, + self.extendedAttributes, + descriptor, + nativeMethodName, + static, + # We know our "self" must be being kept alive; otherwise we have + # a serious problem. In common cases it's just an argument and + # we're MOZ_CAN_RUN_SCRIPT, but in some cases it's on the stack + # and being kept alive via references from JS. + object="MOZ_KnownLive(self)", + argsPost=argsPost, + resultVar=resultVar, + context=context, + ) + ) if useCounterName: # Generate a telemetry call for when [UseCounter] is used. @@ -8287,12 +9502,14 @@ def __init__(self, returnType, arguments, nativeMethodName, static, """ SetUseCounter(obj, eUseCounter_${useCounterName}); """, - useCounterName = useCounterName) + useCounterName=useCounterName, + ) workerCode = fill( """ SetUseCounter(UseCounterWorker::${useCounterName}); """, - useCounterName = useCounterName) + useCounterName=useCounterName, + ) code = "" if idlNode.isExposedInWindow() and idlNode.isExposedInAnyWorker(): code += fill( @@ -8304,7 +9521,8 @@ def __init__(self, returnType, arguments, nativeMethodName, static, } """, windowCode=windowCode, - workerCode=workerCode) + workerCode=workerCode, + ) elif idlNode.isExposedInWindow(): code += windowCode elif idlNode.isExposedInAnyWorker(): @@ -8318,20 +9536,21 @@ def getArguments(self): return [(a, "arg" + str(i)) for i, a in enumerate(self.arguments)] def isFallible(self): - return 'infallible' not in self.extendedAttributes + return "infallible" not in self.extendedAttributes def wrap_return_value(self): wrapCode = "" returnsNewObject = memberReturnsNewObject(self.idlNode) - if (returnsNewObject and - (self.returnType.isGeckoInterface() or - self.returnType.isPromise())): + if returnsNewObject and ( + self.returnType.isGeckoInterface() or self.returnType.isPromise() + ): wrapCode += dedent( """ static_assert(!std::is_pointer_v, "NewObject implies that we need to keep the object alive with a strong reference."); - """) + """ + ) if self.setSlot: # For attributes in slots, we want to do some @@ -8341,16 +9560,16 @@ def wrap_return_value(self): successCode = None resultTemplateValues = { - 'jsvalRef': 'args.rval()', - 'jsvalHandle': 'args.rval()', - 'returnsNewObject': returnsNewObject, - 'isConstructorRetval': self.isConstructor, - 'successCode': successCode, + "jsvalRef": "args.rval()", + "jsvalHandle": "args.rval()", + "returnsNewObject": returnsNewObject, + "isConstructorRetval": self.isConstructor, + "successCode": successCode, # 'obj' in this dictionary is the thing whose compartment we are # trying to do the to-JS conversion in. We're going to put that # thing in a variable named "conversionScope" if setSlot is true. # Otherwise, just use "obj" for lack of anything better. - 'obj': "conversionScope" if self.setSlot else "obj" + "obj": "conversionScope" if self.setSlot else "obj", } wrapCode += wrapForType(self.returnType, self.descriptor, resultTemplateValues) @@ -8361,9 +9580,12 @@ def wrap_return_value(self): "Attribute %s.%s is static, so we don't have a useful slot " "to cache it in, because we don't have support for that on " "interface objects. 
See " - "https://bugzilla.mozilla.org/show_bug.cgi?id=1363870" % - (self.descriptor.interface.identifier.name, - self.idlNode.identifier.name)) + "https://bugzilla.mozilla.org/show_bug.cgi?id=1363870" + % ( + self.descriptor.interface.identifier.name, + self.idlNode.identifier.name, + ) + ) # When using a slot on the Xray expando, we need to make sure that # our initial conversion to a JS::Value is done in the caller @@ -8396,22 +9618,25 @@ def wrap_return_value(self): } } - """) + """ + ) if self.idlNode.getExtendedAttribute("Frozen"): - assert self.idlNode.type.isSequence() or self.idlNode.type.isDictionary() + assert ( + self.idlNode.type.isSequence() or self.idlNode.type.isDictionary() + ) freezeValue = CGGeneric( "JS::Rooted rvalObj(cx, &args.rval().toObject());\n" "if (!JS_FreezeObject(cx, rvalObj)) {\n" " return false;\n" - "}\n") + "}\n" + ) if self.idlNode.type.nullable(): - freezeValue = CGIfWrapper(freezeValue, - "args.rval().isObject()") + freezeValue = CGIfWrapper(freezeValue, "args.rval().isObject()") postConversionSteps += freezeValue.define() # slotStorageSteps are steps that run once we have entered the # slotStorage compartment. - slotStorageSteps= fill( + slotStorageSteps = fill( """ // Make a copy so that we don't do unnecessary wrapping on args.rval(). JS::Rooted storedVal(cx, args.rval()); @@ -8420,7 +9645,8 @@ def wrap_return_value(self): } JS::SetReservedSlot(slotStorage, slotIndex, storedVal); """, - maybeWrap=getMaybeWrapValueFuncForType(self.idlNode.type)) + maybeWrap=getMaybeWrapValueFuncForType(self.idlNode.type), + ) checkForXray = mayUseXrayExpandoSlots(self.descriptor, self.idlNode) @@ -8432,12 +9658,15 @@ def wrap_return_value(self): # don't do this for StoreInSlot, since those get dealt with during # wrapper setup, and failure would involve us trying to clear an # already-preserved wrapper. - if (self.idlNode.getExtendedAttribute("Cached") and - self.descriptor.wrapperCache): + if ( + self.idlNode.getExtendedAttribute("Cached") + and self.descriptor.wrapperCache + ): preserveWrapper = dedent( """ PreserveWrapper(self); - """) + """ + ) if checkForXray: preserveWrapper = fill( """ @@ -8447,7 +9676,8 @@ def wrap_return_value(self): $*{preserveWrapper} } """, - preserveWrapper=preserveWrapper) + preserveWrapper=preserveWrapper, + ) slotStorageSteps += preserveWrapper if checkForXray: @@ -8479,11 +9709,12 @@ def wrap_return_value(self): wrapCode=wrapCode, postConversionSteps=postConversionSteps, slotStorageSteps=slotStorageSteps, - maybeWrap=getMaybeWrapValueFuncForType(self.idlNode.type)) + maybeWrap=getMaybeWrapValueFuncForType(self.idlNode.type), + ) return wrapCode def define(self): - return (self.cgRoot.define() + self.wrap_return_value()) + return self.cgRoot.define() + self.wrap_return_value() class CGSwitch(CGList): @@ -8496,6 +9727,7 @@ class CGSwitch(CGList): Each case is a CGCase. The default is a CGThing for the body of the default case, if any. 
""" + def __init__(self, expression, cases, default=None): CGList.__init__(self, [CGIndenter(c) for c in cases]) self.prepend(CGGeneric("switch (" + expression + ") {\n")) @@ -8503,9 +9735,10 @@ def __init__(self, expression, cases, default=None): self.append( CGIndenter( CGWrapper( - CGIndenter(default), - pre="default: {\n", - post=" break;\n}\n"))) + CGIndenter(default), pre="default: {\n", post=" break;\n}\n" + ) + ) + ) self.append(CGGeneric("}\n")) @@ -8518,6 +9751,7 @@ class CGCase(CGList): the body (allowed to be None if there is no body), and an optional argument (defaulting to False) for whether to fall through. """ + def __init__(self, expression, body, fallThrough=False): CGList.__init__(self, []) self.append(CGGeneric("case " + expression + ": {\n")) @@ -8535,8 +9769,10 @@ class CGMethodCall(CGThing): A class to generate selection of a method signature from a set of signatures and generation of a call to that signature. """ - def __init__(self, nativeMethodName, static, descriptor, method, - isConstructor=False): + + def __init__( + self, nativeMethodName, static, descriptor, method, isConstructor=False + ): CGThing.__init__(self) methodName = GetLabelForErrorReporting(descriptor, method, isConstructor) @@ -8549,8 +9785,10 @@ def __init__(self, nativeMethodName, static, descriptor, method, if method.isStatic(): nativeType = descriptor.nativeType - staticTypeOverride = PropertyDefiner.getStringAttr(method, "StaticClassOverride") - if (staticTypeOverride): + staticTypeOverride = PropertyDefiner.getStringAttr( + method, "StaticClassOverride" + ) + if staticTypeOverride: nativeType = staticTypeOverride nativeMethodName = "%s::%s" % (nativeType, nativeMethodName) @@ -8559,17 +9797,22 @@ def requiredArgCount(signature): if len(arguments) == 0: return 0 requiredArgs = len(arguments) - while requiredArgs and arguments[requiredArgs-1].optional: + while requiredArgs and arguments[requiredArgs - 1].optional: requiredArgs -= 1 return requiredArgs def getPerSignatureCall(signature, argConversionStartsAt=0): - return CGPerSignatureCall(signature[0], signature[1], - nativeMethodName, static, descriptor, - method, - argConversionStartsAt=argConversionStartsAt, - isConstructor=isConstructor, - useCounterName=useCounterName) + return CGPerSignatureCall( + signature[0], + signature[1], + nativeMethodName, + static, + descriptor, + method, + argConversionStartsAt=argConversionStartsAt, + isConstructor=isConstructor, + useCounterName=useCounterName, + ) signatures = method.signatures() if len(signatures) == 1: @@ -8591,7 +9834,8 @@ def getPerSignatureCall(signature, argConversionStartsAt=0): } """, requiredArgs=requiredArgs, - methodName=methodName) + methodName=methodName, + ) self.cgRoot.prepend(CGGeneric(code)) return @@ -8606,17 +9850,22 @@ def getPerSignatureCall(signature, argConversionStartsAt=0): # Try to optimize away cases when the next argCount in the list # will have the same code as us; if it does, we can fall through to # that case. - if argCountIdx+1 < len(allowedArgCounts): - nextPossibleSignatures = method.signaturesForArgCount(allowedArgCounts[argCountIdx+1]) + if argCountIdx + 1 < len(allowedArgCounts): + nextPossibleSignatures = method.signaturesForArgCount( + allowedArgCounts[argCountIdx + 1] + ) else: nextPossibleSignatures = None if possibleSignatures == nextPossibleSignatures: # Same set of signatures means we better have the same # distinguishing index. So we can in fact just fall through to # the next case here. 
- assert (len(possibleSignatures) == 1 or - (method.distinguishingIndexForArgCount(argCount) == - method.distinguishingIndexForArgCount(allowedArgCounts[argCountIdx+1]))) + assert len(possibleSignatures) == 1 or ( + method.distinguishingIndexForArgCount(argCount) + == method.distinguishingIndexForArgCount( + allowedArgCounts[argCountIdx + 1] + ) + ) argCountCases.append(CGCase(str(argCount), None, True)) continue @@ -8624,7 +9873,8 @@ def getPerSignatureCall(signature, argConversionStartsAt=0): # easy case! signature = possibleSignatures[0] argCountCases.append( - CGCase(str(argCount), getPerSignatureCall(signature))) + CGCase(str(argCount), getPerSignatureCall(signature)) + ) continue distinguishingIndex = method.distinguishingIndexForArgCount(argCount) @@ -8646,9 +9896,10 @@ def distinguishingType(signature): assert not distinguishingType(sig).isAny() # We can't handle unions at the distinguishing index. if distinguishingType(sig).isUnion(): - raise TypeError("No support for unions as distinguishing " - "arguments yet: %s" % - distinguishingArgument(sig).location) + raise TypeError( + "No support for unions as distinguishing " + "arguments yet: %s" % distinguishingArgument(sig).location + ) # We don't support variadics as the distinguishingArgument yet. # If you want to add support, consider this case: # @@ -8664,24 +9915,32 @@ def distinguishingType(signature): # fact at distinguishingIndex, perhaps. Would need to # double-check. if distinguishingArgument(sig).variadic: - raise TypeError("No support for variadics as distinguishing " - "arguments yet: %s" % - distinguishingArgument(sig).location) + raise TypeError( + "No support for variadics as distinguishing " + "arguments yet: %s" % distinguishingArgument(sig).location + ) # Convert all our arguments up to the distinguishing index. # Doesn't matter which of the possible signatures we use, since # they all have the same types up to that point; just use # possibleSignatures[0] - caseBody = [CGArgumentConverter(possibleSignatures[0][1][i], - i, descriptor, - argDesc % (i + 1), method) - for i in range(0, distinguishingIndex)] + caseBody = [ + CGArgumentConverter( + possibleSignatures[0][1][i], + i, + descriptor, + argDesc % (i + 1), + method, + ) + for i in range(0, distinguishingIndex) + ] # Select the right overload from our set. distinguishingArg = "args[%d]" % distinguishingIndex - def tryCall(signature, indent, isDefinitelyObject=False, - isNullOrUndefined=False): + def tryCall( + signature, indent, isDefinitelyObject=False, isNullOrUndefined=False + ): assert not isDefinitelyObject or not isNullOrUndefined assert isDefinitelyObject or isNullOrUndefined if isDefinitelyObject: @@ -8696,30 +9955,38 @@ def tryCall(signature, indent, isDefinitelyObject=False, # case undefined was passed in. 
argIsOptional = distinguishingArgument(signature).canHaveMissingValue() testCode = instantiateJSToNativeConversion( - getJSToNativeConversionInfo(type, descriptor, - failureCode=failureCode, - isDefinitelyObject=isDefinitelyObject, - isNullOrUndefined=isNullOrUndefined, - isOptional=argIsOptional, - sourceDescription=(argDesc % (distinguishingIndex + 1))), + getJSToNativeConversionInfo( + type, + descriptor, + failureCode=failureCode, + isDefinitelyObject=isDefinitelyObject, + isNullOrUndefined=isNullOrUndefined, + isOptional=argIsOptional, + sourceDescription=(argDesc % (distinguishingIndex + 1)), + ), { "declName": "arg%d" % distinguishingIndex, "holderName": ("arg%d" % distinguishingIndex) + "_holder", "val": distinguishingArg, "obj": "obj", "haveValue": "args.hasDefined(%d)" % distinguishingIndex, - "passedToJSImpl": toStringBool(isJSImplementedDescriptor(descriptor)) + "passedToJSImpl": toStringBool( + isJSImplementedDescriptor(descriptor) + ), }, - checkForValue=argIsOptional) + checkForValue=argIsOptional, + ) caseBody.append(CGIndenter(testCode, indent)) # If we got this far, we know we unwrapped to the right # C++ type, so just do the call. Start conversion with # distinguishingIndex + 1, since we already converted # distinguishingIndex. - caseBody.append(CGIndenter( - getPerSignatureCall(signature, distinguishingIndex + 1), - indent)) + caseBody.append( + CGIndenter( + getPerSignatureCall(signature, distinguishingIndex + 1), indent + ) + ) def hasConditionalConversion(type): """ @@ -8736,24 +10003,27 @@ def hasConditionalConversion(type): if type.isString() or type.isEnum(): return False if type.isBoolean(): - distinguishingTypes = (distinguishingType(s) for s in - possibleSignatures) - return any(t.isString() or t.isEnum() or t.isNumeric() - for t in distinguishingTypes) + distinguishingTypes = ( + distinguishingType(s) for s in possibleSignatures + ) + return any( + t.isString() or t.isEnum() or t.isNumeric() + for t in distinguishingTypes + ) if type.isNumeric(): - distinguishingTypes = (distinguishingType(s) for s in - possibleSignatures) - return any(t.isString() or t.isEnum() - for t in distinguishingTypes) + distinguishingTypes = ( + distinguishingType(s) for s in possibleSignatures + ) + return any(t.isString() or t.isEnum() for t in distinguishingTypes) return True def needsNullOrUndefinedCase(type): """ Return true if the type needs a special isNullOrUndefined() case """ - return ((type.nullable() and - hasConditionalConversion(type)) or - type.isDictionary()) + return ( + type.nullable() and hasConditionalConversion(type) + ) or type.isDictionary() # First check for undefined and optional distinguishing arguments # and output a special branch for that case. Note that we don't @@ -8765,17 +10035,21 @@ def needsNullOrUndefinedCase(type): # distinguishing argument, so if we're it we'll definitely get # picked up by the nullable handling. Also, we can skip this check # if the argument has an unconditional conversion later on. 
- undefSigs = [s for s in possibleSignatures if - distinguishingIndex < len(s[1]) and - s[1][distinguishingIndex].optional and - hasConditionalConversion(s[1][distinguishingIndex].type) and - not needsNullOrUndefinedCase(s[1][distinguishingIndex].type)] + undefSigs = [ + s + for s in possibleSignatures + if distinguishingIndex < len(s[1]) + and s[1][distinguishingIndex].optional + and hasConditionalConversion(s[1][distinguishingIndex].type) + and not needsNullOrUndefinedCase(s[1][distinguishingIndex].type) + ] # Can't have multiple signatures with an optional argument at the # same index. assert len(undefSigs) < 2 if len(undefSigs) > 0: - caseBody.append(CGGeneric("if (%s.isUndefined()) {\n" % - distinguishingArg)) + caseBody.append( + CGGeneric("if (%s.isUndefined()) {\n" % distinguishingArg) + ) tryCall(undefSigs[0], 2, isNullOrUndefined=True) caseBody.append(CGGeneric("}\n")) @@ -8789,13 +10063,17 @@ def needsNullOrUndefinedCase(type): # through to the unconditional conversion we have, if any, since # they will fail whatever the conditions on the input value are for # our other conversions. - nullOrUndefSigs = [s for s in possibleSignatures - if needsNullOrUndefinedCase(distinguishingType(s))] + nullOrUndefSigs = [ + s + for s in possibleSignatures + if needsNullOrUndefinedCase(distinguishingType(s)) + ] # Can't have multiple nullable types here assert len(nullOrUndefSigs) < 2 if len(nullOrUndefSigs) > 0: - caseBody.append(CGGeneric("if (%s.isNullOrUndefined()) {\n" % - distinguishingArg)) + caseBody.append( + CGGeneric("if (%s.isNullOrUndefined()) {\n" % distinguishingArg) + ) tryCall(nullOrUndefSigs[0], 2, isNullOrUndefined=True) caseBody.append(CGGeneric("}\n")) @@ -8814,25 +10092,35 @@ def needsNullOrUndefinedCase(type): # since if those are present no other object-typed argument will # be. objectSigs = [ - s for s in possibleSignatures - if (distinguishingType(s).isObject() or - distinguishingType(s).isNonCallbackInterface())] + s + for s in possibleSignatures + if ( + distinguishingType(s).isObject() + or distinguishingType(s).isNonCallbackInterface() + ) + ] # And all the overloads that take callbacks - objectSigs.extend(s for s in possibleSignatures - if distinguishingType(s).isCallback()) + objectSigs.extend( + s for s in possibleSignatures if distinguishingType(s).isCallback() + ) # And all the overloads that take sequences - objectSigs.extend(s for s in possibleSignatures - if distinguishingType(s).isSequence()) + objectSigs.extend( + s for s in possibleSignatures if distinguishingType(s).isSequence() + ) # Now append all the overloads that take a dictionary or callback # interface or record. There should be only one of these! genericObjectSigs = [ - s for s in possibleSignatures - if (distinguishingType(s).isDictionary() or - distinguishingType(s).isRecord() or - distinguishingType(s).isCallbackInterface())] + s + for s in possibleSignatures + if ( + distinguishingType(s).isDictionary() + or distinguishingType(s).isRecord() + or distinguishingType(s).isCallbackInterface() + ) + ] assert len(genericObjectSigs) <= 1 objectSigs.extend(genericObjectSigs) @@ -8850,8 +10138,7 @@ def needsNullOrUndefinedCase(type): # also allow the unwrapping test to skip having to do codegen # for the null-or-undefined case, which we already handled # above. 
- caseBody.append(CGGeneric("if (%s.isObject()) {\n" % - distinguishingArg)) + caseBody.append(CGGeneric("if (%s.isObject()) {\n" % distinguishingArg)) for sig in objectSigs: caseBody.append(CGIndenter(CGGeneric("do {\n"))) # Indent by 4, since we need to indent further @@ -8874,12 +10161,16 @@ def findUniqueSignature(filterLambda): return None stringSignature = findUniqueSignature( - lambda s: (distinguishingType(s).isString() or - distinguishingType(s).isEnum())) + lambda s: ( + distinguishingType(s).isString() or distinguishingType(s).isEnum() + ) + ) numericSignature = findUniqueSignature( - lambda s: distinguishingType(s).isNumeric()) + lambda s: distinguishingType(s).isNumeric() + ) booleanSignature = findUniqueSignature( - lambda s: distinguishingType(s).isBoolean()) + lambda s: distinguishingType(s).isBoolean() + ) if stringSignature or numericSignature: booleanCondition = "%s.isBoolean()" @@ -8894,8 +10185,7 @@ def findUniqueSignature(filterLambda): def addCase(sig, condition): sigCode = getPerSignatureCall(sig, distinguishingIndex) if condition: - sigCode = CGIfWrapper(sigCode, - condition % distinguishingArg) + sigCode = CGIfWrapper(sigCode, condition % distinguishingArg) caseBody.append(sigCode) if booleanSignature: @@ -8905,34 +10195,47 @@ def addCase(sig, condition): if stringSignature: addCase(stringSignature, None) - if (not booleanSignature and not numericSignature and - not stringSignature): + if not booleanSignature and not numericSignature and not stringSignature: # Just throw; we have no idea what we're supposed to # do with this. - caseBody.append(CGGeneric( - 'return cx.ThrowErrorMessage("%d", "%d");\n' % - (distinguishingIndex + 1, argCount))) + caseBody.append( + CGGeneric( + 'return cx.ThrowErrorMessage("%d", "%d");\n' + % (distinguishingIndex + 1, argCount) + ) + ) argCountCases.append(CGCase(str(argCount), CGList(caseBody))) overloadCGThings = [] overloadCGThings.append( - CGGeneric("unsigned argcount = std::min(args.length(), %du);\n" % - maxArgCount)) + CGGeneric( + "unsigned argcount = std::min(args.length(), %du);\n" % maxArgCount + ) + ) overloadCGThings.append( - CGSwitch("argcount", - argCountCases, - CGGeneric(dedent( - """ + CGSwitch( + "argcount", + argCountCases, + CGGeneric( + dedent( + """ // Using nsPrintfCString here would require including that // header. Let's not worry about it. nsAutoCString argCountStr; argCountStr.AppendPrintf("%u", args.length()); return cx.ThrowErrorMessage(argCountStr.get()); - """)))) + """ + ) + ), + ) + ) overloadCGThings.append( - CGGeneric('MOZ_CRASH("We have an always-returning default case");\n' - 'return false;\n')) + CGGeneric( + 'MOZ_CRASH("We have an always-returning default case");\n' + "return false;\n" + ) + ) self.cgRoot = CGList(overloadCGThings) def define(self): @@ -8944,33 +10247,54 @@ class CGGetterCall(CGPerSignatureCall): A class to generate a native object getter call for a particular IDL getter. 
""" - def __init__(self, returnType, nativeMethodName, descriptor, attr, dontSetSlot=False, - extendedAttributes=None): + + def __init__( + self, + returnType, + nativeMethodName, + descriptor, + attr, + dontSetSlot=False, + extendedAttributes=None, + ): if attr.getExtendedAttribute("UseCounter"): - useCounterName = "%s_%s_getter" % (descriptor.interface.identifier.name, - attr.identifier.name) + useCounterName = "%s_%s_getter" % ( + descriptor.interface.identifier.name, + attr.identifier.name, + ) else: useCounterName = None if attr.isStatic(): nativeMethodName = "%s::%s" % (descriptor.nativeType, nativeMethodName) - CGPerSignatureCall.__init__(self, returnType, [], nativeMethodName, - attr.isStatic(), descriptor, attr, - getter=True, useCounterName=useCounterName, - dontSetSlot=dontSetSlot, - extendedAttributes=extendedAttributes) + CGPerSignatureCall.__init__( + self, + returnType, + [], + nativeMethodName, + attr.isStatic(), + descriptor, + attr, + getter=True, + useCounterName=useCounterName, + dontSetSlot=dontSetSlot, + extendedAttributes=extendedAttributes, + ) -class FakeIdentifier(): +class FakeIdentifier: def __init__(self, name): self.name = name -class FakeArgument(): +class FakeArgument: """ A class that quacks like an IDLArgument. This is used to make setters look like method calls or for special operations. """ - def __init__(self, type, interfaceMember, name="arg", allowTreatNonCallableAsNull=False): + + def __init__( + self, type, interfaceMember, name="arg", allowTreatNonCallableAsNull=False + ): self.type = type self.optional = False self.variadic = False @@ -8991,18 +10315,28 @@ class CGSetterCall(CGPerSignatureCall): A class to generate a native object setter call for a particular IDL setter. """ + def __init__(self, argType, nativeMethodName, descriptor, attr): if attr.getExtendedAttribute("UseCounter"): - useCounterName = "%s_%s_setter" % (descriptor.interface.identifier.name, - attr.identifier.name) + useCounterName = "%s_%s_setter" % ( + descriptor.interface.identifier.name, + attr.identifier.name, + ) else: useCounterName = None if attr.isStatic(): nativeMethodName = "%s::%s" % (descriptor.nativeType, nativeMethodName) - CGPerSignatureCall.__init__(self, None, - [FakeArgument(argType, attr, allowTreatNonCallableAsNull=True)], - nativeMethodName, attr.isStatic(), - descriptor, attr, setter=True, useCounterName=useCounterName) + CGPerSignatureCall.__init__( + self, + None, + [FakeArgument(argType, attr, allowTreatNonCallableAsNull=True)], + nativeMethodName, + attr.isStatic(), + descriptor, + attr, + setter=True, + useCounterName=useCounterName, + ) def wrap_return_value(self): attr = self.idlNode @@ -9011,15 +10345,15 @@ def wrap_return_value(self): args = "cx, self" else: args = "self" - clearSlot = ("%s(%s);\n" % - (MakeClearCachedValueNativeName(self.idlNode), args)) + clearSlot = "%s(%s);\n" % ( + MakeClearCachedValueNativeName(self.idlNode), + args, + ) else: clearSlot = "" # We have no return value - return ("\n" - "%s" - "return true;\n" % clearSlot) + return "\n" "%s" "return true;\n" % clearSlot class CGAbstractBindingMethod(CGAbstractStaticMethod): @@ -9040,10 +10374,18 @@ class CGAbstractBindingMethod(CGAbstractStaticMethod): around or if the body does not need a JS::CallArgs. 
""" - def __init__(self, descriptor, name, args, getThisObj, - callArgs="JS::CallArgs args = JS::CallArgsFromVp(argc, vp);\n"): - CGAbstractStaticMethod.__init__(self, descriptor, name, "bool", args, - canRunScript=True) + + def __init__( + self, + descriptor, + name, + args, + getThisObj, + callArgs="JS::CallArgs args = JS::CallArgsFromVp(argc, vp);\n", + ): + CGAbstractStaticMethod.__init__( + self, descriptor, name, "bool", args, canRunScript=True + ) # This can't ever happen, because we only use this for class hooks. self.unwrapFailureCode = fill( @@ -9051,13 +10393,15 @@ def __init__(self, descriptor, name, args, getThisObj, MOZ_CRASH("Unexpected object in '${name}' hook"); return false; """, - name=name) + name=name, + ) if getThisObj == "": self.getThisObj = None else: - self.getThisObj = CGGeneric("JS::Rooted obj(cx, %s);\n" % - getThisObj) + self.getThisObj = CGGeneric( + "JS::Rooted obj(cx, %s);\n" % getThisObj + ) self.callArgs = callArgs def definition_body(self): @@ -9068,14 +10412,14 @@ def definition_body(self): body += dedent( """ JS::Rooted rootSelf(cx, JS::ObjectValue(*obj)); - """) + """ + ) - body += str(CastableObjectUnwrapper( - self.descriptor, - "rootSelf", - "&rootSelf", - "self", - self.unwrapFailureCode)) + body += str( + CastableObjectUnwrapper( + self.descriptor, "rootSelf", "&rootSelf", "self", self.unwrapFailureCode + ) + ) return body + self.generate_code().define() @@ -9091,19 +10435,22 @@ class CGAbstractStaticBindingMethod(CGAbstractStaticMethod): function to do the rest of the work. This function should return a CGThing which is already properly indented. """ + def __init__(self, descriptor, name): - CGAbstractStaticMethod.__init__(self, descriptor, name, "bool", - JSNativeArguments(), - canRunScript=True) + CGAbstractStaticMethod.__init__( + self, descriptor, name, "bool", JSNativeArguments(), canRunScript=True + ) def definition_body(self): # Make sure that "obj" is in the same compartment as "cx", since we'll # later use it to wrap return values. - unwrap = dedent(""" + unwrap = dedent( + """ JS::CallArgs args = JS::CallArgsFromVp(argc, vp); JS::Rooted obj(cx, &args.callee()); - """) + """ + ) return unwrap + self.generate_code().define() def generate_code(self): @@ -9115,18 +10462,18 @@ def MakeNativeName(name): def GetWebExposedName(idlObject, descriptor): - if idlObject == descriptor.operations['Stringifier']: + if idlObject == descriptor.operations["Stringifier"]: return "toString" name = idlObject.identifier.name - if name == '__namedsetter': + if name == "__namedsetter": return "named setter" - if name == '__namedgetter': + if name == "__namedgetter": return "named getter" - if name == '__indexedsetter': + if name == "__indexedsetter": return "indexed setter" - if name == '__indexedgetter': + if name == "__indexedgetter": return "indexed getter" - if name == '__legacycaller': + if name == "__legacycaller": return "legacy caller" return name @@ -9168,31 +10515,38 @@ class CGSpecializedMethod(CGAbstractStaticMethod): A class for generating the C++ code for a specialized method that the JIT can call with lower overhead. 
""" + def __init__(self, descriptor, method): self.method = method name = CppKeywords.checkMethodName(IDLToCIdentifier(method.identifier.name)) - args = [Argument('JSContext*', 'cx'), - Argument('JS::Handle', 'obj'), - Argument('void*', 'void_self'), - Argument('const JSJitMethodCallArgs&', 'args')] - CGAbstractStaticMethod.__init__(self, descriptor, name, 'bool', args, - canRunScript=True) + args = [ + Argument("JSContext*", "cx"), + Argument("JS::Handle", "obj"), + Argument("void*", "void_self"), + Argument("const JSJitMethodCallArgs&", "args"), + ] + CGAbstractStaticMethod.__init__( + self, descriptor, name, "bool", args, canRunScript=True + ) def definition_body(self): - nativeName = CGSpecializedMethod.makeNativeName(self.descriptor, - self.method) - call = CGMethodCall(nativeName, self.method.isStatic(), self.descriptor, - self.method).define() + nativeName = CGSpecializedMethod.makeNativeName(self.descriptor, self.method) + call = CGMethodCall( + nativeName, self.method.isStatic(), self.descriptor, self.method + ).define() prefix = "" if self.method.getExtendedAttribute("CrossOriginCallable"): for signature in self.method.signatures(): # non-void signatures would require us to deal with remote proxies for the # return value here. if not signature[0].isVoid(): - raise TypeError("We don't support a method marked as CrossOriginCallable " - "with non-void return type") + raise TypeError( + "We don't support a method marked as CrossOriginCallable " + "with non-void return type" + ) prototypeID, _ = PrototypeIDAndDepth(self.descriptor) - prefix = fill(""" + prefix = fill( + """ // CrossOriginThisPolicy::UnwrapThisObject stores a ${nativeType}::RemoteProxy in void_self // if obj is a proxy with a RemoteObjectProxy handler for the right type, or else it stores // a ${nativeType}. If we get here from the JIT (without going through UnwrapThisObject) we @@ -9205,14 +10559,16 @@ def definition_body(self): """, prototypeID=prototypeID, nativeType=self.descriptor.nativeType, - call=call) + call=call, + ) return prefix + fill( """ auto* self = static_cast<${nativeType}*>(void_self); $*{call} """, nativeType=self.descriptor.nativeType, - call=call) + call=call, + ) def auto_profiler_label(self): interface_name = self.descriptor.interface.identifier.name @@ -9225,7 +10581,8 @@ def auto_profiler_label(self): uint32_t(js::ProfilingStackFrame::Flags::RELEVANT_FOR_JS)); """, interface_name=interface_name, - method_name=method_name) + method_name=method_name, + ) @staticmethod def should_have_method_description(descriptor, idlMethod): @@ -9245,9 +10602,12 @@ def should_have_method_description(descriptor, idlMethod): args = sig[1] return any( idlTypeNeedsCallContext( - arg.type, descriptor, - allowTreatNonCallableAsNull=arg.allowTreatNonCallableAsNull()) - for arg in args) + arg.type, + descriptor, + allowTreatNonCallableAsNull=arg.allowTreatNonCallableAsNull(), + ) + for arg in args + ) @staticmethod def error_reporting_label_helper(descriptor, idlMethod, isConstructor): @@ -9257,14 +10617,15 @@ def error_reporting_label_helper(descriptor, idlMethod, isConstructor): across different classes. 
""" if not CGSpecializedMethod.should_have_method_description( - descriptor, idlMethod): + descriptor, idlMethod + ): return None return GetLabelForErrorReporting(descriptor, idlMethod, isConstructor) def error_reporting_label(self): - return CGSpecializedMethod.error_reporting_label_helper(self.descriptor, - self.method, - isConstructor=False) + return CGSpecializedMethod.error_reporting_label_helper( + self.descriptor, self.method, isConstructor=False + ) @staticmethod def makeNativeName(descriptor, method): @@ -9279,12 +10640,14 @@ class CGMethodPromiseWrapper(CGAbstractStaticMethod): A class for generating a wrapper around another method that will convert exceptions to promises. """ + def __init__(self, descriptor, methodToWrap): self.method = methodToWrap name = self.makeName(methodToWrap.name) args = list(methodToWrap.args) - CGAbstractStaticMethod.__init__(self, descriptor, name, 'bool', args, - canRunScript=True) + CGAbstractStaticMethod.__init__( + self, descriptor, name, "bool", args, canRunScript=True + ) def definition_body(self): return fill( @@ -9296,7 +10659,8 @@ def definition_body(self): return ConvertExceptionToPromise(cx, args.rval()); """, methodName=self.method.name, - args=", ".join(arg.name for arg in self.args)) + args=", ".join(arg.name for arg in self.args), + ) @staticmethod def makeName(methodName): @@ -9317,7 +10681,8 @@ def definition_body(self): return false; } """, - nativeType=self.descriptor.nativeType) + nativeType=self.descriptor.nativeType, + ) jsonDescriptors = [self.descriptor] interface = self.descriptor.interface.parent @@ -9335,10 +10700,9 @@ def definition_body(self): return false; } """, - parentclass=toBindingNamespace(descriptor.name) - ) - ret += ('args.rval().setObject(*result);\n' - 'return true;\n') + parentclass=toBindingNamespace(descriptor.name), + ) + ret += "args.rval().setObject(*result);\n" "return true;\n" return ret @@ -9346,12 +10710,17 @@ class CGLegacyCallHook(CGAbstractBindingMethod): """ Call hook for our object """ + def __init__(self, descriptor): self._legacycaller = descriptor.operations["LegacyCaller"] # Our "self" is actually the callee in this case, not the thisval. CGAbstractBindingMethod.__init__( - self, descriptor, LEGACYCALLER_HOOK_NAME, - JSNativeArguments(), getThisObj="&args.callee()") + self, + descriptor, + LEGACYCALLER_HOOK_NAME, + JSNativeArguments(), + getThisObj="&args.callee()", + ) def define(self): if not self._legacycaller: @@ -9361,31 +10730,34 @@ def define(self): def generate_code(self): name = self._legacycaller.identifier.name nativeName = MakeNativeName(self.descriptor.binaryNameFor(name)) - return CGMethodCall(nativeName, False, self.descriptor, - self._legacycaller) + return CGMethodCall(nativeName, False, self.descriptor, self._legacycaller) def error_reporting_label(self): # Should act like methods. - return CGSpecializedMethod.error_reporting_label_helper(self.descriptor, - self._legacycaller, - isConstructor=False) + return CGSpecializedMethod.error_reporting_label_helper( + self.descriptor, self._legacycaller, isConstructor=False + ) + class CGResolveHook(CGAbstractClassHook): """ Resolve hook for objects that have the NeedResolve extended attribute. 
""" + def __init__(self, descriptor): assert descriptor.interface.getExtendedAttribute("NeedResolve") - args = [Argument('JSContext*', 'cx'), - Argument('JS::Handle', 'obj'), - Argument('JS::Handle', 'id'), - Argument('bool*', 'resolvedp')] - CGAbstractClassHook.__init__(self, descriptor, RESOLVE_HOOK_NAME, - "bool", args) + args = [ + Argument("JSContext*", "cx"), + Argument("JS::Handle", "obj"), + Argument("JS::Handle", "id"), + Argument("bool*", "resolvedp"), + ] + CGAbstractClassHook.__init__(self, descriptor, RESOLVE_HOOK_NAME, "bool", args) def generate_code(self): - return dedent(""" + return dedent( + """ JS::Rooted desc(cx); if (!self->DoResolve(cx, obj, id, &desc)) { return false; @@ -9404,12 +10776,14 @@ def generate_code(self): } *resolvedp = true; return true; - """) + """ + ) def definition_body(self): if self.descriptor.isGlobal(): # Resolve standard classes - prefix = dedent(""" + prefix = dedent( + """ if (!ResolveGlobal(cx, obj, id, resolvedp)) { return false; } @@ -9417,7 +10791,8 @@ def definition_body(self): return true; } - """) + """ + ) else: prefix = "" return prefix + CGAbstractClassHook.definition_body(self) @@ -9427,93 +10802,188 @@ class CGMayResolveHook(CGAbstractStaticMethod): """ Resolve hook for objects that have the NeedResolve extended attribute. """ + def __init__(self, descriptor): assert descriptor.interface.getExtendedAttribute("NeedResolve") - args = [Argument('const JSAtomState&', 'names'), - Argument('jsid', 'id'), - Argument('JSObject*', 'maybeObj')] - CGAbstractStaticMethod.__init__(self, descriptor, MAY_RESOLVE_HOOK_NAME, - "bool", args) + args = [ + Argument("const JSAtomState&", "names"), + Argument("jsid", "id"), + Argument("JSObject*", "maybeObj"), + ] + CGAbstractStaticMethod.__init__( + self, descriptor, MAY_RESOLVE_HOOK_NAME, "bool", args + ) def definition_body(self): if self.descriptor.isGlobal(): # Check whether this would resolve as a standard class. - prefix = dedent(""" + prefix = dedent( + """ if (MayResolveGlobal(names, id, maybeObj)) { return true; } - """) + """ + ) else: prefix = "" - return (prefix + - "return %s::MayResolve(id);\n" % self.descriptor.nativeType) + return prefix + "return %s::MayResolve(id);\n" % self.descriptor.nativeType class CGEnumerateHook(CGAbstractBindingMethod): """ Enumerate hook for objects with custom hooks. """ + def __init__(self, descriptor): assert descriptor.interface.getExtendedAttribute("NeedResolve") - args = [Argument('JSContext*', 'cx'), - Argument('JS::Handle', 'obj'), - Argument('JS::MutableHandleVector', 'properties'), - Argument('bool', 'enumerableOnly')] + args = [ + Argument("JSContext*", "cx"), + Argument("JS::Handle", "obj"), + Argument("JS::MutableHandleVector", "properties"), + Argument("bool", "enumerableOnly"), + ] # Our "self" is actually the "obj" argument in this case, not the thisval. 
CGAbstractBindingMethod.__init__( - self, descriptor, NEW_ENUMERATE_HOOK_NAME, - args, getThisObj="", callArgs="") + self, descriptor, NEW_ENUMERATE_HOOK_NAME, args, getThisObj="", callArgs="" + ) def generate_code(self): - return CGGeneric(dedent(""" + return CGGeneric( + dedent( + """ FastErrorResult rv; self->GetOwnPropertyNames(cx, properties, enumerableOnly, rv); if (rv.MaybeSetPendingException(cx)) { return false; } return true; - """)) + """ + ) + ) def definition_body(self): if self.descriptor.isGlobal(): # Enumerate standard classes - prefix = dedent(""" + prefix = dedent( + """ if (!EnumerateGlobal(cx, obj, properties, enumerableOnly)) { return false; } - """) + """ + ) else: prefix = "" return prefix + CGAbstractBindingMethod.definition_body(self) -class CppKeywords(): +class CppKeywords: """ A class for checking if method names declared in webidl are not in conflict with C++ keywords. """ - keywords = frozenset([ - 'alignas', 'alignof', 'and', 'and_eq', 'asm', 'assert', 'auto', 'bitand', 'bitor', 'bool', - 'break', 'case', 'catch', 'char', 'char16_t', 'char32_t', 'class', 'compl', 'const', - 'constexpr', 'const_cast', 'continue', 'decltype', 'default', 'delete', 'do', 'double', - 'dynamic_cast', 'else', 'enum', 'explicit', 'export', 'extern', 'false', 'final', 'float', - 'for', 'friend', 'goto', 'if', 'inline', 'int', 'long', 'mutable', 'namespace', 'new', - 'noexcept', 'not', 'not_eq', 'nullptr', 'operator', 'or', 'or_eq', 'override', 'private', - 'protected', 'public', 'register', 'reinterpret_cast', 'return', 'short', 'signed', - 'sizeof', 'static', 'static_assert', 'static_cast', 'struct', 'switch', 'template', 'this', - 'thread_local', 'throw', 'true', 'try', 'typedef', 'typeid', 'typename', 'union', - 'unsigned', 'using', 'virtual', 'void', 'volatile', 'wchar_t', 'while', 'xor', 'xor_eq']) + + keywords = frozenset( + [ + "alignas", + "alignof", + "and", + "and_eq", + "asm", + "assert", + "auto", + "bitand", + "bitor", + "bool", + "break", + "case", + "catch", + "char", + "char16_t", + "char32_t", + "class", + "compl", + "const", + "constexpr", + "const_cast", + "continue", + "decltype", + "default", + "delete", + "do", + "double", + "dynamic_cast", + "else", + "enum", + "explicit", + "export", + "extern", + "false", + "final", + "float", + "for", + "friend", + "goto", + "if", + "inline", + "int", + "long", + "mutable", + "namespace", + "new", + "noexcept", + "not", + "not_eq", + "nullptr", + "operator", + "or", + "or_eq", + "override", + "private", + "protected", + "public", + "register", + "reinterpret_cast", + "return", + "short", + "signed", + "sizeof", + "static", + "static_assert", + "static_cast", + "struct", + "switch", + "template", + "this", + "thread_local", + "throw", + "true", + "try", + "typedef", + "typeid", + "typename", + "union", + "unsigned", + "using", + "virtual", + "void", + "volatile", + "wchar_t", + "while", + "xor", + "xor_eq", + ] + ) @staticmethod def checkMethodName(name): # Double '_' because 'assert' and '_assert' cannot be used in MS2013 compiler. # Bug 964892 and bug 963560. if name in CppKeywords.keywords: - name = '_' + name + '_' + name = "_" + name + "_" return name @@ -9521,14 +10991,14 @@ class CGStaticMethod(CGAbstractStaticBindingMethod): """ A class for generating the C++ code for an IDL static method. 
""" + def __init__(self, descriptor, method): self.method = method name = CppKeywords.checkMethodName(IDLToCIdentifier(method.identifier.name)) CGAbstractStaticBindingMethod.__init__(self, descriptor, name) def generate_code(self): - nativeName = CGSpecializedMethod.makeNativeName(self.descriptor, - self.method) + nativeName = CGSpecializedMethod.makeNativeName(self.descriptor, self.method) return CGMethodCall(nativeName, True, self.descriptor, self.method) def auto_profiler_label(self): @@ -9542,12 +11012,13 @@ def auto_profiler_label(self): uint32_t(js::ProfilingStackFrame::Flags::RELEVANT_FOR_JS)); """, interface_name=interface_name, - method_name=method_name) + method_name=method_name, + ) def error_reporting_label(self): - return CGSpecializedMethod.error_reporting_label_helper(self.descriptor, - self.method, - isConstructor=False) + return CGSpecializedMethod.error_reporting_label_helper( + self.descriptor, self.method, isConstructor=False + ) class CGSpecializedGetter(CGAbstractStaticMethod): @@ -9555,29 +11026,36 @@ class CGSpecializedGetter(CGAbstractStaticMethod): A class for generating the code for a specialized attribute getter that the JIT can call with lower overhead. """ + def __init__(self, descriptor, attr): self.attr = attr - name = 'get_' + IDLToCIdentifier(attr.identifier.name) + name = "get_" + IDLToCIdentifier(attr.identifier.name) args = [ - Argument('JSContext*', 'cx'), - Argument('JS::Handle', 'obj'), - Argument('void*', 'void_self'), - Argument('JSJitGetterCallArgs', 'args') + Argument("JSContext*", "cx"), + Argument("JS::Handle", "obj"), + Argument("void*", "void_self"), + Argument("JSJitGetterCallArgs", "args"), ] # StoreInSlot attributes have their getters called from Wrap(). We # really hope they can't run script, and don't want to annotate Wrap() # methods as doing that anyway, so let's not annotate them as # MOZ_CAN_RUN_SCRIPT. CGAbstractStaticMethod.__init__( - self, descriptor, name, "bool", args, - canRunScript=not attr.getExtendedAttribute("StoreInSlot")) + self, + descriptor, + name, + "bool", + args, + canRunScript=not attr.getExtendedAttribute("StoreInSlot"), + ) def definition_body(self): prefix = fill( """ auto* self = static_cast<${nativeType}*>(void_self); """, - nativeType=self.descriptor.nativeType) + nativeType=self.descriptor.nativeType, + ) if self.attr.isMaplikeOrSetlikeAttr(): assert not self.attr.getExtendedAttribute("CrossOriginReadable") @@ -9585,32 +11063,47 @@ def definition_body(self): # method for the size property of the backing object. Due to having # to unpack the backing object from the slot, this requires its own # generator. 
- return (prefix + - getMaplikeOrSetlikeSizeGetterBody(self.descriptor, self.attr)) - nativeName = CGSpecializedGetter.makeNativeName(self.descriptor, - self.attr) + return prefix + getMaplikeOrSetlikeSizeGetterBody( + self.descriptor, self.attr + ) + nativeName = CGSpecializedGetter.makeNativeName(self.descriptor, self.attr) type = self.attr.type if self.attr.getExtendedAttribute("CrossOriginReadable"): remoteType = type - extendedAttributes = self.descriptor.getExtendedAttributes(self.attr, getter=True) - if (remoteType.isGeckoInterface() - and not remoteType.unroll().inner.isExternal() - and remoteType.unroll().inner.getExtendedAttribute("ChromeOnly") is None): + extendedAttributes = self.descriptor.getExtendedAttributes( + self.attr, getter=True + ) + if ( + remoteType.isGeckoInterface() + and not remoteType.unroll().inner.isExternal() + and remoteType.unroll().inner.getExtendedAttribute("ChromeOnly") is None + ): # We'll use a JSObject. It might make more sense to use remoteType's # RemoteProxy, but it's not easy to construct a type for that from here. remoteType = BuiltinTypes[IDLBuiltinType.Types.object] - extendedAttributes.remove('infallible') + extendedAttributes.remove("infallible") prototypeID, _ = PrototypeIDAndDepth(self.descriptor) - prefix = fill(""" + prefix = ( + fill( + """ if (IsRemoteObjectProxy(obj, ${prototypeID})) { ${nativeType}::RemoteProxy* self = static_cast<${nativeType}::RemoteProxy*>(void_self); $*{call} } """, - prototypeID=prototypeID, - nativeType=self.descriptor.nativeType, - call=CGGetterCall(remoteType, nativeName, self.descriptor, self.attr, dontSetSlot=True, - extendedAttributes=extendedAttributes).define()) + prefix + prototypeID=prototypeID, + nativeType=self.descriptor.nativeType, + call=CGGetterCall( + remoteType, + nativeName, + self.descriptor, + self.attr, + dontSetSlot=True, + extendedAttributes=extendedAttributes, + ).define(), + ) + + prefix + ) if self.attr.slotIndices is not None: # We're going to store this return value in a slot on some object, @@ -9640,9 +11133,11 @@ def definition_body(self): } const size_t slotIndex = isXray ? ${xraySlotIndex} : ${slotIndex}; """, - xraySlotIndex=memberXrayExpandoReservedSlot(self.attr, - self.descriptor), - slotIndex=memberReservedSlot(self.attr, self.descriptor)) + xraySlotIndex=memberXrayExpandoReservedSlot( + self.attr, self.descriptor + ), + slotIndex=memberReservedSlot(self.attr, self.descriptor), + ) else: prefix += fill( """ @@ -9651,7 +11146,8 @@ def definition_body(self): MOZ_ASSERT(IsDOMObject(slotStorage)); const size_t slotIndex = ${slotIndex}; """, - slotIndex=memberReservedSlot(self.attr, self.descriptor)) + slotIndex=memberReservedSlot(self.attr, self.descriptor), + ) prefix += fill( """ @@ -9668,11 +11164,12 @@ def definition_body(self): } """, - maybeWrap=getMaybeWrapValueFuncForType(self.attr.type)) + maybeWrap=getMaybeWrapValueFuncForType(self.attr.type), + ) - return (prefix + - CGGetterCall(type, nativeName, - self.descriptor, self.attr).define()) + return ( + prefix + CGGetterCall(type, nativeName, self.descriptor, self.attr).define() + ) def auto_profiler_label(self): interface_name = self.descriptor.interface.identifier.name @@ -9685,7 +11182,8 @@ def auto_profiler_label(self): uint32_t(js::ProfilingStackFrame::Flags::RELEVANT_FOR_JS)); """, interface_name=interface_name, - attr_name=attr_name) + attr_name=attr_name, + ) def error_reporting_label(self): # Getters never need a BindingCallContext. 
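
A minimal, self-contained sketch (not part of this patch) of the convention the surrounding hunks demonstrate: Black normalizes string literals to double quotes and, once a call or collection exceeds its default 88-column limit, rewrites it with one element per line and a trailing comma. The snippet below is hypothetical and only round-trips a small string through Black's public format_str/Mode API to show that shape; nothing in it comes from the bindings code itself.

    import black

    # Hypothetical input in the old hand-wrapped, single-quoted style.
    # `Argument` is just text inside the string, so nothing here depends on
    # the bindings code being importable.
    SOURCE = (
        "args = [Argument('JSContext*', 'cx'),\n"
        "        Argument('JS::Handle', 'obj'),\n"
        "        Argument('void*', 'void_self')]\n"
    )

    # format_str parses and re-prints the snippet with Black's defaults
    # (88-column limit, double quotes, one element per line with a trailing
    # comma once the collection no longer fits on a single line), which is
    # the same shape as the reformatted "+" lines in these hunks.
    print(black.format_str(SOURCE, mode=black.Mode()))
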
@@ -9695,11 +11193,9 @@ def error_reporting_label(self): def makeNativeName(descriptor, attr): name = attr.identifier.name nativeName = MakeNativeName(descriptor.binaryNameFor(name)) - _, resultOutParam, _, _, _ = getRetvalDeclarationForType(attr.type, - descriptor) + _, resultOutParam, _, _, _ = getRetvalDeclarationForType(attr.type, descriptor) extendedAttrs = descriptor.getExtendedAttributes(attr, getter=True) - canFail = ('infallible' not in extendedAttrs or - 'canOOM' in extendedAttrs) + canFail = "infallible" not in extendedAttrs or "canOOM" in extendedAttrs if resultOutParam or attr.type.nullable() or canFail: nativeName = "Get" + nativeName return nativeName @@ -9710,12 +11206,14 @@ class CGGetterPromiseWrapper(CGAbstractStaticMethod): A class for generating a wrapper around another getter that will convert exceptions to promises. """ + def __init__(self, descriptor, getterToWrap): self.getter = getterToWrap name = self.makeName(getterToWrap.name) args = list(getterToWrap.args) - CGAbstractStaticMethod.__init__(self, descriptor, name, 'bool', args, - canRunScript=True) + CGAbstractStaticMethod.__init__( + self, descriptor, name, "bool", args, canRunScript=True + ) def definition_body(self): return fill( @@ -9727,7 +11225,8 @@ def definition_body(self): return ConvertExceptionToPromise(cx, args.rval()); """, getterName=self.getter.name, - args=", ".join(arg.name for arg in self.args)) + args=", ".join(arg.name for arg in self.args), + ) @staticmethod def makeName(getterName): @@ -9738,16 +11237,15 @@ class CGStaticGetter(CGAbstractStaticBindingMethod): """ A class for generating the C++ code for an IDL static attribute getter. """ + def __init__(self, descriptor, attr): self.attr = attr - name = 'get_' + IDLToCIdentifier(attr.identifier.name) + name = "get_" + IDLToCIdentifier(attr.identifier.name) CGAbstractStaticBindingMethod.__init__(self, descriptor, name) def generate_code(self): - nativeName = CGSpecializedGetter.makeNativeName(self.descriptor, - self.attr) - return CGGetterCall(self.attr.type, nativeName, self.descriptor, - self.attr) + nativeName = CGSpecializedGetter.makeNativeName(self.descriptor, self.attr) + return CGGetterCall(self.attr.type, nativeName, self.descriptor, self.attr) def auto_profiler_label(self): interface_name = self.descriptor.interface.identifier.name @@ -9760,7 +11258,8 @@ def auto_profiler_label(self): uint32_t(js::ProfilingStackFrame::Flags::RELEVANT_FOR_JS)); """, interface_name=interface_name, - attr_name=attr_name) + attr_name=attr_name, + ) def error_reporting_label(self): # Getters never need a BindingCallContext. @@ -9772,19 +11271,22 @@ class CGSpecializedSetter(CGAbstractStaticMethod): A class for generating the code for a specialized attribute setter that the JIT can call with lower overhead. 
""" + def __init__(self, descriptor, attr): self.attr = attr - name = 'set_' + IDLToCIdentifier(attr.identifier.name) - args = [Argument('JSContext*', 'cx'), - Argument('JS::Handle', 'obj'), - Argument('void*', 'void_self'), - Argument('JSJitSetterCallArgs', 'args')] - CGAbstractStaticMethod.__init__(self, descriptor, name, "bool", args, - canRunScript=True) + name = "set_" + IDLToCIdentifier(attr.identifier.name) + args = [ + Argument("JSContext*", "cx"), + Argument("JS::Handle", "obj"), + Argument("void*", "void_self"), + Argument("JSJitSetterCallArgs", "args"), + ] + CGAbstractStaticMethod.__init__( + self, descriptor, name, "bool", args, canRunScript=True + ) def definition_body(self): - nativeName = CGSpecializedSetter.makeNativeName(self.descriptor, - self.attr) + nativeName = CGSpecializedSetter.makeNativeName(self.descriptor, self.attr) type = self.attr.type call = CGSetterCall(type, nativeName, self.descriptor, self.attr).define() prefix = "" @@ -9792,11 +11294,15 @@ def definition_body(self): if type.isGeckoInterface() and not type.unroll().inner.isExternal(): # a setter taking a Gecko interface would require us to deal with remote # proxies for the value here. - raise TypeError("We don't support the setter of %s marked as " - "CrossOriginWritable because it takes a Gecko interface " - "as the value", attr.identifier.name) + raise TypeError( + "We don't support the setter of %s marked as " + "CrossOriginWritable because it takes a Gecko interface " + "as the value", + attr.identifier.name, + ) prototypeID, _ = PrototypeIDAndDepth(self.descriptor) - prefix = fill(""" + prefix = fill( + """ if (IsRemoteObjectProxy(obj, ${prototypeID})) { auto* self = static_cast<${nativeType}::RemoteProxy*>(void_self); $*{call} @@ -9804,14 +11310,16 @@ def definition_body(self): """, prototypeID=prototypeID, nativeType=self.descriptor.nativeType, - call=call) + call=call, + ) return prefix + fill( """ auto* self = static_cast<${nativeType}*>(void_self); $*{call} """, nativeType=self.descriptor.nativeType, - call=call) + call=call, + ) def auto_profiler_label(self): interface_name = self.descriptor.interface.identifier.name @@ -9824,21 +11332,25 @@ def auto_profiler_label(self): uint32_t(js::ProfilingStackFrame::Flags::RELEVANT_FOR_JS)); """, interface_name=interface_name, - attr_name=attr_name) + attr_name=attr_name, + ) @staticmethod def error_reporting_label_helper(descriptor, attr): # Setters need a BindingCallContext if the type of the attribute needs # one. - if not idlTypeNeedsCallContext(attr.type, descriptor, - allowTreatNonCallableAsNull=True): + if not idlTypeNeedsCallContext( + attr.type, descriptor, allowTreatNonCallableAsNull=True + ): return None - return GetLabelForErrorReporting(descriptor, attr, - isConstructor=False) + " setter" + return ( + GetLabelForErrorReporting(descriptor, attr, isConstructor=False) + " setter" + ) def error_reporting_label(self): - return CGSpecializedSetter.error_reporting_label_helper(self.descriptor, - self.attr) + return CGSpecializedSetter.error_reporting_label_helper( + self.descriptor, self.attr + ) @staticmethod def makeNativeName(descriptor, attr): @@ -9850,23 +11362,25 @@ class CGStaticSetter(CGAbstractStaticBindingMethod): """ A class for generating the C++ code for an IDL static attribute setter. 
""" + def __init__(self, descriptor, attr): self.attr = attr - name = 'set_' + IDLToCIdentifier(attr.identifier.name) + name = "set_" + IDLToCIdentifier(attr.identifier.name) CGAbstractStaticBindingMethod.__init__(self, descriptor, name) def generate_code(self): - nativeName = CGSpecializedSetter.makeNativeName(self.descriptor, - self.attr) - checkForArg = CGGeneric(fill( - """ + nativeName = CGSpecializedSetter.makeNativeName(self.descriptor, self.attr) + checkForArg = CGGeneric( + fill( + """ if (!args.requireAtLeast(cx, "${name} setter", 1)) { return false; } """, - name=self.attr.identifier.name)) - call = CGSetterCall(self.attr.type, nativeName, self.descriptor, - self.attr) + name=self.attr.identifier.name, + ) + ) + call = CGSetterCall(self.attr.type, nativeName, self.descriptor, self.attr) return CGList([checkForArg, call]) def auto_profiler_label(self): @@ -9880,11 +11394,13 @@ def auto_profiler_label(self): uint32_t(js::ProfilingStackFrame::Flags::RELEVANT_FOR_JS)); """, interface_name=interface_name, - attr_name=attr_name) + attr_name=attr_name, + ) def error_reporting_label(self): - return CGSpecializedSetter.error_reporting_label_helper(self.descriptor, - self.attr) + return CGSpecializedSetter.error_reporting_label_helper( + self.descriptor, self.attr + ) class CGSpecializedForwardingSetter(CGSpecializedSetter): @@ -9892,6 +11408,7 @@ class CGSpecializedForwardingSetter(CGSpecializedSetter): A class for generating the code for a specialized attribute setter with PutForwards that the JIT can call with lower overhead. """ + def __init__(self, descriptor, attr): CGSpecializedSetter.__init__(self, descriptor, attr) @@ -9917,12 +11434,15 @@ def definition_body(self): """, attr=attrName, interface=self.descriptor.interface.identifier.name, - forwardToAttrName=forwardToAttrName) + forwardToAttrName=forwardToAttrName, + ) def error_reporting_label(self): # We always need to be able to throw. - return GetLabelForErrorReporting(self.descriptor, self.attr, - isConstructor=False) + " setter" + return ( + GetLabelForErrorReporting(self.descriptor, self.attr, isConstructor=False) + + " setter" + ) class CGSpecializedReplaceableSetter(CGSpecializedSetter): @@ -9930,6 +11450,7 @@ class CGSpecializedReplaceableSetter(CGSpecializedSetter): A class for generating the code for a specialized attribute setter with Replaceable that the JIT can call with lower overhead. """ + def __init__(self, descriptor, attr): CGSpecializedSetter.__init__(self, descriptor, attr) @@ -9937,8 +11458,10 @@ def definition_body(self): attrName = self.attr.identifier.name # JS_DefineProperty can only deal with ASCII assert all(ord(c) < 128 for c in attrName) - return ('return JS_DefineProperty(cx, obj, "%s", args[0], JSPROP_ENUMERATE);\n' % - attrName) + return ( + 'return JS_DefineProperty(cx, obj, "%s", args[0], JSPROP_ENUMERATE);\n' + % attrName + ) def error_reporting_label(self): # We never throw directly. @@ -9950,6 +11473,7 @@ class CGSpecializedLenientSetter(CGSpecializedSetter): A class for generating the code for a specialized attribute setter with LenientSetter that the JIT can call with lower overhead. 
""" + def __init__(self, descriptor, attr): CGSpecializedSetter.__init__(self, descriptor, attr) @@ -9957,10 +11481,12 @@ def definition_body(self): attrName = self.attr.identifier.name # JS_DefineProperty can only deal with ASCII assert all(ord(c) < 128 for c in attrName) - return dedent(""" + return dedent( + """ DeprecationWarning(cx, obj, Document::eLenientSetter); return true; - """) + """ + ) def error_reporting_label(self): # We never throw; that's the whole point. @@ -9976,6 +11502,7 @@ class CGMemberJITInfo(CGThing): A class for generating the JITInfo for a property that points to our specialized getter and setter. """ + def __init__(self, descriptor, member): self.member = member self.descriptor = descriptor @@ -9983,9 +11510,21 @@ def __init__(self, descriptor, member): def declare(self): return "" - def defineJitInfo(self, infoName, opName, opType, infallible, movable, - eliminatable, aliasSet, alwaysInSlot, lazilyInSlot, - slotIndex, returnTypes, args): + def defineJitInfo( + self, + infoName, + opName, + opType, + infallible, + movable, + eliminatable, + aliasSet, + alwaysInSlot, + lazilyInSlot, + slotIndex, + returnTypes, + args, + ): """ aliasSet is a JSJitInfo::AliasSet value, without the "JSJitInfo::" bit. @@ -9993,10 +11532,18 @@ def defineJitInfo(self, infoName, opName, opType, infallible, movable, reason (e.g. we have overloads or we're not a method) and otherwise an iterable of the arguments for this method. """ - assert(not movable or aliasSet != "AliasEverything") # Can't move write-aliasing things - assert(not alwaysInSlot or movable) # Things always in slots had better be movable - assert(not eliminatable or aliasSet != "AliasEverything") # Can't eliminate write-aliasing things - assert(not alwaysInSlot or eliminatable) # Things always in slots had better be eliminatable + assert ( + not movable or aliasSet != "AliasEverything" + ) # Can't move write-aliasing things + assert ( + not alwaysInSlot or movable + ) # Things always in slots had better be movable + assert ( + not eliminatable or aliasSet != "AliasEverything" + ) # Can't eliminate write-aliasing things + assert ( + not alwaysInSlot or eliminatable + ) # Things always in slots had better be eliminatable def jitInfoInitializer(isTypedMethod): initializer = fill( @@ -10022,14 +11569,16 @@ def jitInfoInitializer(isTypedMethod): opType=opType, aliasSet=aliasSet, returnType=functools.reduce( - CGMemberJITInfo.getSingleReturnType, returnTypes, ""), + CGMemberJITInfo.getSingleReturnType, returnTypes, "" + ), isInfallible=toStringBool(infallible), isMovable=toStringBool(movable), isEliminatable=toStringBool(eliminatable), isAlwaysInSlot=toStringBool(alwaysInSlot), isLazilyCachedInSlot=toStringBool(lazilyInSlot), isTypedMethod=toStringBool(isTypedMethod), - slotIndex=slotIndex) + slotIndex=slotIndex, + ) return initializer.rstrip() slotAssert = fill( @@ -10038,14 +11587,17 @@ def jitInfoInitializer(isTypedMethod): static_assert(${slotIndex} < ${classReservedSlots}, "There is no slot for us"); """, slotIndex=slotIndex, - classReservedSlots=INSTANCE_RESERVED_SLOTS + self.descriptor.interface.totalMembersInSlots) + classReservedSlots=INSTANCE_RESERVED_SLOTS + + self.descriptor.interface.totalMembersInSlots, + ) if args is not None: argTypes = "%s_argTypes" % infoName args = [CGMemberJITInfo.getJSArgType(arg.type) for arg in args] args.append("JSJitInfo::ArgTypeListEnd") - argTypesDecl = ( - "static const JSJitInfo::ArgType %s[] = { %s };\n" % - (argTypes, ", ".join(args))) + argTypesDecl = "static const 
JSJitInfo::ArgType %s[] = { %s };\n" % ( + argTypes, + ", ".join(args), + ) return fill( """ $*{argTypesDecl} @@ -10059,7 +11611,8 @@ def jitInfoInitializer(isTypedMethod): infoName=infoName, jitInfo=indent(jitInfoInitializer(True)), argTypes=argTypes, - slotAssert=slotAssert) + slotAssert=slotAssert, + ) # Unexposed things are meant to be used from C++ directly, so we make # their jitinfo non-static. That way C++ can get at it. @@ -10076,17 +11629,19 @@ def jitInfoInitializer(isTypedMethod): storageClass=storageClass, infoName=infoName, jitInfo=jitInfoInitializer(False), - slotAssert=slotAssert) + slotAssert=slotAssert, + ) def define(self): if self.member.isAttr(): - getterinfo = ("%s_getterinfo" % - IDLToCIdentifier(self.member.identifier.name)) + getterinfo = "%s_getterinfo" % IDLToCIdentifier(self.member.identifier.name) name = IDLToCIdentifier(self.member.identifier.name) if self.member.type.isPromise(): name = CGGetterPromiseWrapper.makeName(name) - getter = ("get_%s" % name) - extendedAttrs = self.descriptor.getExtendedAttributes(self.member, getter=True) + getter = "get_%s" % name + extendedAttrs = self.descriptor.getExtendedAttributes( + self.member, getter=True + ) getterinfal = "infallible" in extendedAttrs # At this point getterinfal is true if our getter either can't throw @@ -10103,9 +11658,11 @@ def define(self): # Now we have to set getterinfal to whether we can _really_ ever # throw, from the point of view of the JS engine. - getterinfal = (getterinfal and - "canOOM" not in extendedAttrs and - infallibleForMember(self.member, self.member.type, self.descriptor)) + getterinfal = ( + getterinfal + and "canOOM" not in extendedAttrs + and infallibleForMember(self.member, self.member.type, self.descriptor) + ) isAlwaysInSlot = self.member.getExtendedAttribute("StoreInSlot") if self.member.slotIndices is not None: assert isAlwaysInSlot or self.member.getExtendedAttribute("Cached") @@ -10118,37 +11675,59 @@ def define(self): isLazilyCachedInSlot = False slotIndex = "0" - result = self.defineJitInfo(getterinfo, getter, "Getter", - getterinfal, movable, eliminatable, - aliasSet, isAlwaysInSlot, - isLazilyCachedInSlot, slotIndex, - [self.member.type], None) - if (not self.member.readonly or - self.member.getExtendedAttribute("PutForwards") is not None or - self.member.getExtendedAttribute("Replaceable") is not None or - self.member.getExtendedAttribute("LenientSetter") is not None): - setterinfo = ("%s_setterinfo" % - IDLToCIdentifier(self.member.identifier.name)) + result = self.defineJitInfo( + getterinfo, + getter, + "Getter", + getterinfal, + movable, + eliminatable, + aliasSet, + isAlwaysInSlot, + isLazilyCachedInSlot, + slotIndex, + [self.member.type], + None, + ) + if ( + not self.member.readonly + or self.member.getExtendedAttribute("PutForwards") is not None + or self.member.getExtendedAttribute("Replaceable") is not None + or self.member.getExtendedAttribute("LenientSetter") is not None + ): + setterinfo = "%s_setterinfo" % IDLToCIdentifier( + self.member.identifier.name + ) # Actually a JSJitSetterOp, but JSJitGetterOp is first in the # union. - setter = ("(JSJitGetterOp)set_%s" % - IDLToCIdentifier(self.member.identifier.name)) + setter = "(JSJitGetterOp)set_%s" % IDLToCIdentifier( + self.member.identifier.name + ) # Setters are always fallible, since they have to do a typed unwrap. 
- result += self.defineJitInfo(setterinfo, setter, "Setter", - False, False, False, "AliasEverything", - False, False, "0", - [BuiltinTypes[IDLBuiltinType.Types.void]], - None) + result += self.defineJitInfo( + setterinfo, + setter, + "Setter", + False, + False, + False, + "AliasEverything", + False, + False, + "0", + [BuiltinTypes[IDLBuiltinType.Types.void]], + None, + ) return result if self.member.isMethod(): - methodinfo = ("%s_methodinfo" % - IDLToCIdentifier(self.member.identifier.name)) + methodinfo = "%s_methodinfo" % IDLToCIdentifier(self.member.identifier.name) name = CppKeywords.checkMethodName( - IDLToCIdentifier(self.member.identifier.name)) + IDLToCIdentifier(self.member.identifier.name) + ) if self.member.returnsPromise(): name = CGMethodPromiseWrapper.makeName(name) # Actually a JSJitMethodOp, but JSJitGetterOp is first in the union. - method = ("(JSJitGetterOp)%s" % name) + method = "(JSJitGetterOp)%s" % name # Methods are infallible if they are infallible, have no arguments # to unwrap, and have a return type that's infallible to wrap up for @@ -10182,13 +11761,13 @@ def define(self): eliminatable = self.mayBeEliminatable() and hasInfallibleImpl # XXXbz can we move the smarts about fallibility due to arg # conversions into the JIT, using our new args stuff? - if (len(sig[1]) != 0 or - not infallibleForMember(self.member, sig[0], self.descriptor)): + if len(sig[1]) != 0 or not infallibleForMember( + self.member, sig[0], self.descriptor + ): # We have arguments or our return-value boxing can fail methodInfal = False else: - methodInfal = (hasInfallibleImpl and - "canOOM" not in extendedAttrs) + methodInfal = hasInfallibleImpl and "canOOM" not in extendedAttrs # For now, only bother to output args if we're side-effect-free. if self.member.affects == "Nothing": args = sig[1] @@ -10196,10 +11775,20 @@ def define(self): args = None aliasSet = self.aliasSet() - result = self.defineJitInfo(methodinfo, method, "Method", - methodInfal, movable, eliminatable, - aliasSet, False, False, "0", - [s[0] for s in sigs], args) + result = self.defineJitInfo( + methodinfo, + method, + "Method", + methodInfal, + movable, + eliminatable, + aliasSet, + False, + False, + "0", + [s[0] for s in sigs], + args, + ) return result raise TypeError("Illegal member type to CGPropertyJITInfo") @@ -10216,8 +11805,9 @@ def mayBeMovable(self): # don't want them coalesced with each other or loop-hoisted, since # their return value can change even if nothing is going on from our # point of view. 
- return (affects == "Nothing" and - (dependsOn != "Everything" and dependsOn != "DeviceState")) + return affects == "Nothing" and ( + dependsOn != "Everything" and dependsOn != "DeviceState" + ) def mayBeEliminatable(self): """ @@ -10294,8 +11884,9 @@ def getJSReturnTypeTag(t): if u.hasNullableType: # Might be null or not return "JSVAL_TYPE_UNKNOWN" - return functools.reduce(CGMemberJITInfo.getSingleReturnType, - u.flatMemberTypes, "") + return functools.reduce( + CGMemberJITInfo.getSingleReturnType, u.flatMemberTypes, "" + ) if t.isDictionary(): return "JSVAL_TYPE_OBJECT" if not t.isPrimitive(): @@ -10303,13 +11894,22 @@ def getJSReturnTypeTag(t): tag = t.tag() if tag == IDLType.Tags.bool: return "JSVAL_TYPE_BOOLEAN" - if tag in [IDLType.Tags.int8, IDLType.Tags.uint8, - IDLType.Tags.int16, IDLType.Tags.uint16, - IDLType.Tags.int32]: + if tag in [ + IDLType.Tags.int8, + IDLType.Tags.uint8, + IDLType.Tags.int16, + IDLType.Tags.uint16, + IDLType.Tags.int32, + ]: return "JSVAL_TYPE_INT32" - if tag in [IDLType.Tags.int64, IDLType.Tags.uint64, - IDLType.Tags.unrestricted_float, IDLType.Tags.float, - IDLType.Tags.unrestricted_double, IDLType.Tags.double]: + if tag in [ + IDLType.Tags.int64, + IDLType.Tags.uint64, + IDLType.Tags.unrestricted_float, + IDLType.Tags.float, + IDLType.Tags.unrestricted_double, + IDLType.Tags.double, + ]: # These all use JS_NumberValue, which can return int or double. # But TI treats "double" as meaning "int or double", so we're # good to return JSVAL_TYPE_DOUBLE here. @@ -10328,10 +11928,9 @@ def getSingleReturnType(existingType, t): if type == existingType: return existingType - if ((type == "JSVAL_TYPE_DOUBLE" and - existingType == "JSVAL_TYPE_INT32") or - (existingType == "JSVAL_TYPE_DOUBLE" and - type == "JSVAL_TYPE_INT32")): + if (type == "JSVAL_TYPE_DOUBLE" and existingType == "JSVAL_TYPE_INT32") or ( + existingType == "JSVAL_TYPE_DOUBLE" and type == "JSVAL_TYPE_INT32" + ): # Promote INT32 to DOUBLE as needed return "JSVAL_TYPE_DOUBLE" # Different types @@ -10342,7 +11941,10 @@ def getJSArgType(t): assert not t.isVoid() if t.nullable(): # Sometimes it might return null, sometimes not - return "JSJitInfo::ArgType(JSJitInfo::Null | %s)" % CGMemberJITInfo.getJSArgType(t.inner) + return ( + "JSJitInfo::ArgType(JSJitInfo::Null | %s)" + % CGMemberJITInfo.getJSArgType(t.inner) + ) if t.isSequence(): return "JSJitInfo::Object" if t.isPromise(): @@ -10365,9 +11967,9 @@ def getJSArgType(t): if t.isUnion(): u = t.unroll() type = "JSJitInfo::Null" if u.hasNullableType else "" - return ("JSJitInfo::ArgType(%s)" % - functools.reduce(CGMemberJITInfo.getSingleArgType, - u.flatMemberTypes, type)) + return "JSJitInfo::ArgType(%s)" % functools.reduce( + CGMemberJITInfo.getSingleArgType, u.flatMemberTypes, type + ) if t.isDictionary(): return "JSJitInfo::Object" if not t.isPrimitive(): @@ -10375,13 +11977,22 @@ def getJSArgType(t): tag = t.tag() if tag == IDLType.Tags.bool: return "JSJitInfo::Boolean" - if tag in [IDLType.Tags.int8, IDLType.Tags.uint8, - IDLType.Tags.int16, IDLType.Tags.uint16, - IDLType.Tags.int32]: + if tag in [ + IDLType.Tags.int8, + IDLType.Tags.uint8, + IDLType.Tags.int16, + IDLType.Tags.uint16, + IDLType.Tags.int32, + ]: return "JSJitInfo::Integer" - if tag in [IDLType.Tags.int64, IDLType.Tags.uint64, - IDLType.Tags.unrestricted_float, IDLType.Tags.float, - IDLType.Tags.unrestricted_double, IDLType.Tags.double]: + if tag in [ + IDLType.Tags.int64, + IDLType.Tags.uint64, + IDLType.Tags.unrestricted_float, + IDLType.Tags.float, + 
IDLType.Tags.unrestricted_double, + IDLType.Tags.double, + ]: # These all use JS_NumberValue, which can return int or double. # But TI treats "double" as meaning "int or double", so we're # good to return JSVAL_TYPE_DOUBLE here. @@ -10407,6 +12018,7 @@ class CGStaticMethodJitinfo(CGGeneric): """ A class for generating the JITInfo for a promise-returning static method. """ + def __init__(self, method): CGGeneric.__init__( self, @@ -10416,10 +12028,12 @@ def __init__(self, method): " { prototypes::id::_ID_Count }, { 0 }, JSJitInfo::StaticMethod,\n" " JSJitInfo::AliasEverything, JSVAL_TYPE_OBJECT, false, false,\n" " false, false, 0\n" - "};\n" % - (IDLToCIdentifier(method.identifier.name), - CppKeywords.checkMethodName( - IDLToCIdentifier(method.identifier.name)))) + "};\n" + % ( + IDLToCIdentifier(method.identifier.name), + CppKeywords.checkMethodName(IDLToCIdentifier(method.identifier.name)), + ), + ) def getEnumValueName(value): @@ -10432,8 +12046,8 @@ def getEnumValueName(value): if re.match("[^\x20-\x7E]", value): raise SyntaxError('Enum value "' + value + '" contains non-ASCII characters') if re.match("^[0-9]", value): - value = '_' + value - value = re.sub(r'[^0-9A-Za-z_]', '_', value) + value = "_" + value + value = re.sub(r"[^0-9A-Za-z_]", "_", value) if re.match("^_[A-Z]|__", value): raise SyntaxError('Enum value "' + value + '" is reserved by the C++ spec') if value == "_empty": @@ -10442,20 +12056,29 @@ def getEnumValueName(value): return "_empty" nativeName = MakeNativeName(value) if nativeName == "EndGuard_": - raise SyntaxError('Enum value "' + value + '" cannot be used because it' - ' collides with our internal EndGuard_ value. Please' - ' rename our internal EndGuard_ to something else') + raise SyntaxError( + 'Enum value "' + value + '" cannot be used because it' + " collides with our internal EndGuard_ value. 
Please" + " rename our internal EndGuard_ to something else" + ) return nativeName + class CGEnumToJSValue(CGAbstractMethod): def __init__(self, enum): enumType = enum.identifier.name self.stringsArray = enumType + "Values::" + ENUM_ENTRY_VARIABLE_NAME - CGAbstractMethod.__init__(self, None, "ToJSValue", "bool", - [Argument("JSContext*", "aCx"), - Argument(enumType, "aArgument"), - Argument("JS::MutableHandle", - "aValue")]) + CGAbstractMethod.__init__( + self, + None, + "ToJSValue", + "bool", + [ + Argument("JSContext*", "aCx"), + Argument(enumType, "aArgument"), + Argument("JS::MutableHandle", "aValue"), + ], + ) def definition_body(self): return fill( @@ -10470,7 +12093,8 @@ def definition_body(self): aValue.setString(resultStr); return true; """, - strings=self.stringsArray) + strings=self.stringsArray, + ) class CGEnum(CGThing): @@ -10501,21 +12125,27 @@ def __init__(self, enum): # -1 because nEnumStrings() includes a string for EndGuard_ real_entry_count=self.nEnumStrings() - 1, name=self.enum.identifier.name, - type=self.underlyingType()) + type=self.underlyingType(), + ) strings = CGNamespace( self.stringsNamespace(), - CGGeneric(declare=entryDecl, - define=fill( - """ + CGGeneric( + declare=entryDecl, + define=fill( + """ extern const EnumEntry ${name}[${count}] = { $*{entries} { nullptr, 0 } }; """, - name=ENUM_ENTRY_VARIABLE_NAME, - count=self.nEnumStrings(), - entries=''.join('{"%s", %d},\n' % (val, len(val)) - for val in self.enum.values())))) + name=ENUM_ENTRY_VARIABLE_NAME, + count=self.nEnumStrings(), + entries="".join( + '{"%s", %d},\n' % (val, len(val)) for val in self.enum.values() + ), + ), + ), + ) toJSValue = CGEnumToJSValue(enum) self.cgThings = CGList([strings, toJSValue], "\n") @@ -10531,8 +12161,9 @@ def underlyingType(self): return "uint8_t" if count <= 65536: return "uint16_t" - raise ValueError("Enum " + self.enum.identifier.name + - " has more than 65536 values") + raise ValueError( + "Enum " + self.enum.identifier.name + " has more than 65536 values" + ) def declare(self): decl = fill( @@ -10544,7 +12175,8 @@ def declare(self): """, name=self.enum.identifier.name, ty=self.underlyingType(), - enums=",\n".join(map(getEnumValueName, self.enum.values())) + ",\n") + enums=",\n".join(map(getEnumValueName, self.enum.values())) + ",\n", + ) return decl + "\n" + self.cgThings.declare() @@ -10574,23 +12206,23 @@ def getUnionAccessorSignatureType(type, descriptorProvider): wrapperType = "Record" # We don't use the returned template here, so it's OK to just pass no # sourceDescription. - elementInfo = getJSToNativeConversionInfo(type.inner, - descriptorProvider, - isMember=wrapperType) + elementInfo = getJSToNativeConversionInfo( + type.inner, descriptorProvider, isMember=wrapperType + ) if wrapperType == "Sequence": innerType = elementInfo.declType else: innerType = [recordKeyDeclType(type), elementInfo.declType] - return CGTemplatedType(wrapperType, innerType, - isConst=True, isReference=True) + return CGTemplatedType(wrapperType, innerType, isConst=True, isReference=True) # Nested unions are unwrapped automatically into our flatMemberTypes. 
assert not type.isUnion() if type.isGeckoInterface(): descriptor = descriptorProvider.getDescriptor( - type.unroll().inner.identifier.name) + type.unroll().inner.identifier.name + ) typeName = CGGeneric(descriptor.nativeType) if not type.unroll().inner.isExternal(): typeName = CGWrapper(typeName, post="&") @@ -10638,8 +12270,7 @@ def getUnionAccessorSignatureType(type, descriptorProvider): return CGGeneric(builtinNames[type.tag()]) -def getUnionTypeTemplateVars(unionType, type, descriptorProvider, - ownsMembers=False): +def getUnionTypeTemplateVars(unionType, type, descriptorProvider, ownsMembers=False): name = getUnionMemberName(type) holderName = "m" + name + "Holder" @@ -10647,19 +12278,23 @@ def getUnionTypeTemplateVars(unionType, type, descriptorProvider, # constructed as some type, since we've been trying to convert into the # corresponding member. prefix = "" if ownsMembers else "mUnion." - tryNextCode = ("$*{destroyHolder}\n" - "%sDestroy%s();\n" - "tryNext = true;\n" - "return true;\n" % (prefix, name)) + tryNextCode = ( + "$*{destroyHolder}\n" + "%sDestroy%s();\n" + "tryNext = true;\n" + "return true;\n" % (prefix, name) + ) - sourceDescription = ("%s branch of %s" % - (type.prettyName(), unionType.prettyName())) + sourceDescription = "%s branch of %s" % (type.prettyName(), unionType.prettyName()) conversionInfo = getJSToNativeConversionInfo( - type, descriptorProvider, failureCode=tryNextCode, + type, + descriptorProvider, + failureCode=tryNextCode, isDefinitelyObject=not type.isDictionary(), isMember=("OwningUnion" if ownsMembers else None), - sourceDescription=sourceDescription) + sourceDescription=sourceDescription, + ) if conversionInfo.holderType is not None: assert not ownsMembers @@ -10675,17 +12310,21 @@ def getUnionTypeTemplateVars(unionType, type, descriptorProvider, if type.isObject(): if ownsMembers: - body = dedent(""" + body = dedent( + """ MOZ_ASSERT(mType == eUninitialized); mValue.mObject.SetValue(obj); mType = eObject; - """) + """ + ) else: - body = dedent(""" + body = dedent( + """ MOZ_ASSERT(mUnion.mType == mUnion.eUninitialized); mUnion.mValue.mObject.SetValue(cx, obj); mUnion.mType = mUnion.eObject; - """) + """ + ) # It's a bit sketchy to do the security check after setting the value, # but it keeps the code cleaner and lets us avoid rooting |obj| over the @@ -10698,16 +12337,23 @@ def getUnionTypeTemplateVars(unionType, type, descriptorProvider, } return true; """, - sourceDescription=sourceDescription) + sourceDescription=sourceDescription, + ) setters = [ - ClassMethod("SetToObject", "bool", - [Argument("BindingCallContext&", "cx"), - Argument("JSObject*", "obj"), - Argument("bool", "passedToJSImpl", default="false")], - inline=True, bodyInHeader=True, - body=body) - ] + ClassMethod( + "SetToObject", + "bool", + [ + Argument("BindingCallContext&", "cx"), + Argument("JSObject*", "obj"), + Argument("bool", "passedToJSImpl", default="false"), + ], + inline=True, + bodyInHeader=True, + body=body, + ) + ] elif type.isDictionary() and not type.inner.needsConversionFromJS: # In this case we are never initialized from JS to start with setters = None @@ -10729,7 +12375,8 @@ def getUnionTypeTemplateVars(unionType, type, descriptorProvider, declName="memberSlot", holderName=(holderName if ownsMembers else "%s.ref()" % holderName), destroyHolder=destroyHolder, - passedToJSImpl="passedToJSImpl") + passedToJSImpl="passedToJSImpl", + ) jsConversion = fill( """ @@ -10743,7 +12390,8 @@ def getUnionTypeTemplateVars(unionType, type, descriptorProvider, 
structType=structType, name=name, ctorArgs=ctorArgs, - jsConversion=jsConversion) + jsConversion=jsConversion, + ) if ownsMembers: handleType = "JS::Handle" @@ -10756,15 +12404,20 @@ def getUnionTypeTemplateVars(unionType, type, descriptorProvider, else: cxType = "JSContext*" setters = [ - ClassMethod("TrySetTo" + name, "bool", - [Argument(cxType, "cx"), - Argument(handleType, "value"), - Argument("bool&", "tryNext"), - Argument("bool", "passedToJSImpl", default="false")], - inline=not ownsMembers, - bodyInHeader=not ownsMembers, - body=jsConversion) - ] + ClassMethod( + "TrySetTo" + name, + "bool", + [ + Argument(cxType, "cx"), + Argument(handleType, "value"), + Argument("bool&", "tryNext"), + Argument("bool", "passedToJSImpl", default="false"), + ], + inline=not ownsMembers, + bodyInHeader=not ownsMembers, + body=jsConversion, + ) + ] if needCallContext: # Add a method for non-binding uses of unions to allow them to set # things in the union without providing a call context (though if @@ -10774,25 +12427,34 @@ def getUnionTypeTemplateVars(unionType, type, descriptorProvider, BindingCallContext cx(cx_, nullptr); return TrySetTo${name}(cx, value, tryNext, passedToJSImpl); """, - name=name) + name=name, + ) setters.append( - ClassMethod("TrySetTo" + name, "bool", - [Argument("JSContext*", "cx_"), - Argument(handleType, "value"), - Argument("bool&", "tryNext"), - Argument("bool", "passedToJSImpl", default="false")], - inline=not ownsMembers, - bodyInHeader=not ownsMembers, - body=shimBody)) + ClassMethod( + "TrySetTo" + name, + "bool", + [ + Argument("JSContext*", "cx_"), + Argument(handleType, "value"), + Argument("bool&", "tryNext"), + Argument("bool", "passedToJSImpl", default="false"), + ], + inline=not ownsMembers, + bodyInHeader=not ownsMembers, + body=shimBody, + ) + ) return { "name": name, "structType": structType, "externalType": externalType, "setters": setters, - "holderType": conversionInfo.holderType.define() if conversionInfo.holderType else None, + "holderType": conversionInfo.holderType.define() + if conversionInfo.holderType + else None, "ctorArgs": ctorArgs, - "ctorArgList": [Argument("JSContext*", "cx")] if ctorNeedsCx else [] + "ctorArgList": [Argument("JSContext*", "cx")] if ctorNeedsCx else [], } @@ -10815,43 +12477,71 @@ def deps(self): def getStruct(self): - members = [ClassMember("mType", "Type", body="eUninitialized"), - ClassMember("mValue", "Value")] - ctor = ClassConstructor([], bodyInHeader=True, visibility="public", - explicit=True) + members = [ + ClassMember("mType", "Type", body="eUninitialized"), + ClassMember("mValue", "Value"), + ] + ctor = ClassConstructor( + [], bodyInHeader=True, visibility="public", explicit=True + ) methods = [] enumValues = ["eUninitialized"] toJSValCases = [CGCase("eUninitialized", CGGeneric("return false;\n"))] destructorCases = [CGCase("eUninitialized", None)] assignmentCases = [ - CGCase("eUninitialized", - CGGeneric('MOZ_ASSERT(mType == eUninitialized,\n' - ' "We need to destroy ourselves?");\n'))] + CGCase( + "eUninitialized", + CGGeneric( + "MOZ_ASSERT(mType == eUninitialized,\n" + ' "We need to destroy ourselves?");\n' + ), + ) + ] traceCases = [] unionValues = [] if self.type.hasNullableType: enumValues.append("eNull") - methods.append(ClassMethod("IsNull", "bool", [], const=True, inline=True, - body="return mType == eNull;\n", - bodyInHeader=True)) - methods.append(ClassMethod("SetNull", "void", [], inline=True, - body=("Uninit();\n" - "mType = eNull;\n"), - bodyInHeader=True)) + methods.append( + ClassMethod( + "IsNull", 
+ "bool", + [], + const=True, + inline=True, + body="return mType == eNull;\n", + bodyInHeader=True, + ) + ) + methods.append( + ClassMethod( + "SetNull", + "void", + [], + inline=True, + body=("Uninit();\n" "mType = eNull;\n"), + bodyInHeader=True, + ) + ) destructorCases.append(CGCase("eNull", None)) - assignmentCases.append(CGCase("eNull", - CGGeneric("MOZ_ASSERT(mType == eUninitialized);\n" - "mType = eNull;\n"))) - toJSValCases.append(CGCase("eNull", CGGeneric("rval.setNull();\n" - "return true;\n"))) + assignmentCases.append( + CGCase( + "eNull", + CGGeneric( + "MOZ_ASSERT(mType == eUninitialized);\n" "mType = eNull;\n" + ), + ) + ) + toJSValCases.append( + CGCase("eNull", CGGeneric("rval.setNull();\n" "return true;\n")) + ) hasObjectType = any(t.isObject() for t in self.type.flatMemberTypes) skipToJSVal = False for t in self.type.flatMemberTypes: - vars = getUnionTypeTemplateVars(self.type, - t, self.descriptorProvider, - ownsMembers=self.ownsMembers) + vars = getUnionTypeTemplateVars( + self.type, t, self.descriptorProvider, ownsMembers=self.ownsMembers + ) if vars["name"] != "Object" or self.ownsMembers: body = fill( """ @@ -10862,26 +12552,36 @@ def getStruct(self): mType = e${name}; return mValue.m${name}.SetValue(${ctorArgs}); """, - **vars) + **vars + ) # bodyInHeader must be false for return values because they own # their union members and we don't want include headers in # UnionTypes.h just to call Addref/Release - methods.append(ClassMethod( - "RawSetAs" + vars["name"], - vars["structType"] + "&", - vars["ctorArgList"], - bodyInHeader=not self.ownsMembers, - body=body % "MOZ_ASSERT(mType == eUninitialized);")) + methods.append( + ClassMethod( + "RawSetAs" + vars["name"], + vars["structType"] + "&", + vars["ctorArgList"], + bodyInHeader=not self.ownsMembers, + body=body % "MOZ_ASSERT(mType == eUninitialized);", + ) + ) uninit = "Uninit();" if hasObjectType and not self.ownsMembers: - uninit = 'MOZ_ASSERT(mType != eObject, "This will not play well with Rooted");\n' + uninit - methods.append(ClassMethod( - "SetAs" + vars["name"], - vars["structType"] + "&", - vars["ctorArgList"], - bodyInHeader=not self.ownsMembers, - body=body % uninit)) + uninit = ( + 'MOZ_ASSERT(mType != eObject, "This will not play well with Rooted");\n' + + uninit + ) + methods.append( + ClassMethod( + "SetAs" + vars["name"], + vars["structType"] + "&", + vars["ctorArgList"], + bodyInHeader=not self.ownsMembers, + body=body % uninit, + ) + ) if self.ownsMembers: if vars["setters"]: methods.extend(vars["setters"]) @@ -10895,12 +12595,17 @@ def getStruct(self): if charType: methods.append( - ClassMethod("SetStringLiteral", "void", - # Hack, but it works... - [Argument(charType, "(&aData)[N]")], - inline=True, bodyInHeader=True, - templateArgs=["int N"], - body="RawSetAs%s().AssignLiteral(aData);\n" % t.name)) + ClassMethod( + "SetStringLiteral", + "void", + # Hack, but it works... 
+ [Argument(charType, "(&aData)[N]")], + inline=True, + bodyInHeader=True, + templateArgs=["int N"], + body="RawSetAs%s().AssignLiteral(aData);\n" % t.name, + ) + ) body = fill( """ @@ -10908,35 +12613,49 @@ def getStruct(self): mValue.m${name}.Destroy(); mType = eUninitialized; """, - **vars) - methods.append(ClassMethod("Destroy" + vars["name"], - "void", - [], - visibility="private", - bodyInHeader=not self.ownsMembers, - body=body)) + **vars + ) + methods.append( + ClassMethod( + "Destroy" + vars["name"], + "void", + [], + visibility="private", + bodyInHeader=not self.ownsMembers, + body=body, + ) + ) body = fill("return mType == e${name};\n", **vars) - methods.append(ClassMethod("Is" + vars["name"], - "bool", - [], - const=True, - bodyInHeader=True, - body=body)) + methods.append( + ClassMethod( + "Is" + vars["name"], + "bool", + [], + const=True, + bodyInHeader=True, + body=body, + ) + ) body = fill( """ MOZ_ASSERT(Is${name}(), "Wrong type!"); return mValue.m${name}.Value(); """, - **vars) + **vars + ) # The non-const version of GetAs* returns our internal type getterReturnType = "%s&" % vars["structType"] - methods.append(ClassMethod("GetAs" + vars["name"], - getterReturnType, - [], - bodyInHeader=True, - body=body)) + methods.append( + ClassMethod( + "GetAs" + vars["name"], + getterReturnType, + [], + bodyInHeader=True, + body=body, + ) + ) # The const version of GetAs* returns our internal type # for owning unions, but our external type for non-owning # ones. @@ -10944,67 +12663,104 @@ def getStruct(self): getterReturnType = "%s const &" % vars["structType"] else: getterReturnType = vars["externalType"] - methods.append(ClassMethod("GetAs" + vars["name"], - getterReturnType, - [], - const=True, - bodyInHeader=True, - body=body)) - - unionValues.append( - fill("UnionMember<${structType} > m${name}", **vars)) + methods.append( + ClassMethod( + "GetAs" + vars["name"], + getterReturnType, + [], + const=True, + bodyInHeader=True, + body=body, + ) + ) + + unionValues.append(fill("UnionMember<${structType} > m${name}", **vars)) enumValues.append("e" + vars["name"]) conversionToJS = self.getConversionToJS(vars, t) if conversionToJS: - toJSValCases.append( - CGCase("e" + vars["name"], - conversionToJS)) + toJSValCases.append(CGCase("e" + vars["name"], conversionToJS)) else: skipToJSVal = True destructorCases.append( - CGCase("e" + vars["name"], - CGGeneric("Destroy%s();\n" % vars["name"]))) + CGCase("e" + vars["name"], CGGeneric("Destroy%s();\n" % vars["name"])) + ) assignmentCases.append( - CGCase("e" + vars["name"], - CGGeneric("SetAs%s() = aOther.GetAs%s();\n" % - (vars["name"], vars["name"])))) + CGCase( + "e" + vars["name"], + CGGeneric( + "SetAs%s() = aOther.GetAs%s();\n" % (vars["name"], vars["name"]) + ), + ) + ) if self.ownsMembers and typeNeedsRooting(t): if t.isObject(): traceCases.append( - CGCase("e" + vars["name"], - CGGeneric('JS::UnsafeTraceRoot(trc, %s, "%s");\n' % - ("&mValue.m" + vars["name"] + ".Value()", - "mValue.m" + vars["name"])))) + CGCase( + "e" + vars["name"], + CGGeneric( + 'JS::UnsafeTraceRoot(trc, %s, "%s");\n' + % ( + "&mValue.m" + vars["name"] + ".Value()", + "mValue.m" + vars["name"], + ) + ), + ) + ) elif t.isDictionary(): traceCases.append( - CGCase("e" + vars["name"], - CGGeneric("mValue.m%s.Value().TraceDictionary(trc);\n" % - vars["name"]))) + CGCase( + "e" + vars["name"], + CGGeneric( + "mValue.m%s.Value().TraceDictionary(trc);\n" + % vars["name"] + ), + ) + ) elif t.isSequence(): traceCases.append( - CGCase("e" + vars["name"], - 
CGGeneric("DoTraceSequence(trc, mValue.m%s.Value());\n" % - vars["name"]))) + CGCase( + "e" + vars["name"], + CGGeneric( + "DoTraceSequence(trc, mValue.m%s.Value());\n" + % vars["name"] + ), + ) + ) elif t.isRecord(): traceCases.append( - CGCase("e" + vars["name"], - CGGeneric("TraceRecord(trc, mValue.m%s.Value());\n" % - vars["name"]))) + CGCase( + "e" + vars["name"], + CGGeneric( + "TraceRecord(trc, mValue.m%s.Value());\n" % vars["name"] + ), + ) + ) else: assert t.isSpiderMonkeyInterface() traceCases.append( - CGCase("e" + vars["name"], - CGGeneric("mValue.m%s.Value().TraceSelf(trc);\n" % - vars["name"]))) + CGCase( + "e" + vars["name"], + CGGeneric( + "mValue.m%s.Value().TraceSelf(trc);\n" % vars["name"] + ), + ) + ) dtor = CGSwitch("mType", destructorCases).define() - methods.append(ClassMethod("Uninit", "void", [], - visibility="public", body=dtor, - bodyInHeader=not self.ownsMembers, - inline=not self.ownsMembers)) + methods.append( + ClassMethod( + "Uninit", + "void", + [], + visibility="public", + body=dtor, + bodyInHeader=not self.ownsMembers, + inline=not self.ownsMembers, + ) + ) if not skipToJSVal: methods.append( @@ -11014,23 +12770,30 @@ def getStruct(self): [ Argument("JSContext*", "cx"), Argument("JS::Handle", "scopeObj"), - Argument("JS::MutableHandle", "rval") + Argument("JS::MutableHandle", "rval"), ], - body=CGSwitch("mType", toJSValCases, - default=CGGeneric("return false;\n")).define() + "\nreturn false;\n", - const=True)) + body=CGSwitch( + "mType", toJSValCases, default=CGGeneric("return false;\n") + ).define() + + "\nreturn false;\n", + const=True, + ) + ) constructors = [ctor] selfName = CGUnionStruct.unionTypeName(self.type, self.ownsMembers) if self.ownsMembers: if traceCases: - traceBody = CGSwitch("mType", traceCases, - default=CGGeneric("")).define() + traceBody = CGSwitch( + "mType", traceCases, default=CGGeneric("") + ).define() else: traceBody = "" - methods.append(ClassMethod("TraceUnion", "void", - [Argument("JSTracer*", "trc")], - body=traceBody)) + methods.append( + ClassMethod( + "TraceUnion", "void", [Argument("JSTracer*", "trc")], body=traceBody + ) + ) if CGUnionStruct.isUnionCopyConstructible(self.type): constructors.append( ClassConstructor( @@ -11038,14 +12801,20 @@ def getStruct(self): bodyInHeader=True, visibility="public", explicit=True, - body="*this = aOther;\n")) + body="*this = aOther;\n", + ) + ) op_body = CGList([]) op_body.append(CGSwitch("aOther.mType", assignmentCases)) op_body.append(CGGeneric("return *this;\n")) - methods.append(ClassMethod( - "operator=", "%s&" % selfName, - [Argument("const %s&" % selfName, "aOther")], - body=op_body.define())) + methods.append( + ClassMethod( + "operator=", + "%s&" % selfName, + [Argument("const %s&" % selfName, "aOther")], + body=op_body.define(), + ) + ) disallowCopyConstruction = False else: disallowCopyConstruction = True @@ -11053,23 +12822,28 @@ def getStruct(self): disallowCopyConstruction = True if self.ownsMembers: - friend = " friend void ImplCycleCollectionUnlink(%s& aUnion);\n" % CGUnionStruct.unionTypeName(self.type, True) + friend = ( + " friend void ImplCycleCollectionUnlink(%s& aUnion);\n" + % CGUnionStruct.unionTypeName(self.type, True) + ) else: friend = " friend class %sArgument;\n" % str(self.type) bases = [ClassBase("AllOwningUnionBase")] if self.ownsMembers else [] - return CGClass(selfName, - bases=bases, - members=members, - constructors=constructors, - methods=methods, - disallowCopyConstruction=disallowCopyConstruction, - extradeclarations=friend, - 
destructor=ClassDestructor(visibility="public", - body="Uninit();\n", - bodyInHeader=True), - enums=[ClassEnum("Type", enumValues, visibility="private")], - unions=[ClassUnion("Value", unionValues, visibility="private")]) + return CGClass( + selfName, + bases=bases, + members=members, + constructors=constructors, + methods=methods, + disallowCopyConstruction=disallowCopyConstruction, + extradeclarations=friend, + destructor=ClassDestructor( + visibility="public", body="Uninit();\n", bodyInHeader=True + ), + enums=[ClassEnum("Type", enumValues, visibility="private")], + unions=[ClassUnion("Value", unionValues, visibility="private")], + ) def getConversionToJS(self, templateVars, type): if type.isDictionary() and not type.inner.needsConversionToJS: @@ -11080,14 +12854,16 @@ def getConversionToJS(self, templateVars, type): assert not type.nullable() # flatMemberTypes never has nullable types val = "mValue.m%(name)s.Value()" % templateVars wrapCode = wrapForType( - type, self.descriptorProvider, + type, + self.descriptorProvider, { "jsvalRef": "rval", "jsvalHandle": "rval", "obj": "scopeObj", "result": val, - "spiderMonkeyInterfacesAreStructs": True - }) + "spiderMonkeyInterfacesAreStructs": True, + }, + ) return CGGeneric(wrapCode) @staticmethod @@ -11126,25 +12902,38 @@ def __init__(self, type, descriptorProvider): def declare(self): structName = str(self.type) - members = [ClassMember("mUnion", structName + "&", - body="const_cast<%s&>(aUnion)" % structName)] + members = [ + ClassMember( + "mUnion", structName + "&", body="const_cast<%s&>(aUnion)" % structName + ) + ] # Argument needs to be a const ref because that's all Maybe<> allows - ctor = ClassConstructor([Argument("const %s&" % structName, "aUnion")], - bodyInHeader=True, - visibility="public", - explicit=True) + ctor = ClassConstructor( + [Argument("const %s&" % structName, "aUnion")], + bodyInHeader=True, + visibility="public", + explicit=True, + ) methods = [] if self.type.hasNullableType: - methods.append(ClassMethod("SetNull", "bool", [], - body=("MOZ_ASSERT(mUnion.mType == mUnion.eUninitialized);\n" - "mUnion.mType = mUnion.eNull;\n" - "return true;\n"), - inline=True, bodyInHeader=True)) + methods.append( + ClassMethod( + "SetNull", + "bool", + [], + body=( + "MOZ_ASSERT(mUnion.mType == mUnion.eUninitialized);\n" + "mUnion.mType = mUnion.eNull;\n" + "return true;\n" + ), + inline=True, + bodyInHeader=True, + ) + ) for t in self.type.flatMemberTypes: - vars = getUnionTypeTemplateVars(self.type, - t, self.descriptorProvider) + vars = getUnionTypeTemplateVars(self.type, t, self.descriptorProvider) if vars["setters"]: methods.extend(vars["setters"]) if vars["name"] != "Object": @@ -11154,13 +12943,18 @@ def declare(self): mUnion.mType = mUnion.e${name}; return mUnion.mValue.m${name}.SetValue(${ctorArgs}); """, - **vars) - methods.append(ClassMethod("RawSetAs" + vars["name"], - vars["structType"] + "&", - vars["ctorArgList"], - bodyInHeader=True, - body=body, - visibility="private")) + **vars + ) + methods.append( + ClassMethod( + "RawSetAs" + vars["name"], + vars["structType"] + "&", + vars["ctorArgList"], + bodyInHeader=True, + body=body, + visibility="private", + ) + ) # Provide a SetStringLiteral() method to support string defaults. if t.isByteString() or t.isUTF8String(): charType = "const nsCString::char_type" @@ -11171,24 +12965,31 @@ def declare(self): if charType: methods.append( - ClassMethod("SetStringLiteral", "void", - # Hack, but it works... 
- [Argument(charType, "(&aData)[N]")], - inline=True, bodyInHeader=True, - templateArgs=["int N"], - body="RawSetAs%s().AssignLiteral(aData);\n" % t.name)) + ClassMethod( + "SetStringLiteral", + "void", + # Hack, but it works... + [Argument(charType, "(&aData)[N]")], + inline=True, + bodyInHeader=True, + templateArgs=["int N"], + body="RawSetAs%s().AssignLiteral(aData);\n" % t.name, + ) + ) if vars["holderType"] is not None: - holderType = CGTemplatedType("Maybe", - CGGeneric(vars["holderType"])).define() - members.append(ClassMember("m%sHolder" % vars["name"], - holderType)) + holderType = CGTemplatedType( + "Maybe", CGGeneric(vars["holderType"]) + ).define() + members.append(ClassMember("m%sHolder" % vars["name"], holderType)) - return CGClass(structName + "Argument", - members=members, - constructors=[ctor], - methods=methods, - disallowCopyConstruction=True).declare() + return CGClass( + structName + "Argument", + members=members, + constructors=[ctor], + methods=methods, + disallowCopyConstruction=True, + ).declare() def define(self): return "" @@ -11199,6 +13000,7 @@ def deps(self): class ClassItem: """ Use with CGClass """ + def __init__(self, name, visibility): self.name = name self.visibility = visibility @@ -11211,23 +13013,36 @@ def define(self, cgClass): class ClassBase(ClassItem): - def __init__(self, name, visibility='public'): + def __init__(self, name, visibility="public"): ClassItem.__init__(self, name, visibility) def declare(self, cgClass): - return '%s %s' % (self.visibility, self.name) + return "%s %s" % (self.visibility, self.name) def define(self, cgClass): # Only in the header - return '' + return "" class ClassMethod(ClassItem): - def __init__(self, name, returnType, args, inline=False, static=False, - virtual=False, const=False, bodyInHeader=False, - templateArgs=None, visibility='public', body=None, - breakAfterReturnDecl="\n", - breakAfterSelf="\n", override=False, canRunScript=False): + def __init__( + self, + name, + returnType, + args, + inline=False, + static=False, + virtual=False, + const=False, + bodyInHeader=False, + templateArgs=None, + visibility="public", + body=None, + breakAfterReturnDecl="\n", + breakAfterSelf="\n", + override=False, + canRunScript=False, + ): """ override indicates whether to flag the method as override """ @@ -11245,23 +13060,23 @@ def __init__(self, name, returnType, args, inline=False, static=False, self.breakAfterReturnDecl = breakAfterReturnDecl self.breakAfterSelf = breakAfterSelf self.override = override - self.canRunScript = canRunScript; + self.canRunScript = canRunScript ClassItem.__init__(self, name, visibility) def getDecorators(self, declaring): decorators = [] if self.canRunScript: - decorators.append('MOZ_CAN_RUN_SCRIPT') + decorators.append("MOZ_CAN_RUN_SCRIPT") if self.inline: - decorators.append('inline') + decorators.append("inline") if declaring: if self.static: - decorators.append('static') + decorators.append("static") if self.virtual and not self.override: - decorators.append('virtual') + decorators.append("virtual") if decorators: - return ' '.join(decorators) + ' ' - return '' + return " ".join(decorators) + " " + return "" def getBody(self): # Override me or pass a string to constructor @@ -11269,14 +13084,17 @@ def getBody(self): return self.body def declare(self, cgClass): - templateClause = ('template <%s>\n' % ', '.join(self.templateArgs) - if self.bodyInHeader and self.templateArgs else '') - args = ', '.join([a.declare() for a in self.args]) + templateClause = ( + "template <%s>\n" % ", 
".join(self.templateArgs) + if self.bodyInHeader and self.templateArgs + else "" + ) + args = ", ".join([a.declare() for a in self.args]) if self.bodyInHeader: body = indent(self.getBody()) - body = '\n{\n' + body + '}\n' + body = "\n{\n" + body + "}\n" else: - body = ';\n' + body = ";\n" return fill( "${templateClause}${decorators}${returnType}${breakAfterReturnDecl}" @@ -11288,26 +13106,27 @@ def declare(self, cgClass): breakAfterReturnDecl=self.breakAfterReturnDecl, name=self.name, args=args, - const=' const' if self.const else '', - override=' override' if self.override else '', + const=" const" if self.const else "", + override=" override" if self.override else "", body=body, - breakAfterSelf=self.breakAfterSelf) + breakAfterSelf=self.breakAfterSelf, + ) def define(self, cgClass): if self.bodyInHeader: - return '' + return "" templateArgs = cgClass.templateArgs if templateArgs: if cgClass.templateSpecialization: - templateArgs = \ - templateArgs[len(cgClass.templateSpecialization):] + templateArgs = templateArgs[len(cgClass.templateSpecialization) :] if templateArgs: - templateClause = \ - 'template <%s>\n' % ', '.join([str(a) for a in templateArgs]) + templateClause = "template <%s>\n" % ", ".join( + [str(a) for a in templateArgs] + ) else: - templateClause = '' + templateClause = "" return fill( """ @@ -11322,9 +13141,10 @@ def define(self, cgClass): returnType=self.returnType, className=cgClass.getNameString(), name=self.name, - args=', '.join([a.define() for a in self.args]), - const=' const' if self.const else '', - body=self.getBody()) + args=", ".join([a.define() for a in self.args]), + const=" const" if self.const else "", + body=self.getBody(), + ) class ClassUsingDeclaration(ClassItem): @@ -11338,7 +13158,8 @@ class ClassUsingDeclaration(ClassItem): visibility determines the visibility of the name (public, protected, private), defaults to public. """ - def __init__(self, baseClass, name, visibility='public'): + + def __init__(self, baseClass, name, visibility="public"): self.baseClass = baseClass ClassItem.__init__(self, name, visibility) @@ -11346,7 +13167,7 @@ def declare(self, cgClass): return "using %s::%s;\n\n" % (self.baseClass, self.name) def define(self, cgClass): - return '' + return "" class ClassConstructor(ClassItem): @@ -11371,9 +13192,18 @@ class ClassConstructor(ClassItem): body contains a string with the code for the constructor, defaults to empty. 
""" - def __init__(self, args, inline=False, bodyInHeader=False, - visibility="private", explicit=False, constexpr=False, baseConstructors=None, - body=""): + + def __init__( + self, + args, + inline=False, + bodyInHeader=False, + visibility="private", + explicit=False, + constexpr=False, + baseConstructors=None, + body="", + ): assert not (inline and constexpr) assert not (bodyInHeader and constexpr) self.args = args @@ -11388,14 +13218,14 @@ def __init__(self, args, inline=False, bodyInHeader=False, def getDecorators(self, declaring): decorators = [] if self.explicit: - decorators.append('explicit') + decorators.append("explicit") if self.inline and declaring: - decorators.append('inline') + decorators.append("inline") if self.constexpr and declaring: - decorators.append('constexpr') + decorators.append("constexpr") if decorators: - return ' '.join(decorators) + ' ' - return '' + return " ".join(decorators) + " " + return "" def getInitializationList(self, cgClass): items = [str(c) for c in self.baseConstructors] @@ -11406,29 +13236,35 @@ def getInitializationList(self, cgClass): items.append(m.name + "(" + initialize + ")") if len(items) > 0: - return '\n : ' + ',\n '.join(items) - return '' + return "\n : " + ",\n ".join(items) + return "" def getBody(self): return self.body def declare(self, cgClass): - args = ', '.join([a.declare() for a in self.args]) + args = ", ".join([a.declare() for a in self.args]) if self.bodyInHeader: - body = self.getInitializationList(cgClass) + '\n{\n' + indent(self.getBody()) + '}\n' + body = ( + self.getInitializationList(cgClass) + + "\n{\n" + + indent(self.getBody()) + + "}\n" + ) else: - body = ';\n' + body = ";\n" return fill( "${decorators}${className}(${args})${body}\n", decorators=self.getDecorators(True), className=cgClass.getNameString(), args=args, - body=body) + body=body, + ) def define(self, cgClass): if self.bodyInHeader: - return '' + return "" return fill( """ @@ -11440,9 +13276,10 @@ def define(self, cgClass): """, decorators=self.getDecorators(False), className=cgClass.getNameString(), - args=', '.join([a.define() for a in self.args]), + args=", ".join([a.define() for a in self.args]), initializationList=self.getInitializationList(cgClass), - body=self.getBody()) + body=self.getBody(), + ) class ClassDestructor(ClassItem): @@ -11461,8 +13298,15 @@ class ClassDestructor(ClassItem): virtual determines whether the destructor is virtual, defaults to False. 
""" - def __init__(self, inline=False, bodyInHeader=False, - visibility="private", body='', virtual=False): + + def __init__( + self, + inline=False, + bodyInHeader=False, + visibility="private", + body="", + virtual=False, + ): self.inline = inline or bodyInHeader self.bodyInHeader = bodyInHeader self.body = body @@ -11472,31 +13316,32 @@ def __init__(self, inline=False, bodyInHeader=False, def getDecorators(self, declaring): decorators = [] if self.virtual and declaring: - decorators.append('virtual') + decorators.append("virtual") if self.inline and declaring: - decorators.append('inline') + decorators.append("inline") if decorators: - return ' '.join(decorators) + ' ' - return '' + return " ".join(decorators) + " " + return "" def getBody(self): return self.body def declare(self, cgClass): if self.bodyInHeader: - body = '\n{\n' + indent(self.getBody()) + '}\n' + body = "\n{\n" + indent(self.getBody()) + "}\n" else: - body = ';\n' + body = ";\n" return fill( "${decorators}~${className}()${body}\n", decorators=self.getDecorators(True), className=cgClass.getNameString(), - body=body) + body=body, + ) def define(self, cgClass): if self.bodyInHeader: - return '' + return "" return fill( """ ${decorators} @@ -11507,33 +13352,42 @@ def define(self, cgClass): """, decorators=self.getDecorators(False), className=cgClass.getNameString(), - body=self.getBody()) + body=self.getBody(), + ) class ClassMember(ClassItem): - def __init__(self, name, type, visibility="private", static=False, - body=None, hasIgnoreInitCheckFlag=False): + def __init__( + self, + name, + type, + visibility="private", + static=False, + body=None, + hasIgnoreInitCheckFlag=False, + ): self.type = type self.static = static self.body = body - self.hasIgnoreInitCheckFlag = hasIgnoreInitCheckFlag; + self.hasIgnoreInitCheckFlag = hasIgnoreInitCheckFlag ClassItem.__init__(self, name, visibility) def declare(self, cgClass): - return '%s%s%s %s;\n' % ('static ' if self.static else '', - 'MOZ_INIT_OUTSIDE_CTOR ' - if self.hasIgnoreInitCheckFlag else '', - self.type, self.name) + return "%s%s%s %s;\n" % ( + "static " if self.static else "", + "MOZ_INIT_OUTSIDE_CTOR " if self.hasIgnoreInitCheckFlag else "", + self.type, + self.name, + ) def define(self, cgClass): if not self.static: - return '' + return "" if self.body: body = " = " + self.body else: body = "" - return '%s %s::%s%s;\n' % (self.type, cgClass.getNameString(), - self.name, body) + return "%s %s::%s%s;\n" % (self.type, cgClass.getNameString(), self.name, body) class ClassEnum(ClassItem): @@ -11546,16 +13400,16 @@ def declare(self, cgClass): entries = [] for i in range(0, len(self.entries)): if not self.values or i >= len(self.values): - entry = '%s' % self.entries[i] + entry = "%s" % self.entries[i] else: - entry = '%s = %s' % (self.entries[i], self.values[i]) + entry = "%s = %s" % (self.entries[i], self.values[i]) entries.append(entry) - name = '' if not self.name else ' ' + self.name - return 'enum%s\n{\n%s\n};\n' % (name, indent(',\n'.join(entries))) + name = "" if not self.name else " " + self.name + return "enum%s\n{\n%s\n};\n" % (name, indent(",\n".join(entries))) def define(self, cgClass): # Only goes in the header - return '' + return "" class ClassUnion(ClassItem): @@ -11564,22 +13418,33 @@ def __init__(self, name, entries, visibility="public"): ClassItem.__init__(self, name, visibility) def declare(self, cgClass): - return "union %s\n{\n%s\n};\n" % (self.name, indent(''.join(self.entries))) + return "union %s\n{\n%s\n};\n" % (self.name, 
indent("".join(self.entries))) def define(self, cgClass): # Only goes in the header - return '' + return "" class CGClass(CGThing): - def __init__(self, name, bases=[], members=[], constructors=[], - destructor=None, methods=[], - enums=[], unions=[], templateArgs=[], - templateSpecialization=[], isStruct=False, - disallowCopyConstruction=False, indent='', - decorators='', - extradeclarations='', - extradefinitions=''): + def __init__( + self, + name, + bases=[], + members=[], + constructors=[], + destructor=None, + methods=[], + enums=[], + unions=[], + templateArgs=[], + templateSpecialization=[], + isStruct=False, + disallowCopyConstruction=False, + indent="", + decorators="", + extradeclarations="", + extradefinitions="", + ): CGThing.__init__(self) self.name = name self.bases = bases @@ -11596,7 +13461,7 @@ def __init__(self, name, bases=[], members=[], constructors=[], self.isStruct = isStruct self.disallowCopyConstruction = disallowCopyConstruction self.indent = indent - self.defaultVisibility = 'public' if isStruct else 'private' + self.defaultVisibility = "public" if isStruct else "private" self.decorators = decorators self.extradeclarations = extradeclarations self.extradefinitions = extradefinitions @@ -11604,88 +13469,98 @@ def __init__(self, name, bases=[], members=[], constructors=[], def getNameString(self): className = self.name if self.templateSpecialization: - className += '<%s>' % ', '.join([str(a) - for a in self.templateSpecialization]) + className += "<%s>" % ", ".join( + [str(a) for a in self.templateSpecialization] + ) return className def declare(self): - result = '' + result = "" if self.templateArgs: templateArgs = [a.declare() for a in self.templateArgs] - templateArgs = templateArgs[len(self.templateSpecialization):] - result += ('template <%s>\n' % - ','.join([str(a) for a in templateArgs])) + templateArgs = templateArgs[len(self.templateSpecialization) :] + result += "template <%s>\n" % ",".join([str(a) for a in templateArgs]) - type = 'struct' if self.isStruct else 'class' + type = "struct" if self.isStruct else "class" if self.templateSpecialization: - specialization = \ - '<%s>' % ', '.join([str(a) for a in self.templateSpecialization]) + specialization = "<%s>" % ", ".join( + [str(a) for a in self.templateSpecialization] + ) else: - specialization = '' + specialization = "" - myself = '%s %s%s' % (type, self.name, specialization) - if self.decorators != '': + myself = "%s %s%s" % (type, self.name, specialization) + if self.decorators != "": myself += " " + self.decorators result += myself if self.bases: - inherit = ' : ' + inherit = " : " result += inherit # Grab our first base baseItems = [CGGeneric(b.declare(self)) for b in self.bases] bases = baseItems[:1] # Indent the rest - bases.extend(CGIndenter(b, len(myself) + len(inherit)) - for b in baseItems[1:]) + bases.extend( + CGIndenter(b, len(myself) + len(inherit)) for b in baseItems[1:] + ) result += ",\n".join(b.define() for b in bases) - result += '\n{\n' + result += "\n{\n" result += self.extradeclarations def declareMembers(cgClass, memberList, defaultVisibility): - members = {'private': [], 'protected': [], 'public': []} + members = {"private": [], "protected": [], "public": []} for member in memberList: members[member.visibility].append(member) - if defaultVisibility == 'public': - order = ['public', 'protected', 'private'] + if defaultVisibility == "public": + order = ["public", "protected", "private"] else: - order = ['private', 'protected', 'public'] + order = ["private", "protected", 
"public"] - result = '' + result = "" lastVisibility = defaultVisibility for visibility in order: list = members[visibility] if list: if visibility != lastVisibility: - result += visibility + ':\n' + result += visibility + ":\n" for member in list: result += indent(member.declare(cgClass)) lastVisibility = visibility return (result, lastVisibility) if self.disallowCopyConstruction: + class DisallowedCopyConstructor(object): def __init__(self): self.visibility = "private" def declare(self, cgClass): name = cgClass.getNameString() - return ("%s(const %s&) = delete;\n" - "%s& operator=(const %s&) = delete;\n" % (name, name, name, name)) + return ( + "%s(const %s&) = delete;\n" + "%s& operator=(const %s&) = delete;\n" + % (name, name, name, name) + ) disallowedCopyConstructors = [DisallowedCopyConstructor()] else: disallowedCopyConstructors = [] - order = [self.enums, self.unions, - self.members, - self.constructors + disallowedCopyConstructors, - self.destructors, self.methods] + order = [ + self.enums, + self.unions, + self.members, + self.constructors + disallowedCopyConstructors, + self.destructors, + self.methods, + ] lastVisibility = self.defaultVisibility pieces = [] @@ -11696,14 +13571,14 @@ def declare(self, cgClass): code = code.rstrip() + "\n" # remove extra blank lines at the end pieces.append(code) - result += '\n'.join(pieces) - result += '};\n' + result += "\n".join(pieces) + result += "};\n" result = indent(result, len(self.indent)) return result def define(self): - def defineMembers(cgClass, memberList, itemCount, separator=''): - result = '' + def defineMembers(cgClass, memberList, itemCount, separator=""): + result = "" for member in memberList: if itemCount != 0: result = result + separator @@ -11714,29 +13589,36 @@ def defineMembers(cgClass, memberList, itemCount, separator=''): itemCount += 1 return (result, itemCount) - order = [(self.members, ''), (self.constructors, '\n'), - (self.destructors, '\n'), (self.methods, '\n')] + order = [ + (self.members, ""), + (self.constructors, "\n"), + (self.destructors, "\n"), + (self.methods, "\n"), + ] result = self.extradefinitions itemCount = 0 for memberList, separator in order: - memberString, itemCount = defineMembers(self, memberList, - itemCount, separator) + memberString, itemCount = defineMembers( + self, memberList, itemCount, separator + ) result = result + memberString return result class CGResolveOwnProperty(CGAbstractStaticMethod): def __init__(self, descriptor): - args = [Argument('JSContext*', 'cx'), - Argument('JS::Handle', 'wrapper'), - Argument('JS::Handle', 'obj'), - Argument('JS::Handle', 'id'), - Argument('JS::MutableHandle', 'desc'), - ] - CGAbstractStaticMethod.__init__(self, descriptor, "ResolveOwnProperty", - "bool", args) - + args = [ + Argument("JSContext*", "cx"), + Argument("JS::Handle", "wrapper"), + Argument("JS::Handle", "obj"), + Argument("JS::Handle", "id"), + Argument("JS::MutableHandle", "desc"), + ] + CGAbstractStaticMethod.__init__( + self, descriptor, "ResolveOwnProperty", "bool", args + ) + def definition_body(self): return "return js::GetProxyHandler(obj)->getOwnPropertyDescriptor(cx, wrapper, id, desc);\n" @@ -11746,19 +13628,28 @@ class CGResolveOwnPropertyViaResolve(CGAbstractBindingMethod): An implementation of Xray ResolveOwnProperty stuff for things that have a resolve hook. 
""" + def __init__(self, descriptor): - args = [Argument('JSContext*', 'cx'), - Argument('JS::Handle', 'wrapper'), - Argument('JS::Handle', 'obj'), - Argument('JS::Handle', 'id'), - Argument('JS::MutableHandle', 'desc')] - CGAbstractBindingMethod.__init__(self, descriptor, - "ResolveOwnPropertyViaResolve", - args, getThisObj="", - callArgs="") + args = [ + Argument("JSContext*", "cx"), + Argument("JS::Handle", "wrapper"), + Argument("JS::Handle", "obj"), + Argument("JS::Handle", "id"), + Argument("JS::MutableHandle", "desc"), + ] + CGAbstractBindingMethod.__init__( + self, + descriptor, + "ResolveOwnPropertyViaResolve", + args, + getThisObj="", + callArgs="", + ) def generate_code(self): - return CGGeneric(dedent(""" + return CGGeneric( + dedent( + """ { // Since we're dealing with an Xray, do the resolve on the // underlying object first. That gives it a chance to @@ -11782,17 +13673,22 @@ def generate_code(self): } } return self->DoResolve(cx, wrapper, id, desc); - """)) + """ + ) + ) class CGEnumerateOwnProperties(CGAbstractStaticMethod): def __init__(self, descriptor): - args = [Argument('JSContext*', 'cx'), - Argument('JS::Handle', 'wrapper'), - Argument('JS::Handle', 'obj'), - Argument('JS::MutableHandleVector', 'props')] - CGAbstractStaticMethod.__init__(self, descriptor, - "EnumerateOwnProperties", "bool", args) + args = [ + Argument("JSContext*", "cx"), + Argument("JS::Handle", "wrapper"), + Argument("JS::Handle", "obj"), + Argument("JS::MutableHandleVector", "props"), + ] + CGAbstractStaticMethod.__init__( + self, descriptor, "EnumerateOwnProperties", "bool", args + ) def definition_body(self): return "return js::GetProxyHandler(obj)->ownPropertyKeys(cx, wrapper, props);\n" @@ -11803,18 +13699,27 @@ class CGEnumerateOwnPropertiesViaGetOwnPropertyNames(CGAbstractBindingMethod): An implementation of Xray EnumerateOwnProperties stuff for things that have a resolve hook. """ + def __init__(self, descriptor): - args = [Argument('JSContext*', 'cx'), - Argument('JS::Handle', 'wrapper'), - Argument('JS::Handle', 'obj'), - Argument('JS::MutableHandleVector', 'props')] - CGAbstractBindingMethod.__init__(self, descriptor, - "EnumerateOwnPropertiesViaGetOwnPropertyNames", - args, getThisObj="", - callArgs="") + args = [ + Argument("JSContext*", "cx"), + Argument("JS::Handle", "wrapper"), + Argument("JS::Handle", "obj"), + Argument("JS::MutableHandleVector", "props"), + ] + CGAbstractBindingMethod.__init__( + self, + descriptor, + "EnumerateOwnPropertiesViaGetOwnPropertyNames", + args, + getThisObj="", + callArgs="", + ) def generate_code(self): - return CGGeneric(dedent(""" + return CGGeneric( + dedent( + """ FastErrorResult rv; // This wants all own props, not just enumerable ones. 
self->GetOwnPropertyNames(cx, props, false, rv); @@ -11822,19 +13727,25 @@ def generate_code(self): return false; } return true; - """)) + """ + ) + ) class CGPrototypeTraitsClass(CGClass): - def __init__(self, descriptor, indent=''): - templateArgs = [Argument('prototypes::ID', 'PrototypeID')] - templateSpecialization = ['prototypes::id::' + descriptor.name] - enums = [ClassEnum('', ['Depth'], - [descriptor.interface.inheritanceDepth()])] - CGClass.__init__(self, 'PrototypeTraits', indent=indent, - templateArgs=templateArgs, - templateSpecialization=templateSpecialization, - enums=enums, isStruct=True) + def __init__(self, descriptor, indent=""): + templateArgs = [Argument("prototypes::ID", "PrototypeID")] + templateSpecialization = ["prototypes::id::" + descriptor.name] + enums = [ClassEnum("", ["Depth"], [descriptor.interface.inheritanceDepth()])] + CGClass.__init__( + self, + "PrototypeTraits", + indent=indent, + templateArgs=templateArgs, + templateSpecialization=templateSpecialization, + enums=enums, + isStruct=True, + ) def deps(self): return set() @@ -11847,12 +13758,12 @@ def __init__(self, name, isStruct=False): self.isStruct = isStruct def declare(self): - type = 'struct' if self.isStruct else 'class' - return '%s %s;\n' % (type, self.name) + type = "struct" if self.isStruct else "class" + return "%s %s;\n" % (type, self.name) def define(self): # Header only - return '' + return "" def deps(self): return set() @@ -11875,8 +13786,16 @@ class CGProxySpecialOperation(CGPerSignatureCall): caller is responsible for declaring the variable and initializing it to false. """ - def __init__(self, descriptor, operation, checkFound=True, - argumentHandleValue=None, resultVar=None, foundVar=None): + + def __init__( + self, + descriptor, + operation, + checkFound=True, + argumentHandleValue=None, + resultVar=None, + foundVar=None, + ): self.checkFound = checkFound self.foundVar = foundVar or "found" @@ -11889,32 +13808,45 @@ def __init__(self, descriptor, operation, checkFound=True, # We pass len(arguments) as the final argument so that the # CGPerSignatureCall won't do any argument conversion of its own. - CGPerSignatureCall.__init__(self, returnType, arguments, nativeName, - False, descriptor, operation, - len(arguments), resultVar=resultVar, - objectName="proxy") + CGPerSignatureCall.__init__( + self, + returnType, + arguments, + nativeName, + False, + descriptor, + operation, + len(arguments), + resultVar=resultVar, + objectName="proxy", + ) if operation.isSetter(): # arguments[0] is the index or name of the item that we're setting. 
argument = arguments[1] info = getJSToNativeConversionInfo( - argument.type, descriptor, - sourceDescription=("value being assigned to %s setter" % - descriptor.interface.identifier.name)) + argument.type, + descriptor, + sourceDescription=( + "value being assigned to %s setter" + % descriptor.interface.identifier.name + ), + ) if argumentHandleValue is None: argumentHandleValue = "desc.value()" rootedValue = fill( """ JS::Rooted rootedValue(cx, ${argumentHandleValue}); """, - argumentHandleValue = argumentHandleValue) + argumentHandleValue=argumentHandleValue, + ) templateValues = { "declName": argument.identifier.name, "holderName": argument.identifier.name + "_holder", "val": argumentHandleValue, "maybeMutableVal": "&rootedValue", "obj": "obj", - "passedToJSImpl": "false" + "passedToJSImpl": "false", } self.cgRoot.prepend(instantiateJSToNativeConversion(info, templateValues)) # rootedValue needs to come before the conversion, so we @@ -11927,16 +13859,23 @@ def __init__(self, descriptor, operation, checkFound=True, def getArguments(self): args = [(a, a.identifier.name) for a in self.arguments] if self.idlNode.isGetter() or self.idlNode.isDeleter(): - args.append((FakeArgument(BuiltinTypes[IDLBuiltinType.Types.boolean], - self.idlNode), - self.foundVar)) + args.append( + ( + FakeArgument( + BuiltinTypes[IDLBuiltinType.Types.boolean], self.idlNode + ), + self.foundVar, + ) + ) return args def wrap_return_value(self): if not self.idlNode.isGetter() or self.templateValues is None: return "" - wrap = CGGeneric(wrapForType(self.returnType, self.descriptor, self.templateValues)) + wrap = CGGeneric( + wrapForType(self.returnType, self.descriptor, self.templateValues) + ) if self.checkFound: wrap = CGIfWrapper(wrap, self.foundVar) else: @@ -11959,13 +13898,27 @@ class CGProxyIndexedOperation(CGProxySpecialOperation): foundVar: See the docstring for CGProxySpecialOperation. """ - def __init__(self, descriptor, name, doUnwrap=True, checkFound=True, - argumentHandleValue=None, resultVar=None, foundVar=None): + + def __init__( + self, + descriptor, + name, + doUnwrap=True, + checkFound=True, + argumentHandleValue=None, + resultVar=None, + foundVar=None, + ): self.doUnwrap = doUnwrap - CGProxySpecialOperation.__init__(self, descriptor, name, checkFound, - argumentHandleValue=argumentHandleValue, - resultVar=resultVar, - foundVar=foundVar) + CGProxySpecialOperation.__init__( + self, + descriptor, + name, + checkFound, + argumentHandleValue=argumentHandleValue, + resultVar=resultVar, + foundVar=foundVar, + ) def define(self): # Our first argument is the id we're getting. @@ -11979,8 +13932,7 @@ def define(self): unwrap = "%s* self = UnwrapProxy(proxy);\n" % self.descriptor.nativeType else: unwrap = "" - return (setIndex + unwrap + - CGProxySpecialOperation.define(self)) + return setIndex + unwrap + CGProxySpecialOperation.define(self) class CGProxyIndexedGetter(CGProxyIndexedOperation): @@ -11997,11 +13949,19 @@ class CGProxyIndexedGetter(CGProxyIndexedOperation): foundVar: See the docstring for CGProxySpecialOperation. 
""" - def __init__(self, descriptor, templateValues=None, doUnwrap=True, - checkFound=True, foundVar=None): + + def __init__( + self, + descriptor, + templateValues=None, + doUnwrap=True, + checkFound=True, + foundVar=None, + ): self.templateValues = templateValues - CGProxyIndexedOperation.__init__(self, descriptor, 'IndexedGetter', - doUnwrap, checkFound, foundVar=foundVar) + CGProxyIndexedOperation.__init__( + self, descriptor, "IndexedGetter", doUnwrap, checkFound, foundVar=foundVar + ) class CGProxyIndexedPresenceChecker(CGProxyIndexedGetter): @@ -12012,6 +13972,7 @@ class CGProxyIndexedPresenceChecker(CGProxyIndexedGetter): foundVar: See the docstring for CGProxySpecialOperation. """ + def __init__(self, descriptor, foundVar): CGProxyIndexedGetter.__init__(self, descriptor, foundVar=foundVar) self.cgRoot.append(CGGeneric("(void)result;\n")) @@ -12021,9 +13982,11 @@ class CGProxyIndexedSetter(CGProxyIndexedOperation): """ Class to generate a call to an indexed setter. """ + def __init__(self, descriptor, argumentHandleValue=None): - CGProxyIndexedOperation.__init__(self, descriptor, 'IndexedSetter', - argumentHandleValue=argumentHandleValue) + CGProxyIndexedOperation.__init__( + self, descriptor, "IndexedSetter", argumentHandleValue=argumentHandleValue + ) class CGProxyNamedOperation(CGProxySpecialOperation): @@ -12040,12 +14003,25 @@ class CGProxyNamedOperation(CGProxySpecialOperation): tailCode: if we end up with a non-symbol string id, run this code after we do all our other work. """ - def __init__(self, descriptor, name, value=None, argumentHandleValue=None, - resultVar=None, foundVar=None, tailCode=""): - CGProxySpecialOperation.__init__(self, descriptor, name, - argumentHandleValue=argumentHandleValue, - resultVar=resultVar, - foundVar=foundVar) + + def __init__( + self, + descriptor, + name, + value=None, + argumentHandleValue=None, + resultVar=None, + foundVar=None, + tailCode="", + ): + CGProxySpecialOperation.__init__( + self, + descriptor, + name, + argumentHandleValue=argumentHandleValue, + resultVar=resultVar, + foundVar=foundVar, + ) self.value = value self.tailCode = tailCode @@ -12070,7 +14046,8 @@ def define(self): """, nativeType=self.descriptor.nativeType, op=CGProxySpecialOperation.define(self), - tailCode=self.tailCode) + tailCode=self.tailCode, + ) if self.value is None: return fill( @@ -12087,7 +14064,8 @@ def define(self): decls=decls, idName=idName, argName=argName, - main=main) + main=main, + ) # Sadly, we have to set up nameVal even if we have an atom id, # because we don't know for sure, and we can end up needing it @@ -12108,7 +14086,8 @@ def define(self): decls=decls, value=self.value, argName=argName, - main=main) + main=main, + ) class CGProxyNamedGetter(CGProxyNamedOperation): @@ -12120,11 +14099,12 @@ class CGProxyNamedGetter(CGProxyNamedOperation): foundVar: See the docstring for CGProxySpecialOperation. """ - def __init__(self, descriptor, templateValues=None, value=None, - foundVar=None): + + def __init__(self, descriptor, templateValues=None, value=None, foundVar=None): self.templateValues = templateValues - CGProxyNamedOperation.__init__(self, descriptor, 'NamedGetter', value, - foundVar=foundVar) + CGProxyNamedOperation.__init__( + self, descriptor, "NamedGetter", value, foundVar=foundVar + ) class CGProxyNamedPresenceChecker(CGProxyNamedGetter): @@ -12135,6 +14115,7 @@ class CGProxyNamedPresenceChecker(CGProxyNamedGetter): foundVar: See the docstring for CGProxySpecialOperation. 
""" + def __init__(self, descriptor, foundVar=None): CGProxyNamedGetter.__init__(self, descriptor, foundVar=foundVar) self.cgRoot.append(CGGeneric("(void)result;\n")) @@ -12144,10 +14125,15 @@ class CGProxyNamedSetter(CGProxyNamedOperation): """ Class to generate a call to a named setter. """ + def __init__(self, descriptor, tailCode, argumentHandleValue=None): - CGProxyNamedOperation.__init__(self, descriptor, 'NamedSetter', - argumentHandleValue=argumentHandleValue, - tailCode=tailCode) + CGProxyNamedOperation.__init__( + self, + descriptor, + "NamedSetter", + argumentHandleValue=argumentHandleValue, + tailCode=tailCode, + ) class CGProxyNamedDeleter(CGProxyNamedOperation): @@ -12158,16 +14144,19 @@ class CGProxyNamedDeleter(CGProxyNamedOperation): foundVar: See the docstring for CGProxySpecialOperation. """ + def __init__(self, descriptor, resultVar=None, foundVar=None): - CGProxyNamedOperation.__init__(self, descriptor, 'NamedDeleter', - resultVar=resultVar, - foundVar=foundVar) + CGProxyNamedOperation.__init__( + self, descriptor, "NamedDeleter", resultVar=resultVar, foundVar=foundVar + ) class CGProxyIsProxy(CGAbstractMethod): def __init__(self, descriptor): - args = [Argument('JSObject*', 'obj')] - CGAbstractMethod.__init__(self, descriptor, "IsProxy", "bool", args, alwaysInline=True) + args = [Argument("JSObject*", "obj")] + CGAbstractMethod.__init__( + self, descriptor, "IsProxy", "bool", args, alwaysInline=True + ) def declare(self): return "" @@ -12178,8 +14167,15 @@ def definition_body(self): class CGProxyUnwrap(CGAbstractMethod): def __init__(self, descriptor): - args = [Argument('JSObject*', 'obj')] - CGAbstractMethod.__init__(self, descriptor, "UnwrapProxy", descriptor.nativeType + '*', args, alwaysInline=True) + args = [Argument("JSObject*", "obj")] + CGAbstractMethod.__init__( + self, + descriptor, + "UnwrapProxy", + descriptor.nativeType + "*", + args, + alwaysInline=True, + ) def declare(self): return "" @@ -12195,10 +14191,13 @@ def definition_body(self): MOZ_ASSERT(IsProxy(obj)); return static_cast<${type}*>(js::GetProxyReservedSlot(obj, DOM_OBJECT_SLOT).toPrivate()); """, - type=self.descriptor.nativeType) + type=self.descriptor.nativeType, + ) + MISSING_PROP_PREF = "dom.missing_prop_counters.enabled" + def missingPropUseCountersForDescriptor(desc): if not desc.needsMissingPropUseCounters: return "" @@ -12210,7 +14209,8 @@ def missingPropUseCountersForDescriptor(desc): } """, - pref=prefIdentifier(MISSING_PROP_PREF)) + pref=prefIdentifier(MISSING_PROP_PREF), + ) def findAncestorWithInstrumentedProps(desc): @@ -12226,15 +14226,22 @@ def findAncestorWithInstrumentedProps(desc): ancestor = ancestor.parent return None + class CGCountMaybeMissingProperty(CGAbstractMethod): def __init__(self, descriptor): """ Returns whether we counted the property involved. """ - CGAbstractMethod.__init__(self, descriptor, "CountMaybeMissingProperty", - "bool", - [Argument("JS::Handle", "proxy"), - Argument("JS::Handle", "id")]) + CGAbstractMethod.__init__( + self, + descriptor, + "CountMaybeMissingProperty", + "bool", + [ + Argument("JS::Handle", "proxy"), + Argument("JS::Handle", "id"), + ], + ) def gen_switch(self, switchDecriptor): """ @@ -12250,18 +14257,21 @@ def gen_switch(self, switchDecriptor): generate nested switches) or strings to use for case bodies. 
""" cases = [] - for label, body in sorted(six.iteritems(switchDecriptor['cases'])): + for label, body in sorted(six.iteritems(switchDecriptor["cases"])): if isinstance(body, dict): body = self.gen_switch(body) - cases.append(fill( - """ + cases.append( + fill( + """ case ${label}: { $*{body} break; } """, - label=label, - body=body)) + label=label, + body=body, + ) + ) return fill( """ $*{precondition} @@ -12269,9 +14279,10 @@ def gen_switch(self, switchDecriptor): $*{cases} } """, - precondition=switchDecriptor['precondition'], - condition=switchDecriptor['condition'], - cases="".join(cases)) + precondition=switchDecriptor["precondition"], + condition=switchDecriptor["condition"], + cases="".join(cases), + ) def charSwitch(self, props, charIndex): """ @@ -12290,22 +14301,21 @@ def charSwitch(self, props, charIndex): } """, iface=self.descriptor.name, - name=props[0]) + name=props[0], + ) switch = dict() if charIndex == 0: - switch['precondition'] = \ - 'StringIdChars chars(nogc, str);\n' + switch["precondition"] = "StringIdChars chars(nogc, str);\n" else: - switch['precondition'] = "" + switch["precondition"] = "" # Find the first place where we might actually have a difference. - while all(prop[charIndex] == props[0][charIndex] - for prop in props): + while all(prop[charIndex] == props[0][charIndex] for prop in props): charIndex += 1 - switch['condition'] = 'chars[%d]' % charIndex - switch['cases'] = dict() + switch["condition"] = "chars[%d]" % charIndex + switch["cases"] = dict() current_props = None curChar = None idx = 0 @@ -12313,13 +14323,14 @@ def charSwitch(self, props, charIndex): nextChar = "'%s'" % props[idx][charIndex] if nextChar != curChar: if curChar: - switch['cases'][curChar] = self.charSwitch(current_props, - charIndex + 1) + switch["cases"][curChar] = self.charSwitch( + current_props, charIndex + 1 + ) current_props = [] curChar = nextChar current_props.append(props[idx]) idx += 1 - switch['cases'][curChar] = self.charSwitch(current_props, charIndex + 1) + switch["cases"][curChar] = self.charSwitch(current_props, charIndex + 1) return switch def definition_body(self): @@ -12333,7 +14344,8 @@ def definition_body(self): } """, - ancestor=ancestor.identifier.name) + ancestor=ancestor.identifier.name, + ) else: body = "" @@ -12342,17 +14354,17 @@ def definition_body(self): return body + dedent( """ return false; - """) + """ + ) lengths = set(len(prop) for prop in instrumentedProps) - switchDesc = { 'condition': 'JS::GetLinearStringLength(str)', - 'precondition': '' } - switchDesc['cases'] = dict() + switchDesc = {"condition": "JS::GetLinearStringLength(str)", "precondition": ""} + switchDesc["cases"] = dict() for length in sorted(lengths): - switchDesc['cases'][str(length)] = self.charSwitch( - list(sorted(prop for prop in instrumentedProps - if len(prop) == length)), - 0) + switchDesc["cases"][str(length)] = self.charSwitch( + list(sorted(prop for prop in instrumentedProps if len(prop) == length)), + 0, + ) return body + fill( """ @@ -12373,23 +14385,33 @@ def definition_body(self): return false; """, pref=prefIdentifier(MISSING_PROP_PREF), - switch=self.gen_switch(switchDesc)) + switch=self.gen_switch(switchDesc), + ) class CGDOMJSProxyHandler_getOwnPropDescriptor(ClassMethod): def __init__(self, descriptor): - args = [Argument('JSContext*', 'cx'), - Argument('JS::Handle', 'proxy'), - Argument('JS::Handle', 'id'), - Argument('bool', 'ignoreNamedProps'), - Argument('JS::MutableHandle', 'desc')] - ClassMethod.__init__(self, "getOwnPropDescriptor", "bool", args, - 
virtual=True, override=True, const=True) + args = [ + Argument("JSContext*", "cx"), + Argument("JS::Handle", "proxy"), + Argument("JS::Handle", "id"), + Argument("bool", "ignoreNamedProps"), + Argument("JS::MutableHandle", "desc"), + ] + ClassMethod.__init__( + self, + "getOwnPropDescriptor", + "bool", + args, + virtual=True, + override=True, + const=True, + ) self.descriptor = descriptor def getBody(self): - indexedGetter = self.descriptor.operations['IndexedGetter'] - indexedSetter = self.descriptor.operations['IndexedSetter'] + indexedGetter = self.descriptor.operations["IndexedGetter"] + indexedSetter = self.descriptor.operations["IndexedSetter"] if self.descriptor.isMaybeCrossOriginObject(): xrayDecl = dedent( @@ -12400,7 +14422,8 @@ def getBody(self): MOZ_ASSERT(js::IsObjectInContextCompartment(proxy, cx), "getOwnPropertyDescriptor() and set() should have dealt"); - """) + """ + ) xrayCheck = "" else: xrayDecl = "bool isXray = xpc::WrapperFactory::IsXrayWrapper(proxy);\n" @@ -12408,12 +14431,14 @@ def getBody(self): if self.descriptor.supportsIndexedProperties(): readonly = toStringBool(indexedSetter is None) - fillDescriptor = "FillPropertyDescriptor(desc, proxy, %s);\nreturn true;\n" % readonly + fillDescriptor = ( + "FillPropertyDescriptor(desc, proxy, %s);\nreturn true;\n" % readonly + ) templateValues = { - 'jsvalRef': 'desc.value()', - 'jsvalHandle': 'desc.value()', - 'obj': 'proxy', - 'successCode': fillDescriptor + "jsvalRef": "desc.value()", + "jsvalHandle": "desc.value()", + "obj": "proxy", + "successCode": fillDescriptor, } getIndexed = fill( """ @@ -12423,7 +14448,10 @@ def getBody(self): } """, - callGetter=CGProxyIndexedGetter(self.descriptor, templateValues).define()) + callGetter=CGProxyIndexedGetter( + self.descriptor, templateValues + ).define(), + ) else: getIndexed = "" @@ -12431,23 +14459,29 @@ def getBody(self): if self.descriptor.supportsNamedProperties(): operations = self.descriptor.operations - readonly = toStringBool(operations['NamedSetter'] is None) + readonly = toStringBool(operations["NamedSetter"] is None) fillDescriptor = ( "FillPropertyDescriptor(desc, proxy, %s, %s);\n" - "return true;\n" % - (readonly, - toStringBool(self.descriptor.namedPropertiesEnumerable))) - templateValues = {'jsvalRef': 'desc.value()', 'jsvalHandle': 'desc.value()', - 'obj': 'proxy', 'successCode': fillDescriptor} + "return true;\n" + % (readonly, toStringBool(self.descriptor.namedPropertiesEnumerable)) + ) + templateValues = { + "jsvalRef": "desc.value()", + "jsvalHandle": "desc.value()", + "obj": "proxy", + "successCode": fillDescriptor, + } - computeCondition = dedent(""" + computeCondition = dedent( + """ bool hasOnProto; if (!HasPropertyOnPrototype(cx, proxy, id, &hasOnProto)) { return false; } callNamedGetter = !hasOnProto; - """) - if self.descriptor.interface.getExtendedAttribute('OverrideBuiltins'): + """ + ) + if self.descriptor.interface.getExtendedAttribute("OverrideBuiltins"): computeCondition = fill( """ if (!isXray) { @@ -12456,15 +14490,16 @@ def getBody(self): $*{hasOnProto} } """, - hasOnProto=computeCondition) + hasOnProto=computeCondition, + ) outerCondition = "!ignoreNamedProps" if self.descriptor.supportsIndexedProperties(): outerCondition = "!IsArrayIndex(index) && " + outerCondition - namedGetCode = CGProxyNamedGetter(self.descriptor, - templateValues).define() - namedGet = fill(""" + namedGetCode = CGProxyNamedGetter(self.descriptor, templateValues).define() + namedGet = fill( + """ bool callNamedGetter = false; if (${outerCondition}) { 
$*{computeCondition} @@ -12473,9 +14508,10 @@ def getBody(self): $*{namedGetCode} } """, - outerCondition=outerCondition, - computeCondition=computeCondition, - namedGetCode=namedGetCode) + outerCondition=outerCondition, + computeCondition=computeCondition, + namedGetCode=namedGetCode, + ) namedGet += "\n" else: namedGet = "" @@ -12505,40 +14541,54 @@ def getBody(self): xrayCheck=xrayCheck, getIndexed=getIndexed, missingPropUseCounters=missingPropUseCounters, - namedGet=namedGet) + namedGet=namedGet, + ) class CGDOMJSProxyHandler_defineProperty(ClassMethod): def __init__(self, descriptor): # The usual convention is to name the ObjectOpResult out-parameter # `result`, but that name is a bit overloaded around here. - args = [Argument('JSContext*', 'cx_'), - Argument('JS::Handle', 'proxy'), - Argument('JS::Handle', 'id'), - Argument('JS::Handle', 'desc'), - Argument('JS::ObjectOpResult&', 'opresult'), - Argument('bool*', 'done')] - ClassMethod.__init__(self, "defineProperty", "bool", args, virtual=True, override=True, const=True) + args = [ + Argument("JSContext*", "cx_"), + Argument("JS::Handle", "proxy"), + Argument("JS::Handle", "id"), + Argument("JS::Handle", "desc"), + Argument("JS::ObjectOpResult&", "opresult"), + Argument("bool*", "done"), + ] + ClassMethod.__init__( + self, + "defineProperty", + "bool", + args, + virtual=True, + override=True, + const=True, + ) self.descriptor = descriptor def getBody(self): set = "" - indexedSetter = self.descriptor.operations['IndexedSetter'] + indexedSetter = self.descriptor.operations["IndexedSetter"] if indexedSetter: error_label = CGSpecializedMethod.error_reporting_label_helper( - self.descriptor, indexedSetter, isConstructor=False) + self.descriptor, indexedSetter, isConstructor=False + ) if error_label: cxDecl = fill( """ BindingCallContext cx(cx_, "${error_label}"); """, - error_label=error_label) + error_label=error_label, + ) else: cxDecl = dedent( """ JSContext* cx = cx_; - """) + """ + ) set += fill( """ uint32_t index = GetArrayIndexFromId(id); @@ -12556,7 +14606,8 @@ def getBody(self): } """, cxDecl=cxDecl, - callSetter=CGProxyIndexedSetter(self.descriptor).define()) + callSetter=CGProxyIndexedSetter(self.descriptor).define(), + ) elif self.descriptor.supportsIndexedProperties(): # We allow untrusted content to prevent Xrays from setting a # property if that property is an indexed property and we have no @@ -12569,32 +14620,39 @@ def getBody(self): *done = true; return opresult.failNoIndexedSetter(); } - """) + """ + ) - namedSetter = self.descriptor.operations['NamedSetter'] + namedSetter = self.descriptor.operations["NamedSetter"] if namedSetter: error_label = CGSpecializedMethod.error_reporting_label_helper( - self.descriptor, namedSetter, isConstructor=False) + self.descriptor, namedSetter, isConstructor=False + ) if error_label: set += fill( """ BindingCallContext cx(cx_, "${error_label}"); """, - error_label=error_label) + error_label=error_label, + ) else: set += dedent( """ JSContext* cx = cx_; - """) + """ + ) if self.descriptor.hasUnforgeableMembers: - raise TypeError("Can't handle a named setter on an interface " - "that has unforgeables. Figure out how that " - "should work!") + raise TypeError( + "Can't handle a named setter on an interface " + "that has unforgeables. Figure out how that " + "should work!" 
+ ) tailCode = dedent( """ *done = true; return opresult.succeed(); - """) + """ + ) set += CGProxyNamedSetter(self.descriptor, tailCode).define() else: # We allow untrusted content to prevent Xrays from setting a @@ -12614,7 +14672,10 @@ def getBody(self): return opresult.failNoNamedSetter(); } """, - presenceChecker=CGProxyNamedPresenceChecker(self.descriptor, foundVar="found").define()) + presenceChecker=CGProxyNamedPresenceChecker( + self.descriptor, foundVar="found" + ).define(), + ) if self.descriptor.isMaybeCrossOriginObject(): set += dedent( """ @@ -12622,12 +14683,15 @@ def getBody(self): "Why did the MaybeCrossOriginObject defineProperty override fail?"); MOZ_ASSERT(js::IsObjectInContextCompartment(proxy, cx_), "Why did the MaybeCrossOriginObject defineProperty override fail?"); - """) + """ + ) # In all cases we want to tail-call to our base class; we can # always land here for symbols. - set += ("return mozilla::dom::DOMProxyHandler::defineProperty(%s);\n" % - ", ".join(a.name for a in self.args)) + set += ( + "return mozilla::dom::DOMProxyHandler::defineProperty(%s);\n" + % ", ".join(a.name for a in self.args) + ) return set @@ -12642,14 +14706,16 @@ def getDeleterBody(descriptor, type, foundVar=None): - own property found but can't be deleted (foundVar=true, deleteSucceeded=false) """ assert type in ("Named", "Indexed") - deleter = descriptor.operations[type + 'Deleter'] + deleter = descriptor.operations[type + "Deleter"] if deleter: assert type == "Named" assert foundVar is not None if descriptor.hasUnforgeableMembers: - raise TypeError("Can't handle a deleter on an interface " - "that has unforgeables. Figure out how " - "that should work!") + raise TypeError( + "Can't handle a deleter on an interface " + "that has unforgeables. Figure out how " + "that should work!" + ) # See if the deleter method is fallible. t = deleter.signatures()[0][0] if t.isPrimitive() and not t.nullable() and t.tag() == IDLType.Tags.bool: @@ -12662,16 +14728,19 @@ def getDeleterBody(descriptor, type, foundVar=None): deleteSucceeded = true; } """, - foundVar=foundVar) + foundVar=foundVar, + ) else: # No boolean return value: if a property is found, # deleting it always succeeds. 
setDS = "deleteSucceeded = true;\n" - body = (CGProxyNamedDeleter(descriptor, - resultVar="deleteSucceeded", - foundVar=foundVar).define() + - setDS) + body = ( + CGProxyNamedDeleter( + descriptor, resultVar="deleteSucceeded", foundVar=foundVar + ).define() + + setDS + ) elif getattr(descriptor, "supports%sProperties" % type)(): presenceCheckerClass = globals()["CGProxy%sPresenceChecker" % type] foundDecl = "" @@ -12685,8 +14754,11 @@ def getDeleterBody(descriptor, type, foundVar=None): deleteSucceeded = !${foundVar}; """, foundDecl=foundDecl, - presenceChecker=presenceCheckerClass(descriptor, foundVar=foundVar).define(), - foundVar=foundVar) + presenceChecker=presenceCheckerClass( + descriptor, foundVar=foundVar + ).define(), + foundVar=foundVar, + ) else: body = None return body @@ -12694,13 +14766,16 @@ def getDeleterBody(descriptor, type, foundVar=None): class CGDeleteNamedProperty(CGAbstractStaticMethod): def __init__(self, descriptor): - args = [Argument('JSContext*', 'cx'), - Argument('JS::Handle', 'xray'), - Argument('JS::Handle', 'proxy'), - Argument('JS::Handle', 'id'), - Argument('JS::ObjectOpResult&', 'opresult')] - CGAbstractStaticMethod.__init__(self, descriptor, "DeleteNamedProperty", - "bool", args) + args = [ + Argument("JSContext*", "cx"), + Argument("JS::Handle", "xray"), + Argument("JS::Handle", "proxy"), + Argument("JS::Handle", "id"), + Argument("JS::ObjectOpResult&", "opresult"), + ] + CGAbstractStaticMethod.__init__( + self, descriptor, "DeleteNamedProperty", "bool", args + ) def definition_body(self): return fill( @@ -12717,25 +14792,31 @@ def definition_body(self): } return opresult.failCantDelete(); """, - namedBody=getDeleterBody(self.descriptor, "Named", foundVar="found")) + namedBody=getDeleterBody(self.descriptor, "Named", foundVar="found"), + ) class CGDOMJSProxyHandler_delete(ClassMethod): def __init__(self, descriptor): - args = [Argument('JSContext*', 'cx'), - Argument('JS::Handle', 'proxy'), - Argument('JS::Handle', 'id'), - Argument('JS::ObjectOpResult&', 'opresult')] - ClassMethod.__init__(self, "delete_", "bool", args, - virtual=True, override=True, const=True) + args = [ + Argument("JSContext*", "cx"), + Argument("JS::Handle", "proxy"), + Argument("JS::Handle", "id"), + Argument("JS::ObjectOpResult&", "opresult"), + ] + ClassMethod.__init__( + self, "delete_", "bool", args, virtual=True, override=True, const=True + ) self.descriptor = descriptor def getBody(self): - delete = dedent(""" + delete = dedent( + """ MOZ_ASSERT(!xpc::WrapperFactory::IsXrayWrapper(proxy), "Should not have a XrayWrapper here"); - """) + """ + ) if self.descriptor.isMaybeCrossOriginObject(): delete += dedent( @@ -12747,12 +14828,13 @@ def getBody(self): // Safe to enter the Realm of proxy now. JSAutoRealm ar(cx, proxy); JS_MarkCrossZoneId(cx, id); - """) + """ + ) indexedBody = getDeleterBody(self.descriptor, "Indexed") if indexedBody is not None: # Can't handle cross-origin objects here. - assert not self.descriptor.isMaybeCrossOriginObject(); + assert not self.descriptor.isMaybeCrossOriginObject() delete += fill( """ uint32_t index = GetArrayIndexFromId(id); @@ -12762,7 +14844,8 @@ def getBody(self): return deleteSucceeded ? 
opresult.succeed() : opresult.failCantDelete(); } """, - indexedBody=indexedBody) + indexedBody=indexedBody, + ) namedBody = getDeleterBody(self.descriptor, "Named", foundVar="found") if namedBody is not None: @@ -12781,9 +14864,10 @@ def getBody(self): tryNamedDelete = !hasProp; } } - """) + """ + ) - if not self.descriptor.interface.getExtendedAttribute('OverrideBuiltins'): + if not self.descriptor.interface.getExtendedAttribute("OverrideBuiltins"): delete += dedent( """ if (tryNamedDelete) { @@ -12793,7 +14877,8 @@ def getBody(self): } tryNamedDelete = !hasOnProto; } - """) + """ + ) # We always return above for an index id in the case when we support # indexed properties, so we can just treat the id as a name @@ -12809,24 +14894,33 @@ def getBody(self): } } """, - namedBody=namedBody) + namedBody=namedBody, + ) - delete += dedent(""" + delete += dedent( + """ return dom::DOMProxyHandler::delete_(cx, proxy, id, opresult); - """) + """ + ) return delete class CGDOMJSProxyHandler_ownPropNames(ClassMethod): - def __init__(self, descriptor, ): - args = [Argument('JSContext*', 'cx'), - Argument('JS::Handle', 'proxy'), - Argument('unsigned', 'flags'), - Argument('JS::MutableHandleVector', 'props')] - ClassMethod.__init__(self, "ownPropNames", "bool", args, - virtual=True, override=True, const=True) + def __init__( + self, + descriptor, + ): + args = [ + Argument("JSContext*", "cx"), + Argument("JS::Handle", "proxy"), + Argument("unsigned", "flags"), + Argument("JS::MutableHandleVector", "props"), + ] + ClassMethod.__init__( + self, "ownPropNames", "bool", args, virtual=True, override=True, const=True + ) self.descriptor = descriptor def getBody(self): @@ -12852,7 +14946,8 @@ def getBody(self): return xpc::AppendCrossOriginWhitelistedPropNames(cx, props); } - """) + """ + ) xrayCheck = "" else: xrayDecl = "bool isXray = xpc::WrapperFactory::IsXrayWrapper(proxy);\n" @@ -12875,12 +14970,13 @@ def getBody(self): } } """, - callerType=callerType) + callerType=callerType, + ) else: addIndices = "" if self.descriptor.supportsNamedProperties(): - if self.descriptor.interface.getExtendedAttribute('OverrideBuiltins'): + if self.descriptor.interface.getExtendedAttribute("OverrideBuiltins"): shadow = "!isXray" else: shadow = "false" @@ -12899,10 +14995,12 @@ def getBody(self): } """, callerType=callerType, - shadow=shadow) + shadow=shadow, + ) if not self.descriptor.namedPropertiesEnumerable: - addNames = CGIfWrapper(CGGeneric(addNames), - "flags & JSITER_HIDDEN").define() + addNames = CGIfWrapper( + CGGeneric(addNames), "flags & JSITER_HIDDEN" + ).define() addNames = "\n" + addNames else: addNames = "" @@ -12915,7 +15013,8 @@ def getBody(self): return false; } """, - xrayCheck=xrayCheck) + xrayCheck=xrayCheck, + ) if self.descriptor.isMaybeCrossOriginObject(): # We need to enter our compartment (which we might not be @@ -12931,7 +15030,8 @@ def getBody(self): JS_MarkCrossZoneId(cx, id); } """, - addExpandoProps=addExpandoProps) + addExpandoProps=addExpandoProps, + ) return fill( """ @@ -12946,17 +15046,21 @@ def getBody(self): xrayDecl=xrayDecl, addIndices=addIndices, addNames=addNames, - addExpandoProps=addExpandoProps) + addExpandoProps=addExpandoProps, + ) class CGDOMJSProxyHandler_hasOwn(ClassMethod): def __init__(self, descriptor): - args = [Argument('JSContext*', 'cx'), - Argument('JS::Handle', 'proxy'), - Argument('JS::Handle', 'id'), - Argument('bool*', 'bp')] - ClassMethod.__init__(self, "hasOwn", "bool", args, - virtual=True, override=True, const=True) + args = [ + Argument("JSContext*", 
"cx"), + Argument("JS::Handle", "proxy"), + Argument("JS::Handle", "id"), + Argument("bool*", "bp"), + ] + ClassMethod.__init__( + self, "hasOwn", "bool", args, virtual=True, override=True, const=True + ) self.descriptor = descriptor def getBody(self): @@ -12973,7 +15077,8 @@ def getBody(self): // Now safe to enter the Realm of proxy and do the rest of the work there. JSAutoRealm ar(cx, proxy); JS_MarkCrossZoneId(cx, id); - """) + """ + ) else: maybeCrossOrigin = "" @@ -12990,7 +15095,10 @@ def getBody(self): } """, - presenceChecker=CGProxyIndexedPresenceChecker(self.descriptor, foundVar="found").define()) + presenceChecker=CGProxyIndexedPresenceChecker( + self.descriptor, foundVar="found" + ).define(), + ) else: indexed = "" @@ -13004,8 +15112,11 @@ def getBody(self): *bp = found; """, - presenceChecker=CGProxyNamedPresenceChecker(self.descriptor, foundVar="found").define()) - if not self.descriptor.interface.getExtendedAttribute('OverrideBuiltins'): + presenceChecker=CGProxyNamedPresenceChecker( + self.descriptor, foundVar="found" + ).define(), + ) + if not self.descriptor.interface.getExtendedAttribute("OverrideBuiltins"): named = fill( """ bool hasOnProto; @@ -13017,7 +15128,8 @@ def getBody(self): return true; } """, - protoLacksProperty=named) + protoLacksProperty=named, + ) named += "*bp = false;\n" else: named += "\n" @@ -13050,18 +15162,22 @@ def getBody(self): maybeCrossOrigin=maybeCrossOrigin, indexed=indexed, missingPropUseCounters=missingPropUseCounters, - named=named) + named=named, + ) class CGDOMJSProxyHandler_get(ClassMethod): def __init__(self, descriptor): - args = [Argument('JSContext*', 'cx'), - Argument('JS::Handle', 'proxy'), - Argument('JS::Handle', 'receiver'), - Argument('JS::Handle', 'id'), - Argument('JS::MutableHandle', 'vp')] - ClassMethod.__init__(self, "get", "bool", args, - virtual=True, override=True, const=True) + args = [ + Argument("JSContext*", "cx"), + Argument("JS::Handle", "proxy"), + Argument("JS::Handle", "receiver"), + Argument("JS::Handle", "id"), + Argument("JS::MutableHandle", "vp"), + ] + ClassMethod.__init__( + self, "get", "bool", args, virtual=True, override=True, const=True + ) self.descriptor = descriptor def getBody(self): @@ -13072,13 +15188,15 @@ def getBody(self): return CrossOriginGet(cx, proxy, receiver, id, vp); } - """) + """ + ) else: maybeCrossOriginGet = "" missingPropUseCounters = missingPropUseCountersForDescriptor(self.descriptor) - getUnforgeableOrExpando = dedent(""" + getUnforgeableOrExpando = dedent( + """ { // Scope for expando JS::Rooted expando(cx, DOMProxyHandler::GetExpandoObject(proxy)); if (expando) { @@ -13095,7 +15213,8 @@ def getBody(self): } } } - """) + """ + ) if self.descriptor.isMaybeCrossOriginObject(): getUnforgeableOrExpando = fill( @@ -13113,7 +15232,8 @@ def getBody(self): return MaybeWrapValue(cx, vp); } """, - getUnforgeableOrExpando=getUnforgeableOrExpando) + getUnforgeableOrExpando=getUnforgeableOrExpando, + ) else: getUnforgeableOrExpando = fill( """ @@ -13123,16 +15243,18 @@ def getBody(self): return true; } """, - getUnforgeableOrExpando=getUnforgeableOrExpando) + getUnforgeableOrExpando=getUnforgeableOrExpando, + ) getUnforgeableOrExpando = fill( """ bool expandoHasProp = false; $*{getUnforgeableOrExpando} """, - getUnforgeableOrExpando=getUnforgeableOrExpando) + getUnforgeableOrExpando=getUnforgeableOrExpando, + ) - templateValues = {'jsvalRef': 'vp', 'jsvalHandle': 'vp', 'obj': 'proxy'} + templateValues = {"jsvalRef": "vp", "jsvalHandle": "vp", "obj": "proxy"} if 
self.descriptor.supportsIndexedProperties(): # We can't handle this for cross-origin objects @@ -13149,8 +15271,11 @@ def getBody(self): $*{getUnforgeableOrExpando} } """, - callGetter=CGProxyIndexedGetter(self.descriptor, templateValues).define(), - getUnforgeableOrExpando=getUnforgeableOrExpando) + callGetter=CGProxyIndexedGetter( + self.descriptor, templateValues + ).define(), + getUnforgeableOrExpando=getUnforgeableOrExpando, + ) else: getIndexedOrExpando = getUnforgeableOrExpando @@ -13165,12 +15290,14 @@ def getBody(self): else: getNamed = "" - getOnPrototype = dedent(""" + getOnPrototype = dedent( + """ if (!GetPropertyOnPrototype(cx, proxy, rootedReceiver, id, &foundOnPrototype, vp)) { return false; } - """) + """ + ) if self.descriptor.isMaybeCrossOriginObject(): getOnPrototype = fill( @@ -13190,7 +15317,8 @@ def getBody(self): } """, - getOnPrototype=getOnPrototype) + getOnPrototype=getOnPrototype, + ) else: getOnPrototype = fill( """ @@ -13201,9 +15329,10 @@ def getBody(self): } """, - getOnPrototype=getOnPrototype) + getOnPrototype=getOnPrototype, + ) - if self.descriptor.interface.getExtendedAttribute('OverrideBuiltins'): + if self.descriptor.interface.getExtendedAttribute("OverrideBuiltins"): getNamed = getNamed + getOnPrototype else: getNamed = getOnPrototype + getNamed @@ -13226,55 +15355,77 @@ def getBody(self): maybeCrossOriginGet=maybeCrossOriginGet, missingPropUseCounters=missingPropUseCounters, indexedOrExpando=getIndexedOrExpando, - named=getNamed) + named=getNamed, + ) class CGDOMJSProxyHandler_setCustom(ClassMethod): def __init__(self, descriptor): - args = [Argument('JSContext*', 'cx_'), - Argument('JS::Handle', 'proxy'), - Argument('JS::Handle', 'id'), - Argument('JS::Handle', 'v'), - Argument('bool*', 'done')] - ClassMethod.__init__(self, "setCustom", "bool", args, virtual=True, override=True, const=True) + args = [ + Argument("JSContext*", "cx_"), + Argument("JS::Handle", "proxy"), + Argument("JS::Handle", "id"), + Argument("JS::Handle", "v"), + Argument("bool*", "done"), + ] + ClassMethod.__init__( + self, "setCustom", "bool", args, virtual=True, override=True, const=True + ) self.descriptor = descriptor def getBody(self): - assertion = ("MOZ_ASSERT(!xpc::WrapperFactory::IsXrayWrapper(proxy),\n" - ' "Should not have a XrayWrapper here");\n') + assertion = ( + "MOZ_ASSERT(!xpc::WrapperFactory::IsXrayWrapper(proxy),\n" + ' "Should not have a XrayWrapper here");\n' + ) # Correctness first. If we have a NamedSetter and [OverrideBuiltins], # always call the NamedSetter and never do anything else. - namedSetter = self.descriptor.operations['NamedSetter'] - if (namedSetter is not None and - self.descriptor.interface.getExtendedAttribute('OverrideBuiltins')): + namedSetter = self.descriptor.operations["NamedSetter"] + if namedSetter is not None and self.descriptor.interface.getExtendedAttribute( + "OverrideBuiltins" + ): # Check assumptions. 
if self.descriptor.supportsIndexedProperties(): - raise ValueError("In interface " + self.descriptor.name + ": " + - "Can't cope with [OverrideBuiltins] and an indexed getter") + raise ValueError( + "In interface " + + self.descriptor.name + + ": " + + "Can't cope with [OverrideBuiltins] and an indexed getter" + ) if self.descriptor.hasUnforgeableMembers: - raise ValueError("In interface " + self.descriptor.name + ": " + - "Can't cope with [OverrideBuiltins] and unforgeable members") + raise ValueError( + "In interface " + + self.descriptor.name + + ": " + + "Can't cope with [OverrideBuiltins] and unforgeable members" + ) tailCode = dedent( """ *done = true; return true; - """) - callSetter = CGProxyNamedSetter(self.descriptor, tailCode, argumentHandleValue="v") + """ + ) + callSetter = CGProxyNamedSetter( + self.descriptor, tailCode, argumentHandleValue="v" + ) error_label = CGSpecializedMethod.error_reporting_label_helper( - self.descriptor, namedSetter, isConstructor=False) + self.descriptor, namedSetter, isConstructor=False + ) if error_label: cxDecl = fill( """ BindingCallContext cx(cx_, "${error_label}"); """, - error_label=error_label) + error_label=error_label, + ) else: cxDecl = dedent( """ JSContext* cx = cx_; - """) + """ + ) return fill( """ $*{assertion} @@ -13285,25 +15436,29 @@ def getBody(self): """, assertion=assertion, cxDecl=cxDecl, - callSetter=callSetter.define()) + callSetter=callSetter.define(), + ) # As an optimization, if we are going to call an IndexedSetter, go # ahead and call it and have done. - indexedSetter = self.descriptor.operations['IndexedSetter'] + indexedSetter = self.descriptor.operations["IndexedSetter"] if indexedSetter is not None: error_label = CGSpecializedMethod.error_reporting_label_helper( - self.descriptor, indexedSetter, isConstructor=False) + self.descriptor, indexedSetter, isConstructor=False + ) if error_label: cxDecl = fill( """ BindingCallContext cx(cx_, "${error_label}"); """, - error_label=error_label) + error_label=error_label, + ) else: cxDecl = dedent( """ JSContext* cx = cx_; - """) + """ + ) setIndexed = fill( """ uint32_t index = GetArrayIndexFromId(id); @@ -13316,23 +15471,31 @@ def getBody(self): """, cxDecl=cxDecl, - callSetter=CGProxyIndexedSetter(self.descriptor, - argumentHandleValue="v").define()) + callSetter=CGProxyIndexedSetter( + self.descriptor, argumentHandleValue="v" + ).define(), + ) else: setIndexed = "" - return (assertion + - setIndexed + - "*done = false;\n" - "return true;\n") + return assertion + setIndexed + "*done = false;\n" "return true;\n" class CGDOMJSProxyHandler_className(ClassMethod): def __init__(self, descriptor): - args = [Argument('JSContext*', 'cx'), - Argument('JS::Handle', 'proxy')] - ClassMethod.__init__(self, "className", "const char*", args, - virtual=True, override=True, const=True) + args = [ + Argument("JSContext*", "cx"), + Argument("JS::Handle", "proxy"), + ] + ClassMethod.__init__( + self, + "className", + "const char*", + args, + virtual=True, + override=True, + const=True, + ) self.descriptor = descriptor def getBody(self): @@ -13343,7 +15506,8 @@ def getBody(self): return "Object"; } - """) + """ + ) else: crossOrigin = "" return fill( @@ -13352,14 +15516,22 @@ def getBody(self): return "${name}"; """, crossOrigin=crossOrigin, - name=self.descriptor.name) + name=self.descriptor.name, + ) class CGDOMJSProxyHandler_finalizeInBackground(ClassMethod): def __init__(self, descriptor): - args = [Argument('const JS::Value&', 'priv')] - ClassMethod.__init__(self, "finalizeInBackground", 
"bool", args, - virtual=True, override=True, const=True) + args = [Argument("const JS::Value&", "priv")] + ClassMethod.__init__( + self, + "finalizeInBackground", + "bool", + args, + virtual=True, + override=True, + const=True, + ) self.descriptor = descriptor def getBody(self): @@ -13368,42 +15540,58 @@ def getBody(self): class CGDOMJSProxyHandler_finalize(ClassMethod): def __init__(self, descriptor): - args = [Argument('JSFreeOp*', 'fop'), Argument('JSObject*', 'proxy')] - ClassMethod.__init__(self, "finalize", "void", args, - virtual=True, override=True, const=True) + args = [Argument("JSFreeOp*", "fop"), Argument("JSObject*", "proxy")] + ClassMethod.__init__( + self, "finalize", "void", args, virtual=True, override=True, const=True + ) self.descriptor = descriptor def getBody(self): - return (("%s* self = UnwrapPossiblyNotInitializedDOMObject<%s>(proxy);\n" % - (self.descriptor.nativeType, self.descriptor.nativeType)) + - finalizeHook(self.descriptor, FINALIZE_HOOK_NAME, - self.args[0].name, self.args[1].name).define()) + return ( + "%s* self = UnwrapPossiblyNotInitializedDOMObject<%s>(proxy);\n" + % (self.descriptor.nativeType, self.descriptor.nativeType) + ) + finalizeHook( + self.descriptor, + FINALIZE_HOOK_NAME, + self.args[0].name, + self.args[1].name, + ).define() class CGDOMJSProxyHandler_objectMoved(ClassMethod): def __init__(self, descriptor): - args = [Argument('JSObject*', 'obj'), Argument('JSObject*', 'old')] - ClassMethod.__init__(self, "objectMoved", "size_t", args, - virtual=True, override=True, const=True) + args = [Argument("JSObject*", "obj"), Argument("JSObject*", "old")] + ClassMethod.__init__( + self, "objectMoved", "size_t", args, virtual=True, override=True, const=True + ) self.descriptor = descriptor def getBody(self): - return (("%s* self = UnwrapPossiblyNotInitializedDOMObject<%s>(obj);\n" % - (self.descriptor.nativeType, self.descriptor.nativeType)) + - objectMovedHook(self.descriptor, OBJECT_MOVED_HOOK_NAME, - self.args[0].name, self.args[1].name)) + return ( + "%s* self = UnwrapPossiblyNotInitializedDOMObject<%s>(obj);\n" + % (self.descriptor.nativeType, self.descriptor.nativeType) + ) + objectMovedHook( + self.descriptor, + OBJECT_MOVED_HOOK_NAME, + self.args[0].name, + self.args[1].name, + ) class CGDOMJSProxyHandler_getElements(ClassMethod): def __init__(self, descriptor): assert descriptor.supportsIndexedProperties() - args = [Argument('JSContext*', 'cx'), - Argument('JS::Handle', 'proxy'), - Argument('uint32_t', 'begin'), - Argument('uint32_t', 'end'), - Argument('js::ElementAdder*', 'adder')] - ClassMethod.__init__(self, "getElements", "bool", args, virtual=True, override=True, const=True) + args = [ + Argument("JSContext*", "cx"), + Argument("JS::Handle", "proxy"), + Argument("uint32_t", "begin"), + Argument("uint32_t", "end"), + Argument("js::ElementAdder*", "adder"), + ] + ClassMethod.__init__( + self, "getElements", "bool", args, virtual=True, override=True, const=True + ) self.descriptor = descriptor def getBody(self): @@ -13413,13 +15601,16 @@ def getBody(self): # forward on to our ancestor. 
templateValues = { - 'jsvalRef': 'temp', - 'jsvalHandle': '&temp', - 'obj': 'proxy', - 'successCode': ("if (!adder->append(cx, temp)) return false;\n" - "continue;\n") + "jsvalRef": "temp", + "jsvalHandle": "&temp", + "obj": "proxy", + "successCode": ( + "if (!adder->append(cx, temp)) return false;\n" "continue;\n" + ), } - get = CGProxyIndexedGetter(self.descriptor, templateValues, False, False).define() + get = CGProxyIndexedGetter( + self.descriptor, templateValues, False, False + ).define() if self.descriptor.lengthNeedsCallerType(): callerType = callerTypeGetterForDescriptor(self.descriptor) @@ -13453,46 +15644,64 @@ def getBody(self): """, nativeType=self.descriptor.nativeType, callerType=callerType, - get=get) + get=get, + ) class CGDOMJSProxyHandler_getInstance(ClassMethod): def __init__(self): - ClassMethod.__init__(self, "getInstance", "const DOMProxyHandler*", [], static=True) + ClassMethod.__init__( + self, "getInstance", "const DOMProxyHandler*", [], static=True + ) def getBody(self): - return dedent(""" + return dedent( + """ static const DOMProxyHandler instance; return &instance; - """) + """ + ) class CGDOMJSProxyHandler_call(ClassMethod): def __init__(self): - args = [Argument('JSContext*', 'cx'), - Argument('JS::Handle', 'proxy'), - Argument('const JS::CallArgs&', 'args')] + args = [ + Argument("JSContext*", "cx"), + Argument("JS::Handle", "proxy"), + Argument("const JS::CallArgs&", "args"), + ] - ClassMethod.__init__(self, "call", "bool", args, virtual=True, override=True, const=True) + ClassMethod.__init__( + self, "call", "bool", args, virtual=True, override=True, const=True + ) def getBody(self): return fill( """ return js::ForwardToNative(cx, ${legacyCaller}, args); """, - legacyCaller=LEGACYCALLER_HOOK_NAME) + legacyCaller=LEGACYCALLER_HOOK_NAME, + ) class CGDOMJSProxyHandler_isCallable(ClassMethod): def __init__(self): - ClassMethod.__init__(self, "isCallable", "bool", - [Argument('JSObject*', 'obj')], - virtual=True, override=True, const=True) + ClassMethod.__init__( + self, + "isCallable", + "bool", + [Argument("JSObject*", "obj")], + virtual=True, + override=True, + const=True, + ) def getBody(self): - return dedent(""" + return dedent( + """ return true; - """) + """ + ) class CGDOMJSProxyHandler_canNurseryAllocate(ClassMethod): @@ -13500,15 +15709,24 @@ class CGDOMJSProxyHandler_canNurseryAllocate(ClassMethod): Override the default canNurseryAllocate in BaseProxyHandler, for cases when we should be nursery-allocated. """ + def __init__(self): - ClassMethod.__init__(self, "canNurseryAllocate", "bool", - [], - virtual=True, override=True, const=True) + ClassMethod.__init__( + self, + "canNurseryAllocate", + "bool", + [], + virtual=True, + override=True, + const=True, + ) def getBody(self): - return dedent(""" + return dedent( + """ return true; - """) + """ + ) class CGDOMJSProxyHandler_getOwnPropertyDescriptor(ClassMethod): @@ -13516,15 +15734,25 @@ class CGDOMJSProxyHandler_getOwnPropertyDescriptor(ClassMethod): Implementation of getOwnPropertyDescriptor. We only use this for cross-origin objects. 
""" + def __init__(self, descriptor): assert descriptor.isMaybeCrossOriginObject() - args = [Argument('JSContext*', 'cx'), - Argument('JS::Handle', 'proxy'), - Argument('JS::Handle', 'id'), - Argument('JS::MutableHandle', 'desc')] - ClassMethod.__init__(self, "getOwnPropertyDescriptor", "bool", args, - virtual=True, override=True, const=True) + args = [ + Argument("JSContext*", "cx"), + Argument("JS::Handle", "proxy"), + Argument("JS::Handle", "id"), + Argument("JS::MutableHandle", "desc"), + ] + ClassMethod.__init__( + self, + "getOwnPropertyDescriptor", + "bool", + args, + virtual=True, + override=True, + const=True, + ) self.descriptor = descriptor def getBody(self): @@ -13565,7 +15793,8 @@ def getBody(self): // And step 4. return CrossOriginPropertyFallback(cx, proxy, id, desc); - """) + """ + ) class CGDOMJSProxyHandler_getSameOriginPrototype(ClassMethod): @@ -13573,19 +15802,28 @@ class CGDOMJSProxyHandler_getSameOriginPrototype(ClassMethod): Implementation of getSameOriginPrototype. We only use this for cross-origin objects. """ + def __init__(self, descriptor): assert descriptor.isMaybeCrossOriginObject() - args = [Argument('JSContext*', 'cx')] - ClassMethod.__init__(self, "getSameOriginPrototype", "JSObject*", args, - virtual=True, override=True, const=True) + args = [Argument("JSContext*", "cx")] + ClassMethod.__init__( + self, + "getSameOriginPrototype", + "JSObject*", + args, + virtual=True, + override=True, + const=True, + ) self.descriptor = descriptor def getBody(self): return dedent( """ return GetProtoObjectHandle(cx); - """) + """ + ) class CGDOMJSProxyHandler_definePropertySameOrigin(ClassMethod): @@ -13593,40 +15831,55 @@ class CGDOMJSProxyHandler_definePropertySameOrigin(ClassMethod): Implementation of definePropertySameOrigin. We only use this for cross-origin objects. """ + def __init__(self, descriptor): assert descriptor.isMaybeCrossOriginObject() - args = [Argument('JSContext*', 'cx'), - Argument('JS::Handle', 'proxy'), - Argument('JS::Handle', 'id'), - Argument('JS::Handle', 'desc'), - Argument('JS::ObjectOpResult&', 'result')] - ClassMethod.__init__(self, "definePropertySameOrigin", "bool", args, - virtual=True, override=True, const=True) + args = [ + Argument("JSContext*", "cx"), + Argument("JS::Handle", "proxy"), + Argument("JS::Handle", "id"), + Argument("JS::Handle", "desc"), + Argument("JS::ObjectOpResult&", "result"), + ] + ClassMethod.__init__( + self, + "definePropertySameOrigin", + "bool", + args, + virtual=True, + override=True, + const=True, + ) self.descriptor = descriptor def getBody(self): return dedent( """ return dom::DOMProxyHandler::defineProperty(cx, proxy, id, desc, result); - """) + """ + ) class CGDOMJSProxyHandler_set(ClassMethod): """ Implementation of set(). We only use this for cross-origin objects. 
""" + def __init__(self, descriptor): assert descriptor.isMaybeCrossOriginObject() - args = [Argument('JSContext*', 'cx'), - Argument('JS::Handle', 'proxy'), - Argument('JS::Handle', 'id'), - Argument('JS::Handle', 'v'), - Argument('JS::Handle', 'receiver'), - Argument('JS::ObjectOpResult&', 'result')] - ClassMethod.__init__(self, "set", "bool", args, - virtual=True, override=True, const=True) + args = [ + Argument("JSContext*", "cx"), + Argument("JS::Handle", "proxy"), + Argument("JS::Handle", "id"), + Argument("JS::Handle", "v"), + Argument("JS::Handle", "receiver"), + Argument("JS::ObjectOpResult&", "result"), + ] + ClassMethod.__init__( + self, "set", "bool", args, virtual=True, override=True, const=True + ) self.descriptor = descriptor def getBody(self): @@ -13651,19 +15904,24 @@ def getBody(self): JS_MarkCrossZoneId(cx, id); return dom::DOMProxyHandler::set(cx, proxy, id, wrappedValue, wrappedReceiver, result); - """) + """ + ) class CGDOMJSProxyHandler_EnsureHolder(ClassMethod): """ Implementation of EnsureHolder(). We only use this for cross-origin objects. """ + def __init__(self, descriptor): - args = [Argument('JSContext*', 'cx'), - Argument('JS::Handle', 'proxy'), - Argument('JS::MutableHandle', 'holder')] - ClassMethod.__init__(self, "EnsureHolder", "bool", args, - virtual=True, override=True, const=True) + args = [ + Argument("JSContext*", "cx"), + Argument("JS::Handle", "proxy"), + Argument("JS::MutableHandle", "holder"), + ] + ClassMethod.__init__( + self, "EnsureHolder", "bool", args, virtual=True, override=True, const=True + ) self.descriptor = descriptor def getBody(self): @@ -13672,80 +15930,90 @@ def getBody(self): return EnsureHolder(cx, proxy, JSCLASS_RESERVED_SLOTS(JS::GetClass(proxy)) - 1, sCrossOriginProperties, holder); - """) + """ + ) class CGDOMJSProxyHandler(CGClass): def __init__(self, descriptor): - assert (descriptor.supportsIndexedProperties() or - descriptor.supportsNamedProperties() or - descriptor.isMaybeCrossOriginObject()) - methods = [CGDOMJSProxyHandler_getOwnPropDescriptor(descriptor), - CGDOMJSProxyHandler_defineProperty(descriptor), - ClassUsingDeclaration("mozilla::dom::DOMProxyHandler", - "defineProperty"), - CGDOMJSProxyHandler_ownPropNames(descriptor), - CGDOMJSProxyHandler_hasOwn(descriptor), - CGDOMJSProxyHandler_get(descriptor), - CGDOMJSProxyHandler_className(descriptor), - CGDOMJSProxyHandler_finalizeInBackground(descriptor), - CGDOMJSProxyHandler_finalize(descriptor), - CGDOMJSProxyHandler_getInstance(), - CGDOMJSProxyHandler_delete(descriptor)] + assert ( + descriptor.supportsIndexedProperties() + or descriptor.supportsNamedProperties() + or descriptor.isMaybeCrossOriginObject() + ) + methods = [ + CGDOMJSProxyHandler_getOwnPropDescriptor(descriptor), + CGDOMJSProxyHandler_defineProperty(descriptor), + ClassUsingDeclaration("mozilla::dom::DOMProxyHandler", "defineProperty"), + CGDOMJSProxyHandler_ownPropNames(descriptor), + CGDOMJSProxyHandler_hasOwn(descriptor), + CGDOMJSProxyHandler_get(descriptor), + CGDOMJSProxyHandler_className(descriptor), + CGDOMJSProxyHandler_finalizeInBackground(descriptor), + CGDOMJSProxyHandler_finalize(descriptor), + CGDOMJSProxyHandler_getInstance(), + CGDOMJSProxyHandler_delete(descriptor), + ] constructors = [ - ClassConstructor( - [], - constexpr=True, - visibility="public", - explicit=True) + ClassConstructor([], constexpr=True, visibility="public", explicit=True) ] if descriptor.supportsIndexedProperties(): methods.append(CGDOMJSProxyHandler_getElements(descriptor)) - if 
(descriptor.operations['IndexedSetter'] is not None or - (descriptor.operations['NamedSetter'] is not None and - descriptor.interface.getExtendedAttribute('OverrideBuiltins'))): + if descriptor.operations["IndexedSetter"] is not None or ( + descriptor.operations["NamedSetter"] is not None + and descriptor.interface.getExtendedAttribute("OverrideBuiltins") + ): methods.append(CGDOMJSProxyHandler_setCustom(descriptor)) - if descriptor.operations['LegacyCaller']: + if descriptor.operations["LegacyCaller"]: methods.append(CGDOMJSProxyHandler_call()) methods.append(CGDOMJSProxyHandler_isCallable()) if descriptor.interface.hasProbablyShortLivingWrapper(): if not descriptor.wrapperCache: - raise TypeError("Need a wrapper cache to support nursery " - "allocation of DOM objects") + raise TypeError( + "Need a wrapper cache to support nursery " + "allocation of DOM objects" + ) methods.append(CGDOMJSProxyHandler_canNurseryAllocate()) if descriptor.wrapperCache: methods.append(CGDOMJSProxyHandler_objectMoved(descriptor)) if descriptor.isMaybeCrossOriginObject(): - methods.extend([ - CGDOMJSProxyHandler_getOwnPropertyDescriptor(descriptor), - CGDOMJSProxyHandler_getSameOriginPrototype(descriptor), - CGDOMJSProxyHandler_definePropertySameOrigin(descriptor), - CGDOMJSProxyHandler_set(descriptor), - CGDOMJSProxyHandler_EnsureHolder(descriptor), - ClassUsingDeclaration("MaybeCrossOriginObjectMixins", - "EnsureHolder") - ]) - - if descriptor.interface.getExtendedAttribute('OverrideBuiltins'): + methods.extend( + [ + CGDOMJSProxyHandler_getOwnPropertyDescriptor(descriptor), + CGDOMJSProxyHandler_getSameOriginPrototype(descriptor), + CGDOMJSProxyHandler_definePropertySameOrigin(descriptor), + CGDOMJSProxyHandler_set(descriptor), + CGDOMJSProxyHandler_EnsureHolder(descriptor), + ClassUsingDeclaration( + "MaybeCrossOriginObjectMixins", "EnsureHolder" + ), + ] + ) + + if descriptor.interface.getExtendedAttribute("OverrideBuiltins"): assert not descriptor.isMaybeCrossOriginObject() - parentClass = 'ShadowingDOMProxyHandler' + parentClass = "ShadowingDOMProxyHandler" elif descriptor.isMaybeCrossOriginObject(): - parentClass = 'MaybeCrossOriginObject' + parentClass = "MaybeCrossOriginObject" else: - parentClass = 'mozilla::dom::DOMProxyHandler' + parentClass = "mozilla::dom::DOMProxyHandler" - CGClass.__init__(self, 'DOMProxyHandler', - bases=[ClassBase(parentClass)], - constructors=constructors, - methods=methods) + CGClass.__init__( + self, + "DOMProxyHandler", + bases=[ClassBase(parentClass)], + constructors=constructors, + methods=methods, + ) class CGDOMJSProxyHandlerDeclarer(CGThing): """ A class for declaring a DOMProxyHandler. """ + def __init__(self, handlerThing): self.handlerThing = handlerThing @@ -13761,6 +16029,7 @@ class CGDOMJSProxyHandlerDefiner(CGThing): """ A class for defining a DOMProxyHandler. 
""" + def __init__(self, handlerThing): self.handlerThing = handlerThing @@ -13772,9 +16041,9 @@ def define(self): def stripTrailingWhitespace(text): - tail = '\n' if text.endswith('\n') else '' + tail = "\n" if text.endswith("\n") else "" lines = text.splitlines() - return '\n'.join(line.rstrip() for line in lines) + tail + return "\n".join(line.rstrip() for line in lines) + tail class MemberProperties: @@ -13787,7 +16056,7 @@ def __init__(self): def memberProperties(m, descriptor): props = MemberProperties() if m.isMethod(): - if (not m.isIdentifierLess() or m == descriptor.operations['Stringifier']): + if not m.isIdentifierLess() or m == descriptor.operations["Stringifier"]: if not m.isStatic() and descriptor.interface.hasInterfacePrototypeObject(): if m.getExtendedAttribute("CrossOriginCallable"): props.isCrossOriginMethod = True @@ -13802,8 +16071,9 @@ def memberProperties(m, descriptor): elif m.getExtendedAttribute("PutForwards"): if m.getExtendedAttribute("CrossOriginWritable"): props.isCrossOriginSetter = True - elif (m.getExtendedAttribute("Replaceable") or - m.getExtendedAttribute("LenientSetter")): + elif m.getExtendedAttribute("Replaceable") or m.getExtendedAttribute( + "LenientSetter" + ): if m.getExtendedAttribute("CrossOriginWritable"): props.isCrossOriginSetter = True @@ -13814,27 +16084,34 @@ class CGDescriptor(CGThing): def __init__(self, descriptor): CGThing.__init__(self) - assert not descriptor.concrete or descriptor.interface.hasInterfacePrototypeObject() + assert ( + not descriptor.concrete + or descriptor.interface.hasInterfacePrototypeObject() + ) self._deps = descriptor.interface.getDeps() cgThings = [] - cgThings.append(CGGeneric(declare="typedef %s NativeType;\n" % - descriptor.nativeType)) + cgThings.append( + CGGeneric(declare="typedef %s NativeType;\n" % descriptor.nativeType) + ) parent = descriptor.interface.parent if parent: - cgThings.append(CGGeneric("static_assert(IsRefcounted::value == IsRefcounted<%s::NativeType>::value,\n" - " \"Can't inherit from an interface with a different ownership model.\");\n" % - toBindingNamespace(descriptor.parentPrototypeName))) + cgThings.append( + CGGeneric( + "static_assert(IsRefcounted::value == IsRefcounted<%s::NativeType>::value,\n" + ' "Can\'t inherit from an interface with a different ownership model.");\n' + % toBindingNamespace(descriptor.parentPrototypeName) + ) + ) defaultToJSONMethod = None needCrossOriginPropertyArrays = False unscopableNames = list() for n in descriptor.interface.namedConstructors: - cgThings.append(CGClassConstructor(descriptor, n, - NamedConstructorName(n))) + cgThings.append(CGClassConstructor(descriptor, n, NamedConstructorName(n))) for m in descriptor.interface.members: - if m.isMethod() and m.identifier.name == 'QueryInterface': + if m.isMethod() and m.identifier.name == "QueryInterface": continue props = memberProperties(m, descriptor) @@ -13845,7 +16122,10 @@ def __init__(self, descriptor): unscopableNames.append(m.identifier.name) if m.isDefaultToJSON(): defaultToJSONMethod = m - elif not m.isIdentifierLess() or m == descriptor.operations['Stringifier']: + elif ( + not m.isIdentifierLess() + or m == descriptor.operations["Stringifier"] + ): if m.isStatic(): assert descriptor.interface.hasInterfaceObject() cgThings.append(CGStaticMethod(descriptor, m)) @@ -13855,7 +16135,9 @@ def __init__(self, descriptor): specializedMethod = CGSpecializedMethod(descriptor, m) cgThings.append(specializedMethod) if m.returnsPromise(): - cgThings.append(CGMethodPromiseWrapper(descriptor, 
specializedMethod)) + cgThings.append( + CGMethodPromiseWrapper(descriptor, specializedMethod) + ) cgThings.append(CGMemberJITInfo(descriptor, m)) if props.isCrossOriginMethod: needCrossOriginPropertyArrays = True @@ -13874,7 +16156,9 @@ def __init__(self, descriptor): specializedGetter = CGSpecializedGetter(descriptor, m) cgThings.append(specializedGetter) if m.type.isPromise(): - cgThings.append(CGGetterPromiseWrapper(descriptor, specializedGetter)) + cgThings.append( + CGGetterPromiseWrapper(descriptor, specializedGetter) + ) if props.isCrossOriginGetter: needCrossOriginPropertyArrays = True if not m.readonly: @@ -13893,8 +16177,10 @@ def __init__(self, descriptor): cgThings.append(CGSpecializedReplaceableSetter(descriptor, m)) elif m.getExtendedAttribute("LenientSetter"): cgThings.append(CGSpecializedLenientSetter(descriptor, m)) - if (not m.isStatic() and - descriptor.interface.hasInterfacePrototypeObject()): + if ( + not m.isStatic() + and descriptor.interface.hasInterfacePrototypeObject() + ): cgThings.append(CGMemberJITInfo(descriptor, m)) if m.isConst() and m.type.isPrimitive(): cgThings.append(CGConstDefinition(m)) @@ -13924,11 +16210,12 @@ def __init__(self, descriptor): if defaultToJSONMethod: # Now that we know about our property arrays, we can # output our "collect attribute values" method, which uses those. - cgThings.append(CGCollectJSONAttributesMethod(descriptor, defaultToJSONMethod)) + cgThings.append( + CGCollectJSONAttributesMethod(descriptor, defaultToJSONMethod) + ) if descriptor.interface.hasInterfaceObject(): - cgThings.append(CGClassConstructor(descriptor, - descriptor.interface.ctor())) + cgThings.append(CGClassConstructor(descriptor, descriptor.interface.ctor())) cgThings.append(CGInterfaceObjectJSClass(descriptor, properties)) cgThings.append(CGNamedConstructors(descriptor)) @@ -13944,16 +16231,21 @@ def __init__(self, descriptor): if descriptor.interface.hasInterfacePrototypeObject(): cgThings.append(CGPrototypeJSClass(descriptor, properties)) - if (descriptor.interface.hasInterfaceObject() and - not descriptor.interface.isExternal() and - descriptor.isExposedConditionally()): + if ( + descriptor.interface.hasInterfaceObject() + and not descriptor.interface.isExternal() + and descriptor.isExposedConditionally() + ): cgThings.append(CGConstructorEnabled(descriptor)) - if (descriptor.interface.hasMembersInSlots() and - descriptor.interface.hasChildInterfaces()): - raise TypeError("We don't support members in slots on " - "non-leaf interfaces like %s" % - descriptor.interface.identifier.name) + if ( + descriptor.interface.hasMembersInSlots() + and descriptor.interface.hasChildInterfaces() + ): + raise TypeError( + "We don't support members in slots on " + "non-leaf interfaces like %s" % descriptor.interface.identifier.name + ) if descriptor.needsMissingPropUseCounters: cgThings.append(CGCountMaybeMissingProperty(descriptor)) @@ -13964,17 +16256,24 @@ def __init__(self, descriptor): cgThings.append(CGDeserializer(descriptor)) if descriptor.proxy: - cgThings.append(CGGeneric(fill( - """ + cgThings.append( + CGGeneric( + fill( + """ static_assert(std::is_base_of_v, "We don't support non-nsISupports native classes for " "proxy-based bindings yet"); """, - nativeType=descriptor.nativeType))) + nativeType=descriptor.nativeType, + ) + ) + ) if not descriptor.wrapperCache: - raise TypeError("We need a wrappercache to support expandos for proxy-based " - "bindings (" + descriptor.name + ")") + raise TypeError( + "We need a wrappercache to support expandos for proxy-based 
" + "bindings (" + descriptor.name + ")" + ) handlerThing = CGDOMJSProxyHandler(descriptor) cgThings.append(CGDOMJSProxyHandlerDeclarer(handlerThing)) cgThings.append(CGProxyIsProxy(descriptor)) @@ -13995,8 +16294,7 @@ def __init__(self, descriptor): cgThings.append(CGWrapWithCacheMethod(descriptor, properties)) cgThings.append(CGWrapMethod(descriptor)) else: - cgThings.append(CGWrapNonWrapperCacheMethod(descriptor, - properties)) + cgThings.append(CGWrapNonWrapperCacheMethod(descriptor, properties)) # Set up our Xray callbacks as needed. This needs to come # after we have our DOMProxyHandler defined. @@ -14009,7 +16307,9 @@ def __init__(self, descriptor): cgThings.append(CGDeleteNamedProperty(descriptor)) elif descriptor.needsXrayResolveHooks(): cgThings.append(CGResolveOwnPropertyViaResolve(descriptor)) - cgThings.append(CGEnumerateOwnPropertiesViaGetOwnPropertyNames(descriptor)) + cgThings.append( + CGEnumerateOwnPropertiesViaGetOwnPropertyNames(descriptor) + ) if descriptor.wantsXrayExpandoClass: cgThings.append(CGXrayExpandoJSClass(descriptor)) @@ -14020,33 +16320,63 @@ def __init__(self, descriptor): # If we're not wrappercached, we don't know how to clear our # cached values, since we can't get at the JSObject. if descriptor.wrapperCache: - cgThings.extend(CGClearCachedValueMethod(descriptor, m) for - m in clearableCachedAttrs(descriptor)) - - haveUnscopables = (len(unscopableNames) != 0 and - descriptor.interface.hasInterfacePrototypeObject()) + cgThings.extend( + CGClearCachedValueMethod(descriptor, m) + for m in clearableCachedAttrs(descriptor) + ) + + haveUnscopables = ( + len(unscopableNames) != 0 + and descriptor.interface.hasInterfacePrototypeObject() + ) if haveUnscopables: cgThings.append( - CGList([CGGeneric("static const char* const unscopableNames[] = {"), - CGIndenter(CGList([CGGeneric('"%s"' % name) for - name in unscopableNames] + - [CGGeneric("nullptr")], ",\n")), - CGGeneric("};\n")], "\n")) + CGList( + [ + CGGeneric("static const char* const unscopableNames[] = {"), + CGIndenter( + CGList( + [CGGeneric('"%s"' % name) for name in unscopableNames] + + [CGGeneric("nullptr")], + ",\n", + ) + ), + CGGeneric("};\n"), + ], + "\n", + ) + ) legacyWindowAliases = descriptor.interface.legacyWindowAliases haveLegacyWindowAliases = len(legacyWindowAliases) != 0 if haveLegacyWindowAliases: cgThings.append( - CGList([CGGeneric("static const char* const legacyWindowAliases[] = {"), - CGIndenter(CGList([CGGeneric('"%s"' % name) for - name in legacyWindowAliases] + - [CGGeneric("nullptr")], ",\n")), - CGGeneric("};\n")], "\n")) + CGList( + [ + CGGeneric("static const char* const legacyWindowAliases[] = {"), + CGIndenter( + CGList( + [ + CGGeneric('"%s"' % name) + for name in legacyWindowAliases + ] + + [CGGeneric("nullptr")], + ",\n", + ) + ), + CGGeneric("};\n"), + ], + "\n", + ) + ) # CGCreateInterfaceObjectsMethod needs to come after our # CGDOMJSClass and unscopables, if any. - cgThings.append(CGCreateInterfaceObjectsMethod(descriptor, properties, - haveUnscopables, haveLegacyWindowAliases)) + cgThings.append( + CGCreateInterfaceObjectsMethod( + descriptor, properties, haveUnscopables, haveLegacyWindowAliases + ) + ) # CGGetProtoObjectMethod and CGGetConstructorObjectMethod need # to come after CGCreateInterfaceObjectsMethod. 
@@ -14063,10 +16393,10 @@ def __init__(self, descriptor): cgThings.append(CGCrossOriginProperties(descriptor)) cgThings = CGList((CGIndenter(t, declareOnly=True) for t in cgThings), "\n") - cgThings = CGWrapper(cgThings, pre='\n', post='\n') - self.cgRoot = CGWrapper(CGNamespace(toBindingNamespace(descriptor.name), - cgThings), - post='\n') + cgThings = CGWrapper(cgThings, pre="\n", post="\n") + self.cgRoot = CGWrapper( + CGNamespace(toBindingNamespace(descriptor.name), cgThings), post="\n" + ) def declare(self): return self.cgRoot.declare() @@ -14094,23 +16424,26 @@ def __init__(self, namespace, enumName, names, values, comment=""): entries.append(entry) # Append a Count. - entries.append('_' + enumName + '_Count') + entries.append("_" + enumName + "_Count") # Indent. - entries = [' ' + e for e in entries] + entries = [" " + e for e in entries] # Build the enum body. - enumstr = comment + 'enum %s : uint16_t\n{\n%s\n};\n' % (enumName, ',\n'.join(entries)) + enumstr = comment + "enum %s : uint16_t\n{\n%s\n};\n" % ( + enumName, + ",\n".join(entries), + ) curr = CGGeneric(declare=enumstr) # Add some whitespace padding. - curr = CGWrapper(curr, pre='\n', post='\n') + curr = CGWrapper(curr, pre="\n", post="\n") # Add the namespace. curr = CGNamespace(namespace, curr) # Add the typedef - typedef = '\ntypedef %s::%s %s;\n\n' % (namespace, enumName, enumName) + typedef = "\ntypedef %s::%s %s;\n\n" % (namespace, enumName, enumName) curr = CGList([curr, CGGeneric(declare=typedef)]) # Save the result. @@ -14124,10 +16457,10 @@ def define(self): def initIdsClassMethod(identifiers, atomCacheName): - idinit = ['!atomsCache->%s.init(cx, "%s")' % - (CGDictionary.makeIdName(id), - id) - for id in identifiers] + idinit = [ + '!atomsCache->%s.init(cx, "%s")' % (CGDictionary.makeIdName(id), id) + for id in identifiers + ] idinit.reverse() body = fill( """ @@ -14140,11 +16473,16 @@ def initIdsClassMethod(identifiers, atomCacheName): } return true; """, - idinit=" ||\n ".join(idinit)) - return ClassMethod("InitIds", "bool", [ - Argument("JSContext*", "cx"), - Argument("%s*" % atomCacheName, "atomsCache") - ], static=True, body=body, visibility="private") + idinit=" ||\n ".join(idinit), + ) + return ClassMethod( + "InitIds", + "bool", + [Argument("JSContext*", "cx"), Argument("%s*" % atomCacheName, "atomsCache")], + static=True, + body=body, + visibility="private", + ) class CGDictionary(CGThing): @@ -14153,37 +16491,41 @@ def __init__(self, dictionary, descriptorProvider): self.descriptorProvider = descriptorProvider self.needToInitIds = len(dictionary.members) > 0 self.memberInfo = [ - (member, - getJSToNativeConversionInfo( - member.type, - descriptorProvider, - isMember="Dictionary", - isOptional=member.canHaveMissingValue(), - isKnownMissing=not dictionary.needsConversionFromJS, - defaultValue=member.defaultValue, - sourceDescription=self.getMemberSourceDescription(member))) - for member in dictionary.members] + ( + member, + getJSToNativeConversionInfo( + member.type, + descriptorProvider, + isMember="Dictionary", + isOptional=member.canHaveMissingValue(), + isKnownMissing=not dictionary.needsConversionFromJS, + defaultValue=member.defaultValue, + sourceDescription=self.getMemberSourceDescription(member), + ), + ) + for member in dictionary.members + ] # If we have a union member which is going to be declared in a different # header but contains something that will be declared in the same header # as us, bail: the C++ includes won't work out. 
for member in dictionary.members: type = member.type.unroll() - if (type.isUnion() and - CGHeaders.getUnionDeclarationFilename(descriptorProvider.getConfig(), - type) != - CGHeaders.getDeclarationFilename(dictionary)): + if type.isUnion() and CGHeaders.getUnionDeclarationFilename( + descriptorProvider.getConfig(), type + ) != CGHeaders.getDeclarationFilename(dictionary): for t in type.flatMemberTypes: - if (t.isDictionary() and - CGHeaders.getDeclarationFilename(t.inner) == - CGHeaders.getDeclarationFilename(dictionary)): + if t.isDictionary() and CGHeaders.getDeclarationFilename( + t.inner + ) == CGHeaders.getDeclarationFilename(dictionary): raise TypeError( "Dictionary contains a union that will live in a different " "header that contains a dictionary from the same header as " "the original dictionary. This won't compile. Move the " "inner dictionary to a different Web IDL file to move it " - "to a different header.\n%s\n%s" % - (t.location, t.inner.location)) + "to a different header.\n%s\n%s" + % (t.location, t.inner.location) + ) self.structs = self.getStructs() def declare(self): @@ -14207,13 +16549,15 @@ def initMethod(self): separated by newlines. """ - body = dedent(""" + body = dedent( + """ // Passing a null JSContext is OK only if we're initing from null, // Since in that case we will not have to do any property gets // Also evaluate isNullOrUndefined in order to avoid false-positive // checkers by static analysis tools MOZ_ASSERT_IF(!cx, val.isNull() && val.isNullOrUndefined()); - """) + """ + ) if self.needToInitIds: body += fill( @@ -14228,7 +16572,8 @@ def initMethod(self): } """, - dictName=self.makeClassName(self.dictionary)) + dictName=self.makeClassName(self.dictionary), + ) if self.dictionary.parent: body += fill( @@ -14239,7 +16584,8 @@ def initMethod(self): } """, - dictName=self.makeClassName(self.dictionary.parent)) + dictName=self.makeClassName(self.dictionary.parent), + ) else: body += dedent( """ @@ -14247,10 +16593,10 @@ def initMethod(self): return cx.ThrowErrorMessage(sourceDescription); } - """) + """ + ) - memberInits = [self.getMemberConversion(m).define() - for m in self.memberInfo] + memberInits = [self.getMemberConversion(m).define() for m in self.memberInfo] if memberInits: body += fill( """ @@ -14265,16 +16611,22 @@ def initMethod(self): } $*{memberInits} """, - memberInits="\n".join(memberInits)) + memberInits="\n".join(memberInits), + ) body += "return true;\n" - return ClassMethod("Init", "bool", [ - Argument('BindingCallContext&', 'cx'), - Argument('JS::Handle', 'val'), - Argument('const char*', 'sourceDescription', default='"Value"'), - Argument('bool', 'passedToJSImpl', default='false') - ], body=body) + return ClassMethod( + "Init", + "bool", + [ + Argument("BindingCallContext&", "cx"), + Argument("JS::Handle", "val"), + Argument("const char*", "sourceDescription", default='"Value"'), + Argument("bool", "passedToJSImpl", default="false"), + ], + body=body, + ) def initWithoutCallContextMethod(self): """ @@ -14287,13 +16639,19 @@ def initWithoutCallContextMethod(self): // that's not really what it's formatted for. 
BindingCallContext cx(cx_, nullptr); return Init(cx, val, sourceDescription, passedToJSImpl); - """) - return ClassMethod("Init", "bool", [ - Argument('JSContext*', 'cx_'), - Argument('JS::Handle', 'val'), - Argument('const char*', 'sourceDescription', default='"Value"'), - Argument('bool', 'passedToJSImpl', default='false') - ], body=body) + """ + ) + return ClassMethod( + "Init", + "bool", + [ + Argument("JSContext*", "cx_"), + Argument("JS::Handle", "val"), + Argument("const char*", "sourceDescription", default='"Value"'), + Argument("bool", "passedToJSImpl", default="false"), + ], + body=body, + ) def simpleInitMethod(self): """ @@ -14301,10 +16659,13 @@ def simpleInitMethod(self): for cases when we are just default-initializing it. """ - relevantMembers = [m for m in self.memberInfo - # We only need to init the things that can have - # default values. - if m[0].optional and m[0].defaultValue] + relevantMembers = [ + m + for m in self.memberInfo + # We only need to init the things that can have + # default values. + if m[0].optional and m[0].defaultValue + ] # We mostly avoid outputting code that uses cx in our native-to-JS # conversions, but there is one exception: we may have a @@ -14314,14 +16675,15 @@ def simpleInitMethod(self): # native-to-JS templates hardcode `cx` as the JSContext value, we're # going to need to provide that. haveMemberThatNeedsCx = any( - m[0].type.isDictionary() and - m[0].type.unroll().inner.needsConversionFromJS for - m in relevantMembers) + m[0].type.isDictionary() and m[0].type.unroll().inner.needsConversionFromJS + for m in relevantMembers + ) if haveMemberThatNeedsCx: body = dedent( """ JSContext* cx = nullptr; - """) + """ + ) else: body = "" @@ -14339,29 +16701,40 @@ def simpleInitMethod(self): """, dictName=self.makeClassName(self.dictionary.parent), - args=args) + args=args, + ) - memberInits = [self.getMemberConversion(m, isKnownMissing=True).define() - for m in relevantMembers] + memberInits = [ + self.getMemberConversion(m, isKnownMissing=True).define() + for m in relevantMembers + ] if memberInits: body += fill( """ $*{memberInits} """, - memberInits="\n".join(memberInits)) + memberInits="\n".join(memberInits), + ) body += "return true;\n" - return ClassMethod("Init", "bool", [ - Argument('const char*', 'sourceDescription', default='"Value"'), - Argument('bool', 'passedToJSImpl', default='false') - ], body=body) + return ClassMethod( + "Init", + "bool", + [ + Argument("const char*", "sourceDescription", default='"Value"'), + Argument("bool", "passedToJSImpl", default="false"), + ], + body=body, + ) def initFromJSONMethod(self): return ClassMethod( - "Init", "bool", - [Argument('const nsAString&', 'aJSON')], - body=dedent(""" + "Init", + "bool", + [Argument("const nsAString&", "aJSON")], + body=dedent( + """ AutoJSAPI jsapi; JSObject* cleanGlobal = SimpleGlobalObject::Create(SimpleGlobalObject::GlobalType::BindingDetail); if (!cleanGlobal) { @@ -14375,13 +16748,17 @@ def initFromJSONMethod(self): bool ok = ParseJSON(cx, aJSON, &json); NS_ENSURE_TRUE(ok, false); return Init(cx, json); - """)) + """ + ), + ) def toJSONMethod(self): return ClassMethod( - "ToJSON", "bool", - [Argument('nsAString&', 'aJSON')], - body=dedent(""" + "ToJSON", + "bool", + [Argument("nsAString&", "aJSON")], + body=dedent( + """ AutoJSAPI jsapi; jsapi.Init(); JSContext *cx = jsapi.cx(); @@ -14402,7 +16779,10 @@ def toJSONMethod(self): } JS::Rooted obj(cx, &val.toObject()); return StringifyToJSON(cx, obj, aJSON); - """), const=True) + """ + ), + const=True, + ) def 
toObjectInternalMethod(self): body = "" @@ -14416,7 +16796,8 @@ def toObjectInternalMethod(self): } """, - dictName=self.makeClassName(self.dictionary)) + dictName=self.makeClassName(self.dictionary), + ) if self.dictionary.parent: body += fill( @@ -14428,7 +16809,8 @@ def toObjectInternalMethod(self): JS::Rooted obj(cx, &rval.toObject()); """, - dictName=self.makeClassName(self.dictionary.parent)) + dictName=self.makeClassName(self.dictionary.parent), + ) else: body += dedent( """ @@ -14438,22 +16820,32 @@ def toObjectInternalMethod(self): } rval.set(JS::ObjectValue(*obj)); - """) + """ + ) if self.memberInfo: - body += "\n".join(self.getMemberDefinition(m).define() - for m in self.memberInfo) + body += "\n".join( + self.getMemberDefinition(m).define() for m in self.memberInfo + ) body += "\nreturn true;\n" - return ClassMethod("ToObjectInternal", "bool", [ - Argument('JSContext*', 'cx'), - Argument('JS::MutableHandle', 'rval'), - ], const=True, body=body) + return ClassMethod( + "ToObjectInternal", + "bool", + [ + Argument("JSContext*", "cx"), + Argument("JS::MutableHandle", "rval"), + ], + const=True, + body=body, + ) def initIdsMethod(self): assert self.needToInitIds - return initIdsClassMethod([m.identifier.name for m in self.dictionary.members], - "%sAtoms" % self.makeClassName(self.dictionary)) + return initIdsClassMethod( + [m.identifier.name for m in self.dictionary.members], + "%sAtoms" % self.makeClassName(self.dictionary), + ) def traceDictionaryMethod(self): body = "" @@ -14461,38 +16853,50 @@ def traceDictionaryMethod(self): cls = self.makeClassName(self.dictionary.parent) body += "%s::TraceDictionary(trc);\n" % cls - memberTraces = [self.getMemberTrace(m) - for m in self.dictionary.members - if typeNeedsRooting(m.type)] + memberTraces = [ + self.getMemberTrace(m) + for m in self.dictionary.members + if typeNeedsRooting(m.type) + ] if memberTraces: body += "\n".join(memberTraces) - return ClassMethod("TraceDictionary", "void", [ - Argument("JSTracer*", "trc"), - ], body=body) + return ClassMethod( + "TraceDictionary", + "void", + [ + Argument("JSTracer*", "trc"), + ], + body=body, + ) @staticmethod def dictionaryNeedsCycleCollection(dictionary): - return (any(idlTypeNeedsCycleCollection(m.type) for m in dictionary.members) or - (dictionary.parent and - CGDictionary.dictionaryNeedsCycleCollection(dictionary.parent))) + return any(idlTypeNeedsCycleCollection(m.type) for m in dictionary.members) or ( + dictionary.parent + and CGDictionary.dictionaryNeedsCycleCollection(dictionary.parent) + ) def traverseForCCMethod(self): body = "" - if (self.dictionary.parent and - self.dictionaryNeedsCycleCollection(self.dictionary.parent)): + if self.dictionary.parent and self.dictionaryNeedsCycleCollection( + self.dictionary.parent + ): cls = self.makeClassName(self.dictionary.parent) body += "%s::TraverseForCC(aCallback, aFlags);\n" % cls for m, _ in self.memberInfo: if idlTypeNeedsCycleCollection(m.type): - memberName = self.makeMemberName(m.identifier.name); - body += ('ImplCycleCollectionTraverse(aCallback, %s, "%s", aFlags);\n' % - (memberName, memberName)) + memberName = self.makeMemberName(m.identifier.name) + body += ( + 'ImplCycleCollectionTraverse(aCallback, %s, "%s", aFlags);\n' + % (memberName, memberName) + ) return ClassMethod( - "TraverseForCC", "void", + "TraverseForCC", + "void", [ Argument("nsCycleCollectionTraversalCallback&", "aCallback"), Argument("uint32_t", "aFlags"), @@ -14501,26 +16905,32 @@ def traverseForCCMethod(self): # Inline so we don't pay a codesize hit 
unless someone actually uses # this traverse method. inline=True, - bodyInHeader=True) + bodyInHeader=True, + ) def unlinkForCCMethod(self): body = "" - if (self.dictionary.parent and - self.dictionaryNeedsCycleCollection(self.dictionary.parent)): + if self.dictionary.parent and self.dictionaryNeedsCycleCollection( + self.dictionary.parent + ): cls = self.makeClassName(self.dictionary.parent) body += "%s::UnlinkForCC();\n" % cls for m, _ in self.memberInfo: if idlTypeNeedsCycleCollection(m.type): - memberName = self.makeMemberName(m.identifier.name); - body += ('ImplCycleCollectionUnlink(%s);\n' % memberName) + memberName = self.makeMemberName(m.identifier.name) + body += "ImplCycleCollectionUnlink(%s);\n" % memberName return ClassMethod( - "UnlinkForCC", "void", [], body=body, + "UnlinkForCC", + "void", + [], + body=body, # Inline so we don't pay a codesize hit unless someone actually uses # this unlink method. inline=True, - bodyInHeader=True) + bodyInHeader=True, + ) def assignmentOperator(self): body = CGList([]) @@ -14529,65 +16939,78 @@ def assignmentOperator(self): for m, _ in self.memberInfo: memberName = self.makeMemberName(m.identifier.name) if m.canHaveMissingValue(): - memberAssign = CGGeneric(fill( - """ + memberAssign = CGGeneric( + fill( + """ ${name}.Reset(); if (aOther.${name}.WasPassed()) { ${name}.Construct(aOther.${name}.Value()); } """, - name=memberName)) + name=memberName, + ) + ) else: - memberAssign = CGGeneric( - "%s = aOther.%s;\n" % (memberName, memberName)) + memberAssign = CGGeneric("%s = aOther.%s;\n" % (memberName, memberName)) body.append(memberAssign) body.append(CGGeneric("return *this;\n")) return ClassMethod( - "operator=", "%s&" % self.makeClassName(self.dictionary), - [Argument("const %s&" % self.makeClassName(self.dictionary), - "aOther")], - body=body.define()) + "operator=", + "%s&" % self.makeClassName(self.dictionary), + [Argument("const %s&" % self.makeClassName(self.dictionary), "aOther")], + body=body.define(), + ) def canHaveEqualsOperator(self): - return all(m.type.isString() or m.type.isPrimitive() for (m,_) in - self.memberInfo) + return all( + m.type.isString() or m.type.isPrimitive() for (m, _) in self.memberInfo + ) def equalsOperator(self): body = CGList([]) for m, _ in self.memberInfo: memberName = self.makeMemberName(m.identifier.name) - memberTest = CGGeneric(fill( - """ + memberTest = CGGeneric( + fill( + """ if (${memberName} != aOther.${memberName}) { return false; } """, - memberName=memberName)) + memberName=memberName, + ) + ) body.append(memberTest) body.append(CGGeneric("return true;\n")) return ClassMethod( - "operator==", "bool", - [Argument("const %s&" % self.makeClassName(self.dictionary), - "aOther") - ], const=True, body=body.define()) + "operator==", + "bool", + [Argument("const %s&" % self.makeClassName(self.dictionary), "aOther")], + const=True, + body=body.define(), + ) def getStructs(self): d = self.dictionary selfName = self.makeClassName(d) - members = [ClassMember(self.makeMemberName(m[0].identifier.name), - self.getMemberType(m), - visibility="public", - body=self.getMemberInitializer(m), - hasIgnoreInitCheckFlag=True) - for m in self.memberInfo] + members = [ + ClassMember( + self.makeMemberName(m[0].identifier.name), + self.getMemberType(m), + visibility="public", + body=self.getMemberInitializer(m), + hasIgnoreInitCheckFlag=True, + ) + for m in self.memberInfo + ] if d.parent: # We always want to init our parent with our non-initializing # constructor arg, because either we're about to init ourselves (and # 
hence our parent) or we don't want any init happening. baseConstructors = [ - "%s(%s)" % (self.makeClassName(d.parent), - self.getNonInitializingCtorArg()) + "%s(%s)" + % (self.makeClassName(d.parent), self.getNonInitializingCtorArg()) ] else: baseConstructors = None @@ -14595,7 +17018,7 @@ def getStructs(self): if d.needsConversionFromJS: initArgs = "nullptr, JS::NullHandleValue" else: - initArgs ="" + initArgs = "" ctors = [ ClassConstructor( [], @@ -14603,14 +17026,17 @@ def getStructs(self): baseConstructors=baseConstructors, body=( "// Safe to pass a null context if we pass a null value\n" - "Init(%s);\n" % initArgs)), + "Init(%s);\n" % initArgs + ), + ), ClassConstructor( [Argument("const FastDictionaryInitializer&", "")], visibility="public", baseConstructors=baseConstructors, explicit=True, bodyInHeader=True, - body='// Do nothing here; this is used by our "Fast" subclass\n') + body='// Do nothing here; this is used by our "Fast" subclass\n', + ), ] methods = [] @@ -14624,15 +17050,13 @@ def getStructs(self): methods.append(self.simpleInitMethod()) canBeRepresentedAsJSON = self.dictionarySafeToJSONify(d) - if (canBeRepresentedAsJSON and - d.getExtendedAttribute("GenerateInitFromJSON")): + if canBeRepresentedAsJSON and d.getExtendedAttribute("GenerateInitFromJSON"): methods.append(self.initFromJSONMethod()) if d.needsConversionToJS: methods.append(self.toObjectInternalMethod()) - if (canBeRepresentedAsJSON and - d.getExtendedAttribute("GenerateToJSON")): + if canBeRepresentedAsJSON and d.getExtendedAttribute("GenerateToJSON"): methods.append(self.toJSONMethod()) methods.append(self.traceDictionaryMethod()) @@ -14652,13 +17076,16 @@ def getStructs(self): # Note: gcc's -Wextra has a warning against not initializng our # base explicitly. If we have one. Use our non-initializing base # constructor to get around that. 
- ctors.append(ClassConstructor([Argument("const %s&" % selfName, - "aOther")], - bodyInHeader=True, - visibility="public", - baseConstructors=baseConstructors, - explicit=True, - body="*this = aOther;\n")) + ctors.append( + ClassConstructor( + [Argument("const %s&" % selfName, "aOther")], + bodyInHeader=True, + visibility="public", + baseConstructors=baseConstructors, + explicit=True, + body="*this = aOther;\n", + ) + ) methods.append(self.assignmentOperator()) else: disallowCopyConstruction = True @@ -14666,31 +17093,32 @@ def getStructs(self): if self.canHaveEqualsOperator(): methods.append(self.equalsOperator()) - struct = CGClass(selfName, - bases=[ClassBase(self.base())], - members=members, - constructors=ctors, - methods=methods, - isStruct=True, - disallowCopyConstruction=disallowCopyConstruction) + struct = CGClass( + selfName, + bases=[ClassBase(self.base())], + members=members, + constructors=ctors, + methods=methods, + isStruct=True, + disallowCopyConstruction=disallowCopyConstruction, + ) fastDictionaryCtor = ClassConstructor( [], visibility="public", bodyInHeader=True, - baseConstructors=["%s(%s)" % - (selfName, - self.getNonInitializingCtorArg())], - body="// Doesn't matter what int we pass to the parent constructor\n") + baseConstructors=["%s(%s)" % (selfName, self.getNonInitializingCtorArg())], + body="// Doesn't matter what int we pass to the parent constructor\n", + ) - fastStruct = CGClass("Fast" + selfName, - bases=[ClassBase(selfName)], - constructors=[fastDictionaryCtor], - isStruct=True) + fastStruct = CGClass( + "Fast" + selfName, + bases=[ClassBase(selfName)], + constructors=[fastDictionaryCtor], + isStruct=True, + ) - return CGList([struct, - CGNamespace('binding_detail', fastStruct)], - "\n") + return CGList([struct, CGNamespace("binding_detail", fastStruct)], "\n") def deps(self): return self.dictionary.getDeps() @@ -14747,7 +17175,7 @@ def getMemberConversion(self, memberInfo, isKnownMissing=False): # it's scoped down to the conversion so we can just use # anything we want. 
"holderName": "holder", - "passedToJSImpl": "passedToJSImpl" + "passedToJSImpl": "passedToJSImpl", } if isKnownMissing: @@ -14767,13 +17195,14 @@ def getMemberConversion(self, memberInfo, isKnownMissing=False): replacements["haveValue"] = "!isNull && !temp->isUndefined()" propId = self.makeIdName(member.identifier.name) - propGet = ("JS_GetPropertyById(cx, *object, atomsCache->%s, temp.ptr())" % - propId) + propGet = "JS_GetPropertyById(cx, *object, atomsCache->%s, temp.ptr())" % propId conversionReplacements = { "prop": self.makeMemberName(member.identifier.name), - "convert": string.Template(conversionInfo.template).substitute(replacements), - "propGet": propGet + "convert": string.Template(conversionInfo.template).substitute( + replacements + ), + "propGet": propGet, } # The conversion code will only run where a default value or a value passed # by the author needs to get converted, so we can remember if we have any @@ -14782,24 +17211,30 @@ def getMemberConversion(self, memberInfo, isKnownMissing=False): if isKnownMissing: conversion = "" else: - setTempValue = CGGeneric(dedent( - """ + setTempValue = CGGeneric( + dedent( + """ if (!${propGet}) { return false; } - """)) + """ + ) + ) conditions = getConditionList(member, "cx", "*object") if len(conditions) != 0: - setTempValue = CGIfElseWrapper(conditions.define(), - setTempValue, - CGGeneric("temp->setUndefined();\n")) + setTempValue = CGIfElseWrapper( + conditions.define(), + setTempValue, + CGGeneric("temp->setUndefined();\n"), + ) setTempValue = CGIfWrapper(setTempValue, "!isNull") conversion = setTempValue.define() if member.defaultValue: - if (member.type.isUnion() and - (not member.type.nullable() or - not isinstance(member.defaultValue, IDLNullValue))): + if member.type.isUnion() and ( + not member.type.nullable() + or not isinstance(member.defaultValue, IDLNullValue) + ): # Since this has a default value, it might have been initialized # already. Go ahead and uninit it before we try to init it # again. @@ -14811,7 +17246,8 @@ def getMemberConversion(self, memberInfo, isKnownMissing=False): ${memberName}.Value().Uninit(); } """, - memberName=memberName) + memberName=memberName, + ) else: conversion += "%s.Uninit();\n" % memberName conversion += "${convert}" @@ -14828,18 +17264,24 @@ def getMemberConversion(self, memberInfo, isKnownMissing=False): // just assume they know what they're doing. 
return cx.ThrowErrorMessage("%s"); } - """ % self.getMemberSourceDescription(member)) - conversionReplacements["convert"] = indent(conversionReplacements["convert"]).rstrip() + """ + % self.getMemberSourceDescription(member) + ) + conversionReplacements["convert"] = indent( + conversionReplacements["convert"] + ).rstrip() else: conversion += ( "if (!isNull && !temp->isUndefined()) {\n" " ${prop}.Construct();\n" "${convert}" - "}\n") - conversionReplacements["convert"] = indent(conversionReplacements["convert"]) + "}\n" + ) + conversionReplacements["convert"] = indent( + conversionReplacements["convert"] + ) - return CGGeneric( - string.Template(conversion).substitute(conversionReplacements)) + return CGGeneric(string.Template(conversion).substitute(conversionReplacements)) def getMemberDefinition(self, memberInfo): member = memberInfo[0] @@ -14853,54 +17295,74 @@ def getMemberDefinition(self, memberInfo): # If you have to change this list (which you shouldn't!), make sure it # continues to match the list in test_Object.prototype_props.html - if (member.identifier.name in - ["constructor", "toString", "toLocaleString", "valueOf", - "hasOwnProperty", "isPrototypeOf", "propertyIsEnumerable", - "__defineGetter__", "__defineSetter__", "__lookupGetter__", - "__lookupSetter__", "__proto__"]): - raise TypeError("'%s' member of %s dictionary shadows " - "a property of Object.prototype, and Xrays to " - "Object can't handle that.\n" - "%s" % - (member.identifier.name, - self.dictionary.identifier.name, - member.location)) + if member.identifier.name in [ + "constructor", + "toString", + "toLocaleString", + "valueOf", + "hasOwnProperty", + "isPrototypeOf", + "propertyIsEnumerable", + "__defineGetter__", + "__defineSetter__", + "__lookupGetter__", + "__lookupSetter__", + "__proto__", + ]: + raise TypeError( + "'%s' member of %s dictionary shadows " + "a property of Object.prototype, and Xrays to " + "Object can't handle that.\n" + "%s" + % ( + member.identifier.name, + self.dictionary.identifier.name, + member.location, + ) + ) propDef = ( - 'JS_DefinePropertyById(cx, obj, atomsCache->%s, temp, JSPROP_ENUMERATE)' % - self.makeIdName(member.identifier.name)) + "JS_DefinePropertyById(cx, obj, atomsCache->%s, temp, JSPROP_ENUMERATE)" + % self.makeIdName(member.identifier.name) + ) innerTemplate = wrapForType( - member.type, self.descriptorProvider, + member.type, + self.descriptorProvider, { - 'result': "currentValue", - 'successCode': ("if (!%s) {\n" - " return false;\n" - "}\n" - "break;\n" % propDef), - 'jsvalRef': "temp", - 'jsvalHandle': "&temp", - 'returnsNewObject': False, + "result": "currentValue", + "successCode": ( + "if (!%s) {\n" " return false;\n" "}\n" "break;\n" % propDef + ), + "jsvalRef": "temp", + "jsvalHandle": "&temp", + "returnsNewObject": False, # 'obj' can just be allowed to be the string "obj", since that # will be our dictionary object, which is presumably itself in # the right scope. - 'spiderMonkeyInterfacesAreStructs': True - }) + "spiderMonkeyInterfacesAreStructs": True, + }, + ) conversion = CGGeneric(innerTemplate) - conversion = CGWrapper(conversion, - pre=("JS::Rooted temp(cx);\n" - "%s const & currentValue = %s;\n" % - (declType.define(), memberData) - )) + conversion = CGWrapper( + conversion, + pre=( + "JS::Rooted temp(cx);\n" + "%s const & currentValue = %s;\n" % (declType.define(), memberData) + ), + ) # Now make sure that our successCode can actually break out of the # conversion. This incidentally gives us a scope for 'temp' and # 'currentValue'. 
conversion = CGWrapper( CGIndenter(conversion), - pre=("do {\n" - " // block for our 'break' successCode and scope for 'temp' and 'currentValue'\n"), - post="} while(false);\n") + pre=( + "do {\n" + " // block for our 'break' successCode and scope for 'temp' and 'currentValue'\n" + ), + post="} while(false);\n", + ) if member.canHaveMissingValue(): # Only do the conversion if we have a value conversion = CGIfWrapper(conversion, "%s.WasPassed()" % memberLoc) @@ -14919,34 +17381,39 @@ def getMemberTrace(self, member): # The data is inside the Optional<> memberData = "%s.Value()" % memberLoc - memberName = "%s.%s" % (self.makeClassName(self.dictionary), - memberLoc) + memberName = "%s.%s" % (self.makeClassName(self.dictionary), memberLoc) if type.isObject(): - trace = CGGeneric('JS::UnsafeTraceRoot(trc, %s, "%s");\n' % - ("&"+memberData, memberName)) + trace = CGGeneric( + 'JS::UnsafeTraceRoot(trc, %s, "%s");\n' % ("&" + memberData, memberName) + ) if type.nullable(): trace = CGIfWrapper(trace, memberData) elif type.isAny(): - trace = CGGeneric('JS::UnsafeTraceRoot(trc, %s, "%s");\n' % - ("&"+memberData, memberName)) - elif (type.isSequence() or type.isDictionary() or - type.isSpiderMonkeyInterface() or type.isUnion() or - type.isRecord()): + trace = CGGeneric( + 'JS::UnsafeTraceRoot(trc, %s, "%s");\n' % ("&" + memberData, memberName) + ) + elif ( + type.isSequence() + or type.isDictionary() + or type.isSpiderMonkeyInterface() + or type.isUnion() + or type.isRecord() + ): if type.nullable(): memberNullable = memberData memberData = "%s.Value()" % memberData if type.isSequence(): - trace = CGGeneric('DoTraceSequence(trc, %s);\n' % memberData) + trace = CGGeneric("DoTraceSequence(trc, %s);\n" % memberData) elif type.isDictionary(): - trace = CGGeneric('%s.TraceDictionary(trc);\n' % memberData) + trace = CGGeneric("%s.TraceDictionary(trc);\n" % memberData) elif type.isUnion(): - trace = CGGeneric('%s.TraceUnion(trc);\n' % memberData) + trace = CGGeneric("%s.TraceUnion(trc);\n" % memberData) elif type.isRecord(): - trace = CGGeneric('TraceRecord(trc, %s);\n' % memberData) + trace = CGGeneric("TraceRecord(trc, %s);\n" % memberData) else: assert type.isSpiderMonkeyInterface() - trace = CGGeneric('%s.TraceSelf(trc);\n' % memberData) + trace = CGGeneric("%s.TraceSelf(trc);\n" % memberData) if type.nullable(): trace = CGIfWrapper(trace, "!%s.IsNull()" % memberNullable) else: @@ -14980,8 +17447,10 @@ def getMemberInitializer(self, memberInfo): return initializerForType(type) def getMemberSourceDescription(self, member): - return ("'%s' member of %s" % - (member.identifier.name, self.dictionary.identifier.name)) + return "'%s' member of %s" % ( + member.identifier.name, + self.dictionary.identifier.name, + ) @staticmethod def makeIdName(name): @@ -14993,8 +17462,9 @@ def getNonInitializingCtorArg(): @staticmethod def isDictionaryCopyConstructible(dictionary): - if (dictionary.parent and - not CGDictionary.isDictionaryCopyConstructible(dictionary.parent)): + if dictionary.parent and not CGDictionary.isDictionaryCopyConstructible( + dictionary.parent + ): return False return all(isTypeCopyConstructible(m.type) for m in dictionary.members) @@ -15020,8 +17490,7 @@ def typeSafeToJSONify(type): if type.isUnion(): # OK if everything in it is ok. 
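Most of these hunks compose C++ out of small CG* pieces: CGGeneric holds a literal snippet, CGIfWrapper puts it behind a condition, and CGWrapper adds pre/post text, exactly as getMemberTrace does above. A toy version of that composition model; these are simplified stand-ins, not the real Codegen.py classes:

```python
# Toy stand-ins for CGGeneric / CGWrapper / CGIfWrapper to show how the
# generator composes C++ text; the real classes in Codegen.py do much more.
from textwrap import indent


class CGGeneric:
    def __init__(self, text):
        self.text = text

    def define(self):
        return self.text


class CGWrapper(CGGeneric):
    def __init__(self, child, pre="", post=""):
        super().__init__(pre + child.define() + post)


class CGIfWrapper(CGWrapper):
    def __init__(self, child, condition):
        body = indent(child.define(), "  ")
        super().__init__(CGGeneric(body), pre="if (%s) {\n" % condition, post="}\n")


trace = CGGeneric('JS::UnsafeTraceRoot(trc, &mFoo.Value(), "mFoo");\n')
trace = CGIfWrapper(trace, "!mFoo.IsNull()")
trace = CGIfWrapper(trace, "mFoo.WasPassed()")
print(trace.define())
```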
- return all(CGDictionary.typeSafeToJSONify(t) - for t in type.flatMemberTypes) + return all(CGDictionary.typeSafeToJSONify(t) for t in type.flatMemberTypes) if type.isDictionary(): # OK if the dictionary is OK @@ -15046,82 +17515,101 @@ def typeSafeToJSONify(type): @staticmethod def dictionarySafeToJSONify(dictionary): # The dictionary itself is OK, so we're good if all our types are. - return all(CGDictionary.typeSafeToJSONify(m.type) - for m in dictionary.members) + return all(CGDictionary.typeSafeToJSONify(m.type) for m in dictionary.members) class CGRegisterWorkerBindings(CGAbstractMethod): def __init__(self, config): - CGAbstractMethod.__init__(self, None, 'RegisterWorkerBindings', 'bool', - [Argument('JSContext*', 'aCx'), - Argument('JS::Handle', 'aObj')]) + CGAbstractMethod.__init__( + self, + None, + "RegisterWorkerBindings", + "bool", + [Argument("JSContext*", "aCx"), Argument("JS::Handle", "aObj")], + ) self.config = config def definition_body(self): - descriptors = self.config.getDescriptors(hasInterfaceObject=True, - isExposedInAnyWorker=True, - register=True) + descriptors = self.config.getDescriptors( + hasInterfaceObject=True, isExposedInAnyWorker=True, register=True + ) conditions = [] for desc in descriptors: bindingNS = toBindingNamespace(desc.name) condition = "!%s::GetConstructorObject(aCx)" % bindingNS if desc.isExposedConditionally(): condition = ( - "%s::ConstructorEnabled(aCx, aObj) && " % bindingNS - + condition) + "%s::ConstructorEnabled(aCx, aObj) && " % bindingNS + condition + ) conditions.append(condition) - lines = [CGIfWrapper(CGGeneric("return false;\n"), condition) for - condition in conditions] + lines = [ + CGIfWrapper(CGGeneric("return false;\n"), condition) + for condition in conditions + ] lines.append(CGGeneric("return true;\n")) return CGList(lines, "\n").define() + class CGRegisterWorkerDebuggerBindings(CGAbstractMethod): def __init__(self, config): - CGAbstractMethod.__init__(self, None, 'RegisterWorkerDebuggerBindings', 'bool', - [Argument('JSContext*', 'aCx'), - Argument('JS::Handle', 'aObj')]) + CGAbstractMethod.__init__( + self, + None, + "RegisterWorkerDebuggerBindings", + "bool", + [Argument("JSContext*", "aCx"), Argument("JS::Handle", "aObj")], + ) self.config = config def definition_body(self): - descriptors = self.config.getDescriptors(hasInterfaceObject=True, - isExposedInWorkerDebugger=True, - register=True) + descriptors = self.config.getDescriptors( + hasInterfaceObject=True, isExposedInWorkerDebugger=True, register=True + ) conditions = [] for desc in descriptors: bindingNS = toBindingNamespace(desc.name) condition = "!%s::GetConstructorObject(aCx)" % bindingNS if desc.isExposedConditionally(): condition = ( - "%s::ConstructorEnabled(aCx, aObj) && " % bindingNS - + condition) + "%s::ConstructorEnabled(aCx, aObj) && " % bindingNS + condition + ) conditions.append(condition) - lines = [CGIfWrapper(CGGeneric("return false;\n"), condition) for - condition in conditions] + lines = [ + CGIfWrapper(CGGeneric("return false;\n"), condition) + for condition in conditions + ] lines.append(CGGeneric("return true;\n")) return CGList(lines, "\n").define() + class CGRegisterWorkletBindings(CGAbstractMethod): def __init__(self, config): - CGAbstractMethod.__init__(self, None, 'RegisterWorkletBindings', 'bool', - [Argument('JSContext*', 'aCx'), - Argument('JS::Handle', 'aObj')]) + CGAbstractMethod.__init__( + self, + None, + "RegisterWorkletBindings", + "bool", + [Argument("JSContext*", "aCx"), Argument("JS::Handle", "aObj")], + ) self.config = 
config def definition_body(self): - descriptors = self.config.getDescriptors(hasInterfaceObject=True, - isExposedInAnyWorklet=True, - register=True) + descriptors = self.config.getDescriptors( + hasInterfaceObject=True, isExposedInAnyWorklet=True, register=True + ) conditions = [] for desc in descriptors: bindingNS = toBindingNamespace(desc.name) condition = "!%s::GetConstructorObject(aCx)" % bindingNS if desc.isExposedConditionally(): condition = ( - "%s::ConstructorEnabled(aCx, aObj) && " % bindingNS - + condition) + "%s::ConstructorEnabled(aCx, aObj) && " % bindingNS + condition + ) conditions.append(condition) - lines = [CGIfWrapper(CGGeneric("return false;\n"), condition) for - condition in conditions] + lines = [ + CGIfWrapper(CGGeneric("return false;\n"), condition) + for condition in conditions + ] lines.append(CGGeneric("return true;\n")) return CGList(lines, "\n").define() @@ -15130,10 +17618,13 @@ def getGlobalNames(config): names = [] for desc in config.getDescriptors(registersGlobalNamesOnWindow=True): names.append((desc.name, desc)) - names.extend((n.identifier.name, desc) for n in desc.interface.namedConstructors) + names.extend( + (n.identifier.name, desc) for n in desc.interface.namedConstructors + ) names.extend((n, desc) for n in desc.interface.legacyWindowAliases) return names + class CGGlobalNames(CGGeneric): def __init__(self, config): currentOffset = 0 @@ -15148,7 +17639,8 @@ def __init__(self, config): # Generate the entry declaration # XXX(nika): mCreate & mEnabled require relocations. If we want to # reduce those, we could move them into separate tables. - nativeEntry = fill(""" + nativeEntry = fill( + """ { /* mNameOffset */ ${nameOffset}, // "${name}" /* mNameLength */ ${nameLength}, @@ -15161,8 +17653,12 @@ def __init__(self, config): nameLength=len(name), name=name, realname=desc.name, - enabled=("%s_Binding::ConstructorEnabled" % desc.name - if desc.isExposedConditionally() else "nullptr")) + enabled=( + "%s_Binding::ConstructorEnabled" % desc.name + if desc.isExposedConditionally() + else "nullptr" + ), + ) entries.append((name, nativeEntry)) @@ -15170,29 +17666,39 @@ def __init__(self, config): # PerfectHash will assert if we give it an empty set of entries, so we # just generate a dummy value. if len(entries) == 0: - CGGeneric.__init__(self, define=dedent(''' + CGGeneric.__init__( + self, + define=dedent( + """ static_assert(false, "No WebIDL global name entries!"); - ''')) + """ + ), + ) return # Build the perfect hash function. 
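CGGlobalNames, reformatted above, stores every registered global name in one concatenated `sNames` array and keeps only (offset, length) pairs in the entries it hands to PerfectHash. A small sketch of just that offset bookkeeping; the trailing-NUL accounting is an assumption of the sketch, and the C++ entry emission and hashing are left out:

```python
# Sketch of the sNames/offset bookkeeping done by CGGlobalNames; the real code
# also emits the C++ entry structs and feeds the entries to PerfectHash.
def build_name_table(names):
    blob = []
    entries = []
    offset = 0
    for name in names:
        entries.append({"nameOffset": offset, "nameLength": len(name), "name": name})
        blob.append(name)
        offset += len(name) + 1  # assumes each name is NUL-terminated in sNames
    return "\0".join(blob) + "\0", entries


sNames, entries = build_name_table(["Window", "Worker", "XMLHttpRequest"])
for e in entries:
    assert sNames[e["nameOffset"] : e["nameOffset"] + e["nameLength"]] == e["name"]
print(entries)
```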
phf = PerfectHash(entries, GLOBAL_NAMES_PHF_SIZE) # Generate code for the PHF - phfCodegen = phf.codegen('WebIDLGlobalNameHash::sEntries', - 'WebIDLNameTableEntry') + phfCodegen = phf.codegen( + "WebIDLGlobalNameHash::sEntries", "WebIDLNameTableEntry" + ) entries = phfCodegen.gen_entries(lambda e: e[1]) getter = phfCodegen.gen_jslinearstr_getter( - name='WebIDLGlobalNameHash::GetEntry', - return_type='const WebIDLNameTableEntry*', - return_entry=dedent(""" + name="WebIDLGlobalNameHash::GetEntry", + return_type="const WebIDLNameTableEntry*", + return_entry=dedent( + """ if (JS_LinearStringEqualsAscii(aKey, sNames + entry.mNameOffset, entry.mNameLength)) { return &entry; } return nullptr; - """)) + """ + ), + ) - define = fill(""" + define = fill( + """ const uint32_t WebIDLGlobalNameHash::sCount = ${count}; const char WebIDLGlobalNameHash::sNames[] = @@ -15206,7 +17712,8 @@ def __init__(self, config): count=len(phf.entries), strings="\n".join(strings) + ";\n", entries=entries, - getter=getter) + getter=getter, + ) CGGeneric.__init__(self, define=define) @@ -15232,11 +17739,11 @@ def dependencySortObjects(objects, dependencyGetter, nameGetter): while len(objects) != 0: # Find the dictionaries that don't depend on anything else # anymore and move them over. - toMove = [o for o in objects if - len(dependencyGetter(o) & objects) == 0] + toMove = [o for o in objects if len(dependencyGetter(o) & objects) == 0] if len(toMove) == 0: - raise TypeError("Loop in dependency graph\n" + - "\n".join(o.location for o in objects)) + raise TypeError( + "Loop in dependency graph\n" + "\n".join(o.location for o in objects) + ) objects = objects - set(toMove) sortedObjects.extend(sorted(toMove, key=nameGetter)) return sortedObjects @@ -15247,6 +17754,7 @@ class ForwardDeclarationBuilder: Create a canonical representation of a set of namespaced forward declarations. """ + def __init__(self): """ The set of declarations is represented as a tree of nested namespaces. @@ -15264,9 +17772,11 @@ def _ensureNonTemplateType(self, type): # forward-declare those, and trying to do it naively is not going to # go well (e.g. we may have :: characters inside the type we're # templated on!). Just bail out. - raise TypeError("Attempt to use ForwardDeclarationBuilder on " - "templated type %s. We don't know how to do that " - "yet." % type) + raise TypeError( + "Attempt to use ForwardDeclarationBuilder on " + "templated type %s. We don't know how to do that " + "yet." % type + ) def _listAdd(self, namespaces, name, isStruct=False): """ @@ -15277,7 +17787,7 @@ def _listAdd(self, namespaces, name, isStruct=False): child = self.children.setdefault(namespaces[0], ForwardDeclarationBuilder()) child._listAdd(namespaces[1:], name, isStruct) else: - assert '::' not in name + assert "::" not in name self.decls.add((name, isStruct)) def addInMozillaDom(self, name, isStruct=False): @@ -15285,7 +17795,7 @@ def addInMozillaDom(self, name, isStruct=False): Add a forward declaration to the mozilla::dom:: namespace. |name| should not contain any other namespaces. """ - self._ensureNonTemplateType(name); + self._ensureNonTemplateType(name) self._listAdd(["mozilla", "dom"], name, isStruct) def add(self, nativeType, isStruct=False): @@ -15293,8 +17803,8 @@ def add(self, nativeType, isStruct=False): Add a forward declaration, where |nativeType| is a string containing the type and its namespaces, in the usual C++ way. 
""" - self._ensureNonTemplateType(nativeType); - components = nativeType.split('::') + self._ensureNonTemplateType(nativeType) + components = nativeType.split("::") self._listAdd(components[:-1], components[-1], isStruct) def _build(self, atTopLevel): @@ -15303,14 +17813,22 @@ def _build(self, atTopLevel): """ decls = [] if self.decls: - decls.append(CGList([CGClassForwardDeclare(cname, isStruct) - for cname, isStruct in sorted(self.decls)])) + decls.append( + CGList( + [ + CGClassForwardDeclare(cname, isStruct) + for cname, isStruct in sorted(self.decls) + ] + ) + ) for namespace, child in sorted(six.iteritems(self.children)): - decls.append(CGNamespace(namespace, child._build(atTopLevel=False), declareOnly=True)) + decls.append( + CGNamespace(namespace, child._build(atTopLevel=False), declareOnly=True) + ) cg = CGList(decls, "\n") if not atTopLevel and len(decls) + len(self.decls) > 1: - cg = CGWrapper(cg, pre='\n', post='\n') + cg = CGWrapper(cg, pre="\n", post="\n") return cg def build(self): @@ -15354,8 +17872,16 @@ class CGForwardDeclarations(CGWrapper): boolean. If the boolean is true we will declare a struct, otherwise we'll declare a class. """ - def __init__(self, config, descriptors, callbacks, - dictionaries, callbackInterfaces, additionalDeclarations=[]): + + def __init__( + self, + config, + descriptors, + callbacks, + dictionaries, + callbackInterfaces, + additionalDeclarations=[], + ): builder = ForwardDeclarationBuilder() # Needed for at least Wrap. @@ -15375,11 +17901,13 @@ def __init__(self, config, descriptors, callbacks, # declared in the header. if d.interface.maplikeOrSetlikeOrIterable: if d.interface.maplikeOrSetlikeOrIterable.hasKeyType(): - builder.forwardDeclareForType(d.interface.maplikeOrSetlikeOrIterable.keyType, - config) + builder.forwardDeclareForType( + d.interface.maplikeOrSetlikeOrIterable.keyType, config + ) if d.interface.maplikeOrSetlikeOrIterable.hasValueType(): - builder.forwardDeclareForType(d.interface.maplikeOrSetlikeOrIterable.valueType, - config) + builder.forwardDeclareForType( + d.interface.maplikeOrSetlikeOrIterable.valueType, config + ) # We just about always need NativePropertyHooks builder.addInMozillaDom("NativePropertyHooks", isStruct=True) @@ -15423,26 +17951,33 @@ class CGBindingRoot(CGThing): Root codegen class for binding generation. Instantiate the class, and call declare or define to generate header or cpp code (respectively). 
""" + def __init__(self, config, prefix, webIDLFile): - bindingHeaders = dict.fromkeys(( - 'mozilla/dom/NonRefcountedDOMObject.h', - ), - True) - bindingDeclareHeaders = dict.fromkeys(( - 'mozilla/dom/BindingDeclarations.h', - 'mozilla/dom/Nullable.h', - 'mozilla/ErrorResult.h', - 'GeckoProfiler.h' + bindingHeaders = dict.fromkeys(("mozilla/dom/NonRefcountedDOMObject.h",), True) + bindingDeclareHeaders = dict.fromkeys( + ( + "mozilla/dom/BindingDeclarations.h", + "mozilla/dom/Nullable.h", + "mozilla/ErrorResult.h", + "GeckoProfiler.h", ), - True) + True, + ) - descriptors = config.getDescriptors(webIDLFile=webIDLFile, - hasInterfaceOrInterfacePrototypeObject=True) + descriptors = config.getDescriptors( + webIDLFile=webIDLFile, hasInterfaceOrInterfacePrototypeObject=True + ) unionTypes = UnionsForFile(config, webIDLFile) - (unionHeaders, unionImplheaders, unionDeclarations, traverseMethods, - unlinkMethods, unionStructs) = UnionTypes(unionTypes, config) + ( + unionHeaders, + unionImplheaders, + unionDeclarations, + traverseMethods, + unlinkMethods, + unionStructs, + ) = UnionTypes(unionTypes, config) bindingDeclareHeaders.update(dict.fromkeys(unionHeaders, True)) bindingHeaders.update(dict.fromkeys(unionImplheaders, True)) @@ -15451,33 +17986,47 @@ def __init__(self, config, prefix, webIDLFile): # BindingUtils.h is only needed for SetToObject. # If it stops being inlined or stops calling CallerSubsumes # both this bit and the bit in UnionTypes can be removed. - bindingDeclareHeaders["mozilla/dom/BindingUtils.h"] = any(d.isObject() for t in unionTypes - for d in t.flatMemberTypes) - bindingDeclareHeaders["mozilla/dom/IterableIterator.h"] = any(d.interface.isIteratorInterface() or - d.interface.isIterable() for d in descriptors) + bindingDeclareHeaders["mozilla/dom/BindingUtils.h"] = any( + d.isObject() for t in unionTypes for d in t.flatMemberTypes + ) + bindingDeclareHeaders["mozilla/dom/IterableIterator.h"] = any( + d.interface.isIteratorInterface() or d.interface.isIterable() + for d in descriptors + ) def descriptorHasCrossOriginProperties(desc): def hasCrossOriginProperty(m): props = memberProperties(m, desc) - return (props.isCrossOriginMethod or - props.isCrossOriginGetter or - props.isCrossOriginSetter) + return ( + props.isCrossOriginMethod + or props.isCrossOriginGetter + or props.isCrossOriginSetter + ) return any(hasCrossOriginProperty(m) for m in desc.interface.members) - bindingDeclareHeaders["mozilla/dom/RemoteObjectProxy.h"] = any(descriptorHasCrossOriginProperties(d) for d in descriptors) - bindingDeclareHeaders["jsapi.h"] = any(descriptorHasCrossOriginProperties(d) for d in descriptors) + bindingDeclareHeaders["mozilla/dom/RemoteObjectProxy.h"] = any( + descriptorHasCrossOriginProperties(d) for d in descriptors + ) + bindingDeclareHeaders["jsapi.h"] = any( + descriptorHasCrossOriginProperties(d) for d in descriptors + ) bindingDeclareHeaders["jspubtd.h"] = not bindingDeclareHeaders["jsapi.h"] bindingDeclareHeaders["js/RootingAPI.h"] = not bindingDeclareHeaders["jsapi.h"] def descriptorHasIteratorAlias(desc): def hasIteratorAlias(m): return m.isMethod() and "@@iterator" in m.aliases + return any(hasIteratorAlias(m) for m in desc.interface.members) - bindingHeaders["js/Symbol.h"] = any(descriptorHasIteratorAlias(d) for d in descriptors) + bindingHeaders["js/Symbol.h"] = any( + descriptorHasIteratorAlias(d) for d in descriptors + ) - bindingHeaders["js/shadow/Object.h"] = any(d.interface.hasMembersInSlots() for d in descriptors) + bindingHeaders["js/shadow/Object.h"] = any( + 
d.interface.hasMembersInSlots() for d in descriptors + ) # The symbols supplied by this header are used so ubiquitously it's not # worth the effort delineating the exact dependency, if it can't be done @@ -15491,19 +18040,25 @@ def hasIteratorAlias(m): def descriptorDeprecated(desc): iface = desc.interface - return any(m.getExtendedAttribute("Deprecated") for m in iface.members + [iface]) + return any( + m.getExtendedAttribute("Deprecated") for m in iface.members + [iface] + ) bindingHeaders["mozilla/dom/Document.h"] = any( - descriptorDeprecated(d) for d in descriptors) + descriptorDeprecated(d) for d in descriptors + ) bindingHeaders["mozilla/dom/DOMJSProxyHandler.h"] = any( - d.concrete and d.proxy for d in descriptors) + d.concrete and d.proxy for d in descriptors + ) bindingHeaders["js/String.h"] = any( - d.needsMissingPropUseCounters for d in descriptors) + d.needsMissingPropUseCounters for d in descriptors + ) hasCrossOriginObjects = any( - d.concrete and d.isMaybeCrossOriginObject() for d in descriptors) + d.concrete and d.isMaybeCrossOriginObject() for d in descriptors + ) bindingHeaders["mozilla/dom/MaybeCrossOriginObject.h"] = hasCrossOriginObjects bindingHeaders["AccessCheck.h"] = hasCrossOriginObjects hasCEReactions = any(d.hasCEReactions() for d in descriptors) @@ -15513,19 +18068,27 @@ def descriptorDeprecated(desc): def descriptorHasChromeOnly(desc): ctor = desc.interface.ctor() - return (any(isChromeOnly(a) or needsContainsHack(a) or - needsCallerType(a) - for a in desc.interface.members) or - desc.interface.getExtendedAttribute("ChromeOnly") is not None or - # JS-implemented interfaces with an interface object get a - # chromeonly _create method. And interfaces with an - # interface object might have a ChromeOnly constructor. - (desc.interface.hasInterfaceObject() and - (desc.interface.isJSImplemented() or - (ctor and isChromeOnly(ctor))))) + return ( + any( + isChromeOnly(a) or needsContainsHack(a) or needsCallerType(a) + for a in desc.interface.members + ) + or desc.interface.getExtendedAttribute("ChromeOnly") is not None + or + # JS-implemented interfaces with an interface object get a + # chromeonly _create method. And interfaces with an + # interface object might have a ChromeOnly constructor. + ( + desc.interface.hasInterfaceObject() + and ( + desc.interface.isJSImplemented() + or (ctor and isChromeOnly(ctor)) + ) + ) + ) # XXXkhuey ugly hack but this is going away soon. 
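Throughout CGBindingRoot, `bindingHeaders` and `bindingDeclareHeaders` are simply dicts from header path to a boolean computed over the file's descriptors; only the headers whose value ends up true are emitted into the include list. A condensed sketch of that idiom with invented placeholder descriptors:

```python
# Condensed sketch of the bindingHeaders idiom in CGBindingRoot: map each
# header to a predicate over the descriptors, then keep only the true ones.
# The descriptor dicts and predicates below are invented placeholders.
descriptors = [
    {"name": "Foo", "deprecated": True, "proxy": False},
    {"name": "Bar", "deprecated": False, "proxy": True},
]

bindingHeaders = {
    "mozilla/dom/NonRefcountedDOMObject.h": True,  # always needed
    "mozilla/dom/Document.h": any(d["deprecated"] for d in descriptors),
    "mozilla/dom/DOMJSProxyHandler.h": any(d["proxy"] for d in descriptors),
}

includes = sorted(header for header, include in bindingHeaders.items() if include)
print("\n".join('#include "%s"' % h for h in includes))
```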
- bindingHeaders['xpcprivate.h'] = webIDLFile.endswith("EventTarget.webidl") + bindingHeaders["xpcprivate.h"] = webIDLFile.endswith("EventTarget.webidl") hasThreadChecks = any(d.hasThreadChecks() for d in descriptors) bindingHeaders["nsThreadUtils.h"] = hasThreadChecks @@ -15534,24 +18097,30 @@ def descriptorHasChromeOnly(desc): def dictionaryHasChromeOnly(dictionary): while dictionary: - if (any(isChromeOnly(m) for m in dictionary.members)): + if any(isChromeOnly(m) for m in dictionary.members): return True dictionary = dictionary.parent return False def needsNonSystemPrincipal(member): return ( - member.getExtendedAttribute("NeedsSubjectPrincipal") == ["NonSystem"] or - member.getExtendedAttribute("SetterNeedsSubjectPrincipal") == ["NonSystem"] or - member.getExtendedAttribute("GetterNeedsSubjectPrincipal") == ["NonSystem"]) + member.getExtendedAttribute("NeedsSubjectPrincipal") == ["NonSystem"] + or member.getExtendedAttribute("SetterNeedsSubjectPrincipal") + == ["NonSystem"] + or member.getExtendedAttribute("GetterNeedsSubjectPrincipal") + == ["NonSystem"] + ) def descriptorNeedsNonSystemPrincipal(d): return any(needsNonSystemPrincipal(m) for m in d.interface.members) def descriptorHasPrefDisabler(desc): iface = desc.interface - return any(PropertyDefiner.getControllingCondition(m, desc).hasDisablers() - for m in iface.members if (m.isMethod() or m.isAttr() or m.isConst())) + return any( + PropertyDefiner.getControllingCondition(m, desc).hasDisablers() + for m in iface.members + if (m.isMethod() or m.isAttr() or m.isConst()) + ) def addPrefHeaderForObject(bindingHeaders, obj): """ @@ -15576,36 +18145,51 @@ def addPrefHeadersForDictionary(bindingHeaders, dictionary): addPrefHeaderForObject(bindingHeaders, interface.ctor()) bindingHeaders["mozilla/dom/WebIDLPrefs.h"] = any( - descriptorHasPrefDisabler(d) for d in descriptors) + descriptorHasPrefDisabler(d) for d in descriptors + ) bindingHeaders["nsContentUtils.h"] = ( - any(descriptorHasChromeOnly(d) for d in descriptors) or - any(descriptorNeedsNonSystemPrincipal(d) for d in descriptors) or - any(dictionaryHasChromeOnly(d) for d in dictionaries)) - hasNonEmptyDictionaries = any( - len(dict.members) > 0 for dict in dictionaries) + any(descriptorHasChromeOnly(d) for d in descriptors) + or any(descriptorNeedsNonSystemPrincipal(d) for d in descriptors) + or any(dictionaryHasChromeOnly(d) for d in dictionaries) + ) + hasNonEmptyDictionaries = any(len(dict.members) > 0 for dict in dictionaries) callbacks = config.getCallbacks(webIDLFile) - callbackDescriptors = config.getDescriptors(webIDLFile=webIDLFile, - isCallback=True) - jsImplemented = config.getDescriptors(webIDLFile=webIDLFile, - isJSImplemented=True) + callbackDescriptors = config.getDescriptors( + webIDLFile=webIDLFile, isCallback=True + ) + jsImplemented = config.getDescriptors( + webIDLFile=webIDLFile, isJSImplemented=True + ) bindingDeclareHeaders["nsWeakReference.h"] = jsImplemented bindingDeclareHeaders["mozilla/dom/PrototypeList.h"] = descriptors bindingHeaders["nsIGlobalObject.h"] = jsImplemented - bindingHeaders["AtomList.h"] = hasNonEmptyDictionaries or jsImplemented or callbackDescriptors + bindingHeaders["AtomList.h"] = ( + hasNonEmptyDictionaries or jsImplemented or callbackDescriptors + ) def descriptorClearsPropsInSlots(descriptor): if not descriptor.wrapperCache: return False - return any(m.isAttr() and m.getExtendedAttribute("StoreInSlot") - for m in descriptor.interface.members) - bindingHeaders["nsJSUtils.h"] = any(descriptorClearsPropsInSlots(d) for d in 
descriptors) + return any( + m.isAttr() and m.getExtendedAttribute("StoreInSlot") + for m in descriptor.interface.members + ) + + bindingHeaders["nsJSUtils.h"] = any( + descriptorClearsPropsInSlots(d) for d in descriptors + ) # Make sure we can sanely use binding_detail in generated code. - cgthings = [CGGeneric(dedent( - """ + cgthings = [ + CGGeneric( + dedent( + """ namespace binding_detail {}; // Just to make sure it's known as a namespace using namespace mozilla::dom::binding_detail; - """))] + """ + ) + ) + ] # Do codegen for all the enums enums = config.getEnums(webIDLFile) @@ -15614,13 +18198,11 @@ def descriptorClearsPropsInSlots(descriptor): bindingDeclareHeaders["mozilla/Span.h"] = enums bindingDeclareHeaders["mozilla/ArrayUtils.h"] = enums - hasCode = (descriptors or callbackDescriptors or dictionaries or - callbacks) + hasCode = descriptors or callbackDescriptors or dictionaries or callbacks bindingHeaders["mozilla/dom/BindingUtils.h"] = hasCode bindingHeaders["mozilla/OwningNonNull.h"] = hasCode bindingHeaders[""] = hasCode - bindingHeaders["mozilla/dom/BindingDeclarations.h"] = ( - not hasCode and enums) + bindingHeaders["mozilla/dom/BindingDeclarations.h"] = not hasCode and enums bindingHeaders["WrapperFactory.h"] = descriptors bindingHeaders["mozilla/dom/DOMJSClass.h"] = descriptors @@ -15635,26 +18217,33 @@ def descriptorClearsPropsInSlots(descriptor): descriptorsHaveUseCounters = any( m.getExtendedAttribute("UseCounter") for d in descriptors - for m in d.interface.members) + for m in d.interface.members + ) descriptorsHaveInstrumentedProps = any( - d.instrumentedProps for d in descriptors if d.concrete) + d.instrumentedProps for d in descriptors if d.concrete + ) bindingHeaders["mozilla/UseCounter.h"] = ( - descriptorsHaveUseCounters or descriptorsHaveInstrumentedProps) + descriptorsHaveUseCounters or descriptorsHaveInstrumentedProps + ) # Make sure to not overwrite existing pref header bits! 
bindingHeaders[prefHeader(MISSING_PROP_PREF)] = ( - bindingHeaders.get(prefHeader(MISSING_PROP_PREF)) or - descriptorsHaveInstrumentedProps) + bindingHeaders.get(prefHeader(MISSING_PROP_PREF)) + or descriptorsHaveInstrumentedProps + ) bindingHeaders["mozilla/dom/SimpleGlobalObject.h"] = any( - CGDictionary.dictionarySafeToJSONify(d) for d in dictionaries) + CGDictionary.dictionarySafeToJSONify(d) for d in dictionaries + ) bindingHeaders["XrayWrapper.h"] = any( - d.wantsXrays and d.wantsXrayExpandoClass for d in descriptors) + d.wantsXrays and d.wantsXrayExpandoClass for d in descriptors + ) bindingHeaders["mozilla/dom/XrayExpandoClass.h"] = any( - d.wantsXrays for d in descriptors) + d.wantsXrays for d in descriptors + ) bindingHeaders["mozilla/dom/StructuredCloneTags.h"] = any( - d.interface.isSerializable() for d in descriptors) - bindingHeaders["mozilla/Atomics.h"] = any( - d.wantsXrays for d in descriptors) + d.interface.isSerializable() for d in descriptors + ) + bindingHeaders["mozilla/Atomics.h"] = any(d.wantsXrays for d in descriptors) for ancestor in (findAncestorWithInstrumentedProps(d) for d in descriptors): if not ancestor: @@ -15700,7 +18289,9 @@ def getName(unionTypeOrDictionary): assert unionTypeOrDictionary.isType() and unionTypeOrDictionary.isUnion() return unionTypeOrDictionary.name - for t in dependencySortObjects(dictionaries + unionStructs, getDependencies, getName): + for t in dependencySortObjects( + dictionaries + unionStructs, getDependencies, getName + ): if t.isDictionary(): cgthings.append(CGDictionary(t, config)) else: @@ -15711,8 +18302,9 @@ def getName(unionTypeOrDictionary): # Do codegen for all the callbacks. cgthings.extend(CGCallbackFunction(c, config) for c in callbacks) - cgthings.extend([CGNamespace('binding_detail', CGFastCallback(c)) - for c in callbacks]) + cgthings.extend( + [CGNamespace("binding_detail", CGFastCallback(c)) for c in callbacks] + ) # Do codegen for all the descriptors cgthings.extend([CGDescriptor(x) for x in descriptors]) @@ -15720,54 +18312,70 @@ def getName(unionTypeOrDictionary): # Do codegen for all the callback interfaces. cgthings.extend([CGCallbackInterface(x) for x in callbackDescriptors]) - cgthings.extend([CGNamespace('binding_detail', - CGFastCallback(x.interface)) - for x in callbackDescriptors]) + cgthings.extend( + [ + CGNamespace("binding_detail", CGFastCallback(x.interface)) + for x in callbackDescriptors + ] + ) # Do codegen for JS implemented classes def getParentDescriptor(desc): if not desc.interface.parent: return set() return {desc.getDescriptor(desc.interface.parent.identifier.name)} - for x in dependencySortObjects(jsImplemented, getParentDescriptor, - lambda d: d.interface.identifier.name): - cgthings.append(CGCallbackInterface(x, spiderMonkeyInterfacesAreStructs=True)) + + for x in dependencySortObjects( + jsImplemented, getParentDescriptor, lambda d: d.interface.identifier.name + ): + cgthings.append( + CGCallbackInterface(x, spiderMonkeyInterfacesAreStructs=True) + ) cgthings.append(CGJSImplClass(x)) # And make sure we have the right number of newlines at the end curr = CGWrapper(CGList(cgthings, "\n\n"), post="\n\n") # Wrap all of that in our namespaces. 
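Dictionaries and union structs have to be emitted after anything they depend on, which is what dependencySortObjects (reformatted earlier in this file) does: it repeatedly peels off the objects whose remaining dependencies are already handled and raises on a cycle. A self-contained sketch of the same loop over plain strings:

```python
# Self-contained sketch of the dependencySortObjects loop: repeatedly move the
# objects with no unsatisfied dependencies, failing loudly on a cycle.
def dependency_sort(objects, deps):
    objects = set(objects)
    ordered = []
    while objects:
        to_move = [o for o in objects if not (deps.get(o, set()) & objects)]
        if not to_move:
            raise TypeError("Loop in dependency graph\n" + "\n".join(sorted(objects)))
        objects -= set(to_move)
        ordered.extend(sorted(to_move))  # sorted for stable output, like the real code
    return ordered


print(dependency_sort(["C", "B", "A"], {"C": {"B"}, "B": {"A"}}))
# ['A', 'B', 'C']
```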
- curr = CGNamespace.build(['mozilla', 'dom'], - CGWrapper(curr, pre="\n")) + curr = CGNamespace.build(["mozilla", "dom"], CGWrapper(curr, pre="\n")) - curr = CGList([CGForwardDeclarations(config, descriptors, - callbacks, - dictionaries, - callbackDescriptors + jsImplemented, - additionalDeclarations=unionDeclarations), - curr], - "\n") + curr = CGList( + [ + CGForwardDeclarations( + config, + descriptors, + callbacks, + dictionaries, + callbackDescriptors + jsImplemented, + additionalDeclarations=unionDeclarations, + ), + curr, + ], + "\n", + ) # Add header includes. - bindingHeaders = [header - for header, include in six.iteritems(bindingHeaders) - if include] + bindingHeaders = [ + header for header, include in six.iteritems(bindingHeaders) if include + ] bindingDeclareHeaders = [ - header for header, include in six.iteritems(bindingDeclareHeaders) + header + for header, include in six.iteritems(bindingDeclareHeaders) if include ] - curr = CGHeaders(descriptors, - dictionaries, - callbacks, - callbackDescriptors, - bindingDeclareHeaders, - bindingHeaders, - prefix, - curr, - config, - jsImplemented) + curr = CGHeaders( + descriptors, + dictionaries, + callbacks, + callbackDescriptors, + bindingDeclareHeaders, + bindingHeaders, + prefix, + curr, + config, + jsImplemented, + ) # Add include guards. curr = CGIncludeGuard(prefix, curr) @@ -15775,8 +18383,10 @@ def getParentDescriptor(desc): # Add the auto-generated comment. curr = CGWrapper( curr, - pre=(AUTOGENERATED_WITH_SOURCE_WARNING_COMMENT % - os.path.basename(webIDLFile))) + pre=( + AUTOGENERATED_WITH_SOURCE_WARNING_COMMENT % os.path.basename(webIDLFile) + ), + ) # Store the final result. self.root = curr @@ -15792,11 +18402,23 @@ def deps(self): class CGNativeMember(ClassMethod): - def __init__(self, descriptorProvider, member, name, signature, extendedAttrs, - breakAfter=True, passJSBitsAsNeeded=True, visibility="public", - spiderMonkeyInterfacesAreStructs=True, - variadicIsSequence=False, resultNotAddRefed=False, - virtual=False, override=False, canRunScript=False): + def __init__( + self, + descriptorProvider, + member, + name, + signature, + extendedAttrs, + breakAfter=True, + passJSBitsAsNeeded=True, + visibility="public", + spiderMonkeyInterfacesAreStructs=True, + variadicIsSequence=False, + resultNotAddRefed=False, + virtual=False, + override=False, + canRunScript=False, + ): """ If spiderMonkeyInterfacesAreStructs is false, SpiderMonkey interfaces will be passed as JS::Handle. If it's true they will be @@ -15814,20 +18436,24 @@ def __init__(self, descriptorProvider, member, name, signature, extendedAttrs, self.spiderMonkeyInterfacesAreStructs = spiderMonkeyInterfacesAreStructs self.variadicIsSequence = variadicIsSequence breakAfterSelf = "\n" if breakAfter else "" - ClassMethod.__init__(self, name, - self.getReturnType(signature[0], False), - self.getArgs(signature[0], signature[1]), - static=member.isStatic(), - # Mark our getters, which are attrs that - # have a non-void return type, as const. - const=(not member.isStatic() and member.isAttr() and - not signature[0].isVoid()), - breakAfterReturnDecl=" ", - breakAfterSelf=breakAfterSelf, - visibility=visibility, - virtual=virtual, - override=override, - canRunScript=canRunScript) + ClassMethod.__init__( + self, + name, + self.getReturnType(signature[0], False), + self.getArgs(signature[0], signature[1]), + static=member.isStatic(), + # Mark our getters, which are attrs that + # have a non-void return type, as const. 
+ const=( + not member.isStatic() and member.isAttr() and not signature[0].isVoid() + ), + breakAfterReturnDecl=" ", + breakAfterSelf=breakAfterSelf, + visibility=visibility, + virtual=virtual, + override=override, + canRunScript=canRunScript, + ) def getReturnType(self, type, isMember): return self.getRetvalInfo(type, isMember)[0] @@ -15856,9 +18482,11 @@ def getRetvalInfo(self, type, isMember): if type.nullable(): result = CGTemplatedType("Nullable", result) defaultReturnArg = "" - return (result.define(), - "%s(%s)" % (result.define(), defaultReturnArg), - "return ${declName};\n") + return ( + result.define(), + "%s(%s)" % (result.define(), defaultReturnArg), + "return ${declName};\n", + ) if type.isJSString(): if isMember: raise TypeError("JSString not supported as return type member") @@ -15879,8 +18507,7 @@ def getRetvalInfo(self, type, isMember): if type.isEnum(): enumName = type.unroll().inner.identifier.name if type.nullable(): - enumName = CGTemplatedType("Nullable", - CGGeneric(enumName)).define() + enumName = CGTemplatedType("Nullable", CGGeneric(enumName)).define() defaultValue = "%s()" % enumName else: defaultValue = "%s(0)" % enumName @@ -15888,8 +18515,11 @@ def getRetvalInfo(self, type, isMember): if type.isGeckoInterface() or type.isPromise(): if type.isGeckoInterface(): iface = type.unroll().inner - result = CGGeneric(self.descriptorProvider.getDescriptor( - iface.identifier.name).prettyNativeType) + result = CGGeneric( + self.descriptorProvider.getDescriptor( + iface.identifier.name + ).prettyNativeType + ) else: result = CGGeneric("Promise") if self.resultAlreadyAddRefed: @@ -15901,9 +18531,7 @@ def getRetvalInfo(self, type, isMember): warning = "" else: warning = "// Return a raw pointer here to avoid refcounting, but make sure it's safe (the object should be kept alive by the callee).\n" - result = CGWrapper(result, - pre=("%s%s<" % (warning, holder)), - post=">") + result = CGWrapper(result, pre=("%s%s<" % (warning, holder)), post=">") else: result = CGWrapper(result, post="*") # Since we always force an owning type for callback return values, @@ -15911,8 +18539,11 @@ def getRetvalInfo(self, type, isMember): # .forget() to get our already_AddRefed. return result.define(), "nullptr", "return ${declName}.forget();\n" if type.isCallback(): - return ("already_AddRefed<%s>" % type.unroll().callback.identifier.name, - "nullptr", "return ${declName}.forget();\n") + return ( + "already_AddRefed<%s>" % type.unroll().callback.identifier.name, + "nullptr", + "return ${declName}.forget();\n", + ) if type.isAny(): if isMember: # No need for a third element in the isMember case @@ -15930,7 +18561,9 @@ def getRetvalInfo(self, type, isMember): # No need for a third element in the isMember case return "JSObject*", None, None if type.nullable(): - returnCode = "${declName}.IsNull() ? nullptr : ${declName}.Value().Obj()" + returnCode = ( + "${declName}.IsNull() ? nullptr : ${declName}.Value().Obj()" + ) else: returnCode = "${declName}.Obj()" return "void", "", "aRetVal.set(%s);\n" % returnCode @@ -15941,13 +18574,15 @@ def getRetvalInfo(self, type, isMember): assert not isMember # Outparam. 
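getRetvalInfo answers the same three questions for every WebIDL return type: which C++ type to declare, what to return on early failure, and which statement hands the result back. A toy mapping in that shape; the cases below are simplified illustrations, not the full Codegen.py logic:

```python
# Toy version of the (declType, defaultReturn, returnStatement) triples that
# getRetvalInfo produces; the real code handles many more cases and flags.
def retval_info(idl_type):
    if idl_type == "boolean":
        return "bool", "false", "return ${declName};\n"
    if idl_type == "DOMString":
        # Strings go out through an out-parameter, so the C++ return type is void.
        return "void", "", "aRetVal = ${declName};\n"
    if idl_type == "Promise":
        return "already_AddRefed<Promise>", "nullptr", "return ${declName}.forget();\n"
    raise TypeError("Don't know how to declare return value for %s" % idl_type)


for t in ("boolean", "DOMString", "Promise"):
    print(t, "->", retval_info(t))
```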
if type.nullable(): - returnCode = dedent(""" + returnCode = dedent( + """ if (${declName}.IsNull()) { aRetVal.SetNull(); } else { aRetVal.SetValue() = std::move(${declName}.Value()); } - """) + """ + ) else: returnCode = "aRetVal = std::move(${declName});\n" return "void", "", returnCode @@ -15975,8 +18610,7 @@ def getRetvalInfo(self, type, isMember): # In this case we convert directly into our outparam to start with return "void", "", "" - raise TypeError("Don't know how to declare return value for %s" % - type) + raise TypeError("Don't know how to declare return value for %s" % type) def getArgs(self, returnType, argList): args = [self.getArg(arg) for arg in argList] @@ -16003,8 +18637,9 @@ def getArgs(self, returnType, argList): returnType = returnType.inner # And now the actual underlying type elementDecl = self.getReturnType(returnType.inner, True) - type = CGTemplatedType("Record", [recordKeyDeclType(returnType), - CGGeneric(elementDecl)]) + type = CGTemplatedType( + "Record", [recordKeyDeclType(returnType), CGGeneric(elementDecl)] + ) if nullable: type = CGTemplatedType("Nullable", type) args.append(Argument("%s&" % type.define(), "aRetVal")) @@ -16017,20 +18652,22 @@ def getArgs(self, returnType, argList): dictType = CGTemplatedType("Nullable", dictType) args.append(Argument("%s&" % dictType.define(), "aRetVal")) elif returnType.isUnion(): - args.append(Argument("%s&" % - CGUnionStruct.unionTypeDecl(returnType, True), - "aRetVal")) + args.append( + Argument( + "%s&" % CGUnionStruct.unionTypeDecl(returnType, True), "aRetVal" + ) + ) elif returnType.isAny(): args.append(Argument("JS::MutableHandle", "aRetVal")) elif returnType.isObject() or returnType.isSpiderMonkeyInterface(): args.append(Argument("JS::MutableHandle", "aRetVal")) # And the nsIPrincipal - if 'needsSubjectPrincipal' in self.extendedAttrs: + if "needsSubjectPrincipal" in self.extendedAttrs: # Cheat and assume self.descriptorProvider is a descriptor if self.descriptorProvider.interface.isExposedInAnyWorker(): args.append(Argument("Maybe", "aSubjectPrincipal")) - elif 'needsNonSystemSubjectPrincipal' in self.extendedAttrs: + elif "needsNonSystemSubjectPrincipal" in self.extendedAttrs: args.append(Argument("nsIPrincipal*", "aPrincipal")) else: args.append(Argument("nsIPrincipal&", "aPrincipal")) @@ -16038,10 +18675,10 @@ def getArgs(self, returnType, argList): if needsCallerType(self.member): args.append(Argument("CallerType", "aCallerType")) # And the ErrorResult or OOMReporter - if 'infallible' not in self.extendedAttrs: + if "infallible" not in self.extendedAttrs: # Use aRv so it won't conflict with local vars named "rv" args.append(Argument("ErrorResult&", "aRv")) - elif 'canOOM' in self.extendedAttrs: + elif "canOOM" in self.extendedAttrs: args.append(Argument("OOMReporter&", "aRv")) # The legacycaller thisval @@ -16050,13 +18687,22 @@ def getArgs(self, returnType, argList): assert self.member.isIdentifierLess() args.insert(0, Argument("const JS::Value&", "aThisVal")) # And jscontext bits. 
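getArgs builds the native signature in a fixed order: the declared IDL arguments, an out-parameter for returns that do not fit in a C++ return value, then the optional principal, caller-type and error-result trailing arguments, and finally the JSContext pushed onto the front. A compressed sketch of that ordering, with simplified flags standing in for the extended-attribute checks:

```python
# Compressed sketch of the argument-ordering logic in CGNativeMember.getArgs;
# the boolean flags below stand in for the extended-attribute checks.
def build_args(idl_args, returns_outparam, needs_subject_principal,
               needs_caller_type, infallible, needs_cx):
    args = list(idl_args)
    if returns_outparam:
        args.append("nsString& aRetVal")
    if needs_subject_principal:
        args.append("nsIPrincipal& aPrincipal")
    if needs_caller_type:
        args.append("CallerType aCallerType")
    if not infallible:
        args.append("ErrorResult& aRv")  # named aRv so it won't clash with local "rv"
    if needs_cx:
        args.insert(0, "JSContext* cx")
    return args


print(build_args(["const nsAString& aArg"], True, False, True, False, True))
```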
- if needCx(returnType, argList, self.extendedAttrs, - self.passJSBitsAsNeeded, self.member.isStatic()): + if needCx( + returnType, + argList, + self.extendedAttrs, + self.passJSBitsAsNeeded, + self.member.isStatic(), + ): args.insert(0, Argument("JSContext*", "cx")) - if needScopeObject(returnType, argList, self.extendedAttrs, - self.descriptorProvider.wrapperCache, - self.passJSBitsAsNeeded, - self.member.getExtendedAttribute("StoreInSlot")): + if needScopeObject( + returnType, + argList, + self.extendedAttrs, + self.descriptorProvider.wrapperCache, + self.passJSBitsAsNeeded, + self.member.getExtendedAttribute("StoreInSlot"), + ): args.insert(1, Argument("JS::Handle", "obj")) # And if we're static, a global if self.member.isStatic(): @@ -16109,7 +18755,7 @@ def doGetArgType(self, type, optional, isMember): return "WindowProxyHolder", True, False argIsPointer = type.nullable() or iface.isExternal() - forceOwningType = (iface.isCallback() or isMember) + forceOwningType = iface.isCallback() or isMember if argIsPointer: if (optional or isMember) and forceOwningType: typeDecl = "RefPtr<%s>" @@ -16123,9 +18769,16 @@ def doGetArgType(self, type, optional, isMember): typeDecl = "NonNull<%s>" else: typeDecl = "%s&" - return ((typeDecl % - self.descriptorProvider.getDescriptor(iface.identifier.name).prettyNativeType), - False, False) + return ( + ( + typeDecl + % self.descriptorProvider.getDescriptor( + iface.identifier.name + ).prettyNativeType + ), + False, + False, + ) if type.isSpiderMonkeyInterface(): if not self.spiderMonkeyInterfacesAreStructs: @@ -16226,8 +18879,9 @@ def getArg(self, arg): """ Get the full argument declaration for an argument """ - decl, ref = self.getArgType(arg.type, arg.canHaveMissingValue(), - "Variadic" if arg.variadic else False) + decl, ref = self.getArgType( + arg.type, arg.canHaveMissingValue(), "Variadic" if arg.variadic else False + ) if ref: decl = CGWrapper(decl, pre="const ", post="&") @@ -16239,13 +18893,16 @@ def arguments(self): class CGExampleMethod(CGNativeMember): def __init__(self, descriptor, method, signature, isConstructor, breakAfter=True): - CGNativeMember.__init__(self, descriptor, method, - CGSpecializedMethod.makeNativeName(descriptor, - method), - signature, - descriptor.getExtendedAttributes(method), - breakAfter=breakAfter, - variadicIsSequence=True) + CGNativeMember.__init__( + self, + descriptor, + method, + CGSpecializedMethod.makeNativeName(descriptor, method), + signature, + descriptor.getExtendedAttributes(method), + breakAfter=breakAfter, + variadicIsSequence=True, + ) def declare(self, cgClass): assert self.member.isMethod() @@ -16254,21 +18911,23 @@ def declare(self, cgClass): # machinery, so the implementor of the interface doesn't have to worry # about it. 
if self.member.isMaplikeOrSetlikeOrIterableMethod(): - return '' - return CGNativeMember.declare(self, cgClass); + return "" + return CGNativeMember.declare(self, cgClass) def define(self, cgClass): - return '' + return "" class CGExampleGetter(CGNativeMember): def __init__(self, descriptor, attr): - CGNativeMember.__init__(self, descriptor, attr, - CGSpecializedGetter.makeNativeName(descriptor, - attr), - (attr.type, []), - descriptor.getExtendedAttributes(attr, - getter=True)) + CGNativeMember.__init__( + self, + descriptor, + attr, + CGSpecializedGetter.makeNativeName(descriptor, attr), + (attr.type, []), + descriptor.getExtendedAttributes(attr, getter=True), + ) def declare(self, cgClass): assert self.member.isAttr() @@ -16277,32 +18936,43 @@ def declare(self, cgClass): # binding machinery, so the implementor of the interface doesn't have to # worry about it. if self.member.isMaplikeOrSetlikeAttr(): - return '' - return CGNativeMember.declare(self, cgClass); + return "" + return CGNativeMember.declare(self, cgClass) def define(self, cgClass): - return '' + return "" class CGExampleSetter(CGNativeMember): def __init__(self, descriptor, attr): - CGNativeMember.__init__(self, descriptor, attr, - CGSpecializedSetter.makeNativeName(descriptor, - attr), - (BuiltinTypes[IDLBuiltinType.Types.void], - [FakeArgument(attr.type, attr)]), - descriptor.getExtendedAttributes(attr, - setter=True)) + CGNativeMember.__init__( + self, + descriptor, + attr, + CGSpecializedSetter.makeNativeName(descriptor, attr), + (BuiltinTypes[IDLBuiltinType.Types.void], [FakeArgument(attr.type, attr)]), + descriptor.getExtendedAttributes(attr, setter=True), + ) def define(self, cgClass): - return '' + return "" class CGBindingImplClass(CGClass): """ Common codegen for generating a C++ implementation of a WebIDL interface """ - def __init__(self, descriptor, cgMethod, cgGetter, cgSetter, wantGetParent=True, wrapMethodName="WrapObject", skipStaticMethods=False): + + def __init__( + self, + descriptor, + cgMethod, + cgGetter, + cgSetter, + wantGetParent=True, + wrapMethodName="WrapObject", + skipStaticMethods=False, + ): """ cgMethod, cgGetter and cgSetter are classes used to codegen methods, getters and setters. @@ -16319,11 +18989,10 @@ def appendMethod(m, isConstructor=False): for s in sigs[:-1]: # Don't put a blank line after overloads, until we # get to the last one. - self.methodDecls.append(cgMethod(descriptor, m, s, - isConstructor, - breakAfter=False)) - self.methodDecls.append(cgMethod(descriptor, m, sigs[-1], - isConstructor)) + self.methodDecls.append( + cgMethod(descriptor, m, s, isConstructor, breakAfter=False) + ) + self.methodDecls.append(cgMethod(descriptor, m, sigs[-1], isConstructor)) if iface.ctor(): appendMethod(iface.ctor(), isConstructor=True) @@ -16358,8 +19027,11 @@ def appendSpecialOperation(name, op): # This is a total hack. The '&' belongs with the # type, not the name! But it works, and is simpler # than trying to somehow make this pretty. - args.append(FakeArgument(BuiltinTypes[IDLBuiltinType.Types.boolean], - op, name="&found")) + args.append( + FakeArgument( + BuiltinTypes[IDLBuiltinType.Types.boolean], op, name="&found" + ) + ) if name == "Stringifier": if op.isIdentifierLess(): # XXXbz I wish we were consistent about our renaming here. 
@@ -16375,10 +19047,15 @@ def appendSpecialOperation(name, op): # We already added this method return self.methodDecls.append( - CGNativeMember(descriptor, op, - name, - (returnType, args), - descriptor.getExtendedAttributes(op))) + CGNativeMember( + descriptor, + op, + name, + (returnType, args), + descriptor.getExtendedAttributes(op), + ) + ) + # Sort things by name so we get stable ordering in the output. ops = sorted(descriptor.operations.items(), key=lambda x: x[0]) for name, op in ops: @@ -16389,55 +19066,85 @@ def appendSpecialOperation(name, op): # But we don't need it if we already have an infallible # "length" attribute, which we often do. haveLengthAttr = any( - m for m in iface.members if m.isAttr() and - CGSpecializedGetter.makeNativeName(descriptor, m) == "Length") + m + for m in iface.members + if m.isAttr() + and CGSpecializedGetter.makeNativeName(descriptor, m) == "Length" + ) if not haveLengthAttr: self.methodDecls.append( - CGNativeMember(descriptor, FakeMember(), - "Length", - (BuiltinTypes[IDLBuiltinType.Types.unsigned_long], - []), - {"infallible": True})) + CGNativeMember( + descriptor, + FakeMember(), + "Length", + (BuiltinTypes[IDLBuiltinType.Types.unsigned_long], []), + {"infallible": True}, + ) + ) # And if we support named properties we need to be able to # enumerate the supported names. if descriptor.supportsNamedProperties(): self.methodDecls.append( CGNativeMember( - descriptor, FakeMember(), + descriptor, + FakeMember(), "GetSupportedNames", - (IDLSequenceType(None, - BuiltinTypes[IDLBuiltinType.Types.domstring]), - []), - {"infallible": True})) + ( + IDLSequenceType( + None, BuiltinTypes[IDLBuiltinType.Types.domstring] + ), + [], + ), + {"infallible": True}, + ) + ) if descriptor.concrete: - wrapArgs = [Argument('JSContext*', 'aCx'), - Argument('JS::Handle', 'aGivenProto')] + wrapArgs = [ + Argument("JSContext*", "aCx"), + Argument("JS::Handle", "aGivenProto"), + ] if not descriptor.wrapperCache: wrapReturnType = "bool" - wrapArgs.append(Argument('JS::MutableHandle', - 'aReflector')) + wrapArgs.append(Argument("JS::MutableHandle", "aReflector")) else: wrapReturnType = "JSObject*" - self.methodDecls.insert(0, - ClassMethod(wrapMethodName, wrapReturnType, - wrapArgs, virtual=descriptor.wrapperCache, - breakAfterReturnDecl=" ", - override=descriptor.wrapperCache, - body=self.getWrapObjectBody())) + self.methodDecls.insert( + 0, + ClassMethod( + wrapMethodName, + wrapReturnType, + wrapArgs, + virtual=descriptor.wrapperCache, + breakAfterReturnDecl=" ", + override=descriptor.wrapperCache, + body=self.getWrapObjectBody(), + ), + ) if descriptor.hasCEReactions(): - self.methodDecls.insert(0, - ClassMethod("GetDocGroup", "DocGroup*", [], - const=True, - breakAfterReturnDecl=" ", - body=self.getGetDocGroupBody())) + self.methodDecls.insert( + 0, + ClassMethod( + "GetDocGroup", + "DocGroup*", + [], + const=True, + breakAfterReturnDecl=" ", + body=self.getGetDocGroupBody(), + ), + ) if wantGetParent: - self.methodDecls.insert(0, - ClassMethod("GetParentObject", - self.getGetParentObjectReturnType(), - [], const=True, - breakAfterReturnDecl=" ", - body=self.getGetParentObjectBody())) + self.methodDecls.insert( + 0, + ClassMethod( + "GetParentObject", + self.getGetParentObjectReturnType(), + [], + const=True, + breakAfterReturnDecl=" ", + body=self.getGetParentObjectBody(), + ), + ) # Invoke CGClass.__init__ in any subclasses afterwards to do the actual codegen. 
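CGBindingImplClass picks the WrapObject signature from the wrapper-cache setting, as the hunk above shows: wrapper-cached objects return the reflector directly, everything else fills a MutableHandle out-parameter and returns bool. A small sketch of that choice:

```python
# Sketch of how CGBindingImplClass chooses the WrapObject signature based on
# whether the object is wrapper-cached.
def wrap_method_signature(wrapper_cache):
    args = ["JSContext* aCx", "JS::Handle<JSObject*> aGivenProto"]
    if wrapper_cache:
        return_type = "JSObject*"
    else:
        return_type = "bool"
        args.append("JS::MutableHandle<JSObject*> aReflector")
    return "%s WrapObject(%s)" % (return_type, ", ".join(args))


print(wrap_method_signature(True))
print(wrap_method_signature(False))
```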
@@ -16451,7 +19158,8 @@ def getGetParentObjectReturnType(self): // This should return something that eventually allows finding a // path to the global this object is associated with. Most simply, // returning an actual global works. - nsIGlobalObject*""") + nsIGlobalObject*""" + ) def getGetParentObjectBody(self): return None @@ -16467,20 +19175,33 @@ class CGExampleClass(CGBindingImplClass): """ Codegen for the actual example class implementation for this descriptor """ + def __init__(self, descriptor): - CGBindingImplClass.__init__(self, descriptor, - CGExampleMethod, CGExampleGetter, CGExampleSetter, - wantGetParent=descriptor.wrapperCache) + CGBindingImplClass.__init__( + self, + descriptor, + CGExampleMethod, + CGExampleGetter, + CGExampleSetter, + wantGetParent=descriptor.wrapperCache, + ) self.parentIface = descriptor.interface.parent if self.parentIface: - self.parentDesc = descriptor.getDescriptor( - self.parentIface.identifier.name) + self.parentDesc = descriptor.getDescriptor(self.parentIface.identifier.name) bases = [ClassBase(self.nativeLeafName(self.parentDesc))] else: - bases = [ClassBase("nsISupports /* or NonRefcountedDOMObject if this is a non-refcounted object */")] + bases = [ + ClassBase( + "nsISupports /* or NonRefcountedDOMObject if this is a non-refcounted object */" + ) + ] if descriptor.wrapperCache: - bases.append(ClassBase("nsWrapperCache /* Change wrapperCache in the binding configuration if you don't want this */")) + bases.append( + ClassBase( + "nsWrapperCache /* Change wrapperCache in the binding configuration if you don't want this */" + ) + ) destructorVisibility = "protected" if self.parentIface: @@ -16488,34 +19209,42 @@ def __init__(self, descriptor): "public:\n" " NS_DECL_ISUPPORTS_INHERITED\n" " NS_DECL_CYCLE_COLLECTION_SCRIPT_HOLDER_CLASS_INHERITED(%s, %s)\n" - "\n" % (self.nativeLeafName(descriptor), - self.nativeLeafName(self.parentDesc))) + "\n" + % ( + self.nativeLeafName(descriptor), + self.nativeLeafName(self.parentDesc), + ) + ) else: extradeclarations = ( "public:\n" " NS_DECL_CYCLE_COLLECTING_ISUPPORTS\n" " NS_DECL_CYCLE_COLLECTION_SCRIPT_HOLDER_CLASS(%s)\n" - "\n" % self.nativeLeafName(descriptor)) + "\n" % self.nativeLeafName(descriptor) + ) if descriptor.interface.hasChildInterfaces(): decorators = "" else: decorators = "final" - CGClass.__init__(self, self.nativeLeafName(descriptor), - bases=bases, - constructors=[ClassConstructor([], - visibility="public")], - destructor=ClassDestructor(visibility=destructorVisibility), - methods=self.methodDecls, - decorators=decorators, - extradeclarations=extradeclarations) + CGClass.__init__( + self, + self.nativeLeafName(descriptor), + bases=bases, + constructors=[ClassConstructor([], visibility="public")], + destructor=ClassDestructor(visibility=destructorVisibility), + methods=self.methodDecls, + decorators=decorators, + extradeclarations=extradeclarations, + ) def define(self): # Just override CGClass and do our own thing nativeType = self.nativeLeafName(self.descriptor) - ctordtor = fill(""" + ctordtor = fill( + """ ${nativeType}::${nativeType}() { // Add |MOZ_COUNT_CTOR(${nativeType});| for a non-refcounted object. @@ -16526,10 +19255,12 @@ def define(self): // Add |MOZ_COUNT_DTOR(${nativeType});| for a non-refcounted object. } """, - nativeType=nativeType) + nativeType=nativeType, + ) if self.parentIface: - ccImpl = fill(""" + ccImpl = fill( + """ // Only needed for refcounted objects. 
# error "If you don't have members that need cycle collection, @@ -16544,9 +19275,11 @@ def define(self): """, nativeType=nativeType, - parentType=self.nativeLeafName(self.parentDesc)) + parentType=self.nativeLeafName(self.parentDesc), + ) else: - ccImpl = fill(""" + ccImpl = fill( + """ // Only needed for refcounted objects. NS_IMPL_CYCLE_COLLECTION_WRAPPERCACHE_0(${nativeType}) @@ -16558,7 +19291,8 @@ def define(self): NS_INTERFACE_MAP_END """, - nativeType=nativeType) + nativeType=nativeType, + ) classImpl = ccImpl + ctordtor + "\n" if self.descriptor.concrete: @@ -16583,13 +19317,14 @@ def define(self): nativeType=nativeType, reflectorArg=reflectorArg, ifaceName=self.descriptor.name, - reflectorPassArg=reflectorPassArg) + reflectorPassArg=reflectorPassArg, + ) return classImpl @staticmethod def nativeLeafName(descriptor): - return descriptor.nativeType.split('::')[-1] + return descriptor.nativeType.split("::")[-1] class CGExampleRoot(CGThing): @@ -16598,11 +19333,11 @@ class CGExampleRoot(CGThing): class and call declare or define to generate header or cpp code, respectively. """ + def __init__(self, config, interfaceName): descriptor = config.getDescriptor(interfaceName) - self.root = CGWrapper(CGExampleClass(descriptor), - pre="\n", post="\n") + self.root = CGWrapper(CGExampleClass(descriptor), pre="\n", post="\n") self.root = CGNamespace.build(["mozilla", "dom"], self.root) @@ -16625,33 +19360,50 @@ def __init__(self, config, interfaceName): for arg in sig[1]: builder.forwardDeclareForType(arg.type, config) - self.root = CGList([builder.build(), - self.root], "\n") + self.root = CGList([builder.build(), self.root], "\n") # Throw in our #includes - self.root = CGHeaders([], [], [], [], - ["nsWrapperCache.h", - "nsCycleCollectionParticipant.h", - "mozilla/Attributes.h", - "mozilla/ErrorResult.h", - "mozilla/dom/BindingDeclarations.h", - "js/TypeDecls.h"], - ["mozilla/dom/%s.h" % interfaceName, - ("mozilla/dom/%s" % - CGHeaders.getDeclarationFilename(descriptor.interface))], "", self.root) + self.root = CGHeaders( + [], + [], + [], + [], + [ + "nsWrapperCache.h", + "nsCycleCollectionParticipant.h", + "mozilla/Attributes.h", + "mozilla/ErrorResult.h", + "mozilla/dom/BindingDeclarations.h", + "js/TypeDecls.h", + ], + [ + "mozilla/dom/%s.h" % interfaceName, + ( + "mozilla/dom/%s" + % CGHeaders.getDeclarationFilename(descriptor.interface) + ), + ], + "", + self.root, + ) # And now some include guards self.root = CGIncludeGuard(interfaceName, self.root) # And our license block comes before everything else - self.root = CGWrapper(self.root, pre=dedent(""" + self.root = CGWrapper( + self.root, + pre=dedent( + """ /* -*- Mode: C++; tab-width: 2; indent-tabs-mode: nil; c-basic-offset: 2 -*- */ /* vim:set ts=2 sw=2 sts=2 et cindent: */ /* This Source Code Form is subject to the terms of the Mozilla Public * License, v. 2.0. If a copy of the MPL was not distributed with this * file, You can obtain one at http://mozilla.org/MPL/2.0/. */ - """)) + """ + ), + ) def declare(self): return self.root.declare() @@ -16669,17 +19421,35 @@ class CGJSImplMember(CGNativeMember): Base class for generating code for the members of the implementation class for a JS-implemented WebIDL interface. 
""" - def __init__(self, descriptorProvider, member, name, signature, - extendedAttrs, breakAfter=True, passJSBitsAsNeeded=True, - visibility="public", variadicIsSequence=False, - virtual=False, override=False): - CGNativeMember.__init__(self, descriptorProvider, member, name, - signature, extendedAttrs, breakAfter=breakAfter, - passJSBitsAsNeeded=passJSBitsAsNeeded, - visibility=visibility, - variadicIsSequence=variadicIsSequence, - virtual=virtual, - override=override) + + def __init__( + self, + descriptorProvider, + member, + name, + signature, + extendedAttrs, + breakAfter=True, + passJSBitsAsNeeded=True, + visibility="public", + variadicIsSequence=False, + virtual=False, + override=False, + ): + CGNativeMember.__init__( + self, + descriptorProvider, + member, + name, + signature, + extendedAttrs, + breakAfter=breakAfter, + passJSBitsAsNeeded=passJSBitsAsNeeded, + visibility=visibility, + variadicIsSequence=variadicIsSequence, + virtual=virtual, + override=override, + ) self.body = self.getImpl() def getArgs(self, returnType, argList): @@ -16693,45 +19463,55 @@ class CGJSImplMethod(CGJSImplMember): Class for generating code for the methods for a JS-implemented WebIDL interface. """ + def __init__(self, descriptor, method, signature, isConstructor, breakAfter=True): self.signature = signature self.descriptor = descriptor self.isConstructor = isConstructor - CGJSImplMember.__init__(self, descriptor, method, - CGSpecializedMethod.makeNativeName(descriptor, - method), - signature, - descriptor.getExtendedAttributes(method), - breakAfter=breakAfter, - variadicIsSequence=True, - passJSBitsAsNeeded=False) + CGJSImplMember.__init__( + self, + descriptor, + method, + CGSpecializedMethod.makeNativeName(descriptor, method), + signature, + descriptor.getExtendedAttributes(method), + breakAfter=breakAfter, + variadicIsSequence=True, + passJSBitsAsNeeded=False, + ) def getArgs(self, returnType, argList): if self.isConstructor: # Skip the JS::Compartment bits for constructors; it's handled # manually in getImpl. But we do need our aGivenProto argument. We # allow it to be omitted if the default proto is desired. - return (CGNativeMember.getArgs(self, returnType, argList) + - [Argument("JS::Handle", "aGivenProto", "nullptr")]) + return CGNativeMember.getArgs(self, returnType, argList) + [ + Argument("JS::Handle", "aGivenProto", "nullptr") + ] return CGJSImplMember.getArgs(self, returnType, argList) def getImpl(self): args = self.getArgs(self.signature[0], self.signature[1]) if not self.isConstructor: - return 'return mImpl->%s(%s);\n' % (self.name, ", ".join(arg.name for arg in args)) + return "return mImpl->%s(%s);\n" % ( + self.name, + ", ".join(arg.name for arg in args), + ) assert self.descriptor.interface.isJSImplemented() - if self.name != 'Constructor': - raise TypeError("Named constructors are not supported for JS implemented WebIDL. See bug 851287.") + if self.name != "Constructor": + raise TypeError( + "Named constructors are not supported for JS implemented WebIDL. See bug 851287." + ) if len(self.signature[1]) != 0: # The first two arguments to the constructor implementation are not # arguments to the WebIDL constructor, so don't pass them to # __Init(). The last argument is the prototype we're supposed to # use, and shouldn't get passed to __Init() either. 
- assert args[0].argType == 'const GlobalObject&' - assert args[1].argType == 'JSContext*' - assert args[-1].argType == 'JS::Handle' - assert args[-1].name == 'aGivenProto' + assert args[0].argType == "const GlobalObject&" + assert args[1].argType == "JSContext*" + assert args[-1].argType == "JS::Handle" + assert args[-1].name == "aGivenProto" constructorArgs = [arg.name for arg in args[2:-1]] constructorArgs.append("js::GetNonCCWObjectRealm(scopeObj)") initCall = fill( @@ -16751,7 +19531,8 @@ def getImpl(self): return nullptr; } """, - args=", ".join(constructorArgs)) + args=", ".join(constructorArgs), + ) else: initCall = "" return fill( @@ -16766,18 +19547,17 @@ def getImpl(self): """, contractId=self.descriptor.interface.getJSImplementation(), implClass=self.descriptor.name, - initCall=initCall) + initCall=initCall, + ) # We're always fallible def callbackGetterName(attr, descriptor): - return "Get" + MakeNativeName( - descriptor.binaryNameFor(attr.identifier.name)) + return "Get" + MakeNativeName(descriptor.binaryNameFor(attr.identifier.name)) def callbackSetterName(attr, descriptor): - return "Set" + MakeNativeName( - descriptor.binaryNameFor(attr.identifier.name)) + return "Set" + MakeNativeName(descriptor.binaryNameFor(attr.identifier.name)) class CGJSImplGetter(CGJSImplMember): @@ -16785,20 +19565,24 @@ class CGJSImplGetter(CGJSImplMember): Class for generating code for the getters of attributes for a JS-implemented WebIDL interface. """ + def __init__(self, descriptor, attr): - CGJSImplMember.__init__(self, descriptor, attr, - CGSpecializedGetter.makeNativeName(descriptor, - attr), - (attr.type, []), - descriptor.getExtendedAttributes(attr, - getter=True), - passJSBitsAsNeeded=False) + CGJSImplMember.__init__( + self, + descriptor, + attr, + CGSpecializedGetter.makeNativeName(descriptor, attr), + (attr.type, []), + descriptor.getExtendedAttributes(attr, getter=True), + passJSBitsAsNeeded=False, + ) def getImpl(self): callbackArgs = [arg.name for arg in self.getArgs(self.member.type, [])] - return 'return mImpl->%s(%s);\n' % ( + return "return mImpl->%s(%s);\n" % ( callbackGetterName(self.member, self.descriptorProvider), - ", ".join(callbackArgs)) + ", ".join(callbackArgs), + ) class CGJSImplSetter(CGJSImplMember): @@ -16806,39 +19590,59 @@ class CGJSImplSetter(CGJSImplMember): Class for generating code for the setters of attributes for a JS-implemented WebIDL interface. 
""" + def __init__(self, descriptor, attr): - CGJSImplMember.__init__(self, descriptor, attr, - CGSpecializedSetter.makeNativeName(descriptor, - attr), - (BuiltinTypes[IDLBuiltinType.Types.void], - [FakeArgument(attr.type, attr)]), - descriptor.getExtendedAttributes(attr, - setter=True), - passJSBitsAsNeeded=False) + CGJSImplMember.__init__( + self, + descriptor, + attr, + CGSpecializedSetter.makeNativeName(descriptor, attr), + (BuiltinTypes[IDLBuiltinType.Types.void], [FakeArgument(attr.type, attr)]), + descriptor.getExtendedAttributes(attr, setter=True), + passJSBitsAsNeeded=False, + ) def getImpl(self): - callbackArgs = [arg.name for arg in self.getArgs(BuiltinTypes[IDLBuiltinType.Types.void], - [FakeArgument(self.member.type, self.member)])] - return 'mImpl->%s(%s);\n' % ( + callbackArgs = [ + arg.name + for arg in self.getArgs( + BuiltinTypes[IDLBuiltinType.Types.void], + [FakeArgument(self.member.type, self.member)], + ) + ] + return "mImpl->%s(%s);\n" % ( callbackSetterName(self.member, self.descriptorProvider), - ", ".join(callbackArgs)) + ", ".join(callbackArgs), + ) class CGJSImplClass(CGBindingImplClass): def __init__(self, descriptor): - CGBindingImplClass.__init__(self, descriptor, CGJSImplMethod, CGJSImplGetter, CGJSImplSetter, skipStaticMethods=True) + CGBindingImplClass.__init__( + self, + descriptor, + CGJSImplMethod, + CGJSImplGetter, + CGJSImplSetter, + skipStaticMethods=True, + ) if descriptor.interface.parent: parentClass = descriptor.getDescriptor( - descriptor.interface.parent.identifier.name).jsImplParent + descriptor.interface.parent.identifier.name + ).jsImplParent baseClasses = [ClassBase(parentClass)] isupportsDecl = "NS_DECL_ISUPPORTS_INHERITED\n" - ccDecl = ("NS_DECL_CYCLE_COLLECTION_CLASS_INHERITED(%s, %s)\n" % - (descriptor.name, parentClass)) - constructorBody = dedent(""" + ccDecl = "NS_DECL_CYCLE_COLLECTION_CLASS_INHERITED(%s, %s)\n" % ( + descriptor.name, + parentClass, + ) + constructorBody = dedent( + """ // Make sure we're an nsWrapperCache already MOZ_ASSERT(static_cast(this)); - """) + """ + ) extradefinitions = fill( """ NS_IMPL_CYCLE_COLLECTION_INHERITED(${ifaceName}, ${parentClass}, mImpl, mParent) @@ -16848,13 +19652,17 @@ def __init__(self, descriptor): NS_INTERFACE_MAP_END_INHERITING(${parentClass}) """, ifaceName=self.descriptor.name, - parentClass=parentClass) + parentClass=parentClass, + ) else: - baseClasses = [ClassBase("nsSupportsWeakReference"), - ClassBase("nsWrapperCache")] + baseClasses = [ + ClassBase("nsSupportsWeakReference"), + ClassBase("nsWrapperCache"), + ] isupportsDecl = "NS_DECL_CYCLE_COLLECTING_ISUPPORTS\n" - ccDecl = ("NS_DECL_CYCLE_COLLECTION_SCRIPT_HOLDER_CLASS(%s)\n" % - descriptor.name) + ccDecl = ( + "NS_DECL_CYCLE_COLLECTION_SCRIPT_HOLDER_CLASS(%s)\n" % descriptor.name + ) extradefinitions = fill( """ NS_IMPL_CYCLE_COLLECTION_CLASS(${ifaceName}) @@ -16877,7 +19685,8 @@ def __init__(self, descriptor): NS_INTERFACE_MAP_ENTRY(nsISupportsWeakReference) NS_INTERFACE_MAP_END """, - ifaceName=self.descriptor.name) + ifaceName=self.descriptor.name, + ) extradeclarations = fill( """ @@ -16892,16 +19701,19 @@ def __init__(self, descriptor): """, isupportsDecl=isupportsDecl, ccDecl=ccDecl, - jsImplName=jsImplName(descriptor.name)) + jsImplName=jsImplName(descriptor.name), + ) if descriptor.interface.getExtendedAttribute("WantsEventListenerHooks"): # No need to do too much sanity checking here; the # generated code will fail to compile if the methods we # try to overrid aren't on a superclass. 
self.methodDecls.extend( - self.getEventHookMethod(parentClass, "EventListenerAdded")) + self.getEventHookMethod(parentClass, "EventListenerAdded") + ) self.methodDecls.extend( - self.getEventHookMethod(parentClass, "EventListenerRemoved")) + self.getEventHookMethod(parentClass, "EventListenerRemoved") + ) if descriptor.interface.hasChildInterfaces(): decorators = "" @@ -16912,54 +19724,73 @@ def __init__(self, descriptor): destructor = ClassDestructor(virtual=False, visibility="private") baseConstructors = [ - ("mImpl(new %s(nullptr, aJSImplObject, aJSImplGlobal, /* aIncumbentGlobal = */ nullptr))" % - jsImplName(descriptor.name)), - "mParent(aParent)"] + ( + "mImpl(new %s(nullptr, aJSImplObject, aJSImplGlobal, /* aIncumbentGlobal = */ nullptr))" + % jsImplName(descriptor.name) + ), + "mParent(aParent)", + ] parentInterface = descriptor.interface.parent while parentInterface: if parentInterface.isJSImplemented(): baseConstructors.insert( - 0, "%s(aJSImplObject, aJSImplGlobal, aParent)" % parentClass) + 0, "%s(aJSImplObject, aJSImplGlobal, aParent)" % parentClass + ) break parentInterface = parentInterface.parent if not parentInterface and descriptor.interface.parent: # We only have C++ ancestors, so only pass along the window - baseConstructors.insert(0, - "%s(aParent)" % parentClass) + baseConstructors.insert(0, "%s(aParent)" % parentClass) constructor = ClassConstructor( - [Argument("JS::Handle", "aJSImplObject"), - Argument("JS::Handle", "aJSImplGlobal"), - Argument("nsIGlobalObject*", "aParent")], + [ + Argument("JS::Handle", "aJSImplObject"), + Argument("JS::Handle", "aJSImplGlobal"), + Argument("nsIGlobalObject*", "aParent"), + ], visibility="public", - baseConstructors=baseConstructors) + baseConstructors=baseConstructors, + ) self.methodDecls.append( - ClassMethod("_Create", - "bool", - JSNativeArguments(), - static=True, - body=self.getCreateFromExistingBody())) - - if (descriptor.interface.isJSImplemented() and - descriptor.interface.maplikeOrSetlikeOrIterable and - descriptor.interface.maplikeOrSetlikeOrIterable.isMaplike()): + ClassMethod( + "_Create", + "bool", + JSNativeArguments(), + static=True, + body=self.getCreateFromExistingBody(), + ) + ) + + if ( + descriptor.interface.isJSImplemented() + and descriptor.interface.maplikeOrSetlikeOrIterable + and descriptor.interface.maplikeOrSetlikeOrIterable.isMaplike() + ): self.methodDecls.append( - ClassMethod("__OnGet", - "void", - [Argument("JS::Handle", "aKey"), - Argument("JS::Handle", "aValue"), - Argument("ErrorResult&", "aRv")], - body="mImpl->__OnGet(aKey, aValue, aRv);\n")) - - CGClass.__init__(self, descriptor.name, - bases=baseClasses, - constructors=[constructor], - destructor=destructor, - methods=self.methodDecls, - decorators=decorators, - extradeclarations=extradeclarations, - extradefinitions=extradefinitions) + ClassMethod( + "__OnGet", + "void", + [ + Argument("JS::Handle", "aKey"), + Argument("JS::Handle", "aValue"), + Argument("ErrorResult&", "aRv"), + ], + body="mImpl->__OnGet(aKey, aValue, aRv);\n", + ) + ) + + CGClass.__init__( + self, + descriptor.name, + bases=baseClasses, + constructors=[constructor], + destructor=destructor, + methods=self.methodDecls, + decorators=decorators, + extradeclarations=extradeclarations, + extradefinitions=extradefinitions, + ) def getWrapObjectBody(self): return fill( @@ -16980,7 +19811,8 @@ def getWrapObjectBody(self): } return obj; """, - name=self.descriptor.name) + name=self.descriptor.name, + ) def getGetParentObjectReturnType(self): return "nsISupports*" @@ -16996,7 
+19828,8 @@ def getGetDocGroupBody(self): return nullptr; } return window->GetDocGroup(); - """) + """ + ) def getCreateFromExistingBody(self): # XXXbz we could try to get parts of this (e.g. the argument @@ -17033,7 +19866,8 @@ def getCreateFromExistingBody(self): return GetOrCreateDOMReflector(cx, impl, args.rval()); """, ifaceName=self.descriptor.interface.identifier.name, - implName=self.descriptor.name) + implName=self.descriptor.name, + ) def getEventHookMethod(self, parentClass, methodName): body = fill( @@ -17042,24 +19876,32 @@ def getEventHookMethod(self, parentClass, methodName): mImpl->${methodName}(Substring(nsDependentAtomString(aType), 2), IgnoreErrors()); """, parentClass=parentClass, - methodName=methodName) - return [ClassMethod(methodName, - "void", - [Argument("nsAtom*", "aType")], - virtual=True, - override=True, - body=body), - ClassUsingDeclaration(parentClass, methodName)] + methodName=methodName, + ) + return [ + ClassMethod( + methodName, + "void", + [Argument("nsAtom*", "aType")], + virtual=True, + override=True, + body=body, + ), + ClassUsingDeclaration(parentClass, methodName), + ] def isJSImplementedDescriptor(descriptorProvider): - return (isinstance(descriptorProvider, Descriptor) and - descriptorProvider.interface.isJSImplemented()) + return ( + isinstance(descriptorProvider, Descriptor) + and descriptorProvider.interface.isJSImplemented() + ) class CGCallback(CGClass): - def __init__(self, idlObject, descriptorProvider, baseName, methods, - getters=[], setters=[]): + def __init__( + self, idlObject, descriptorProvider, baseName, methods, getters=[], setters=[] + ): self.baseName = baseName self._deps = idlObject.getDeps() self.idlObject = idlObject @@ -17079,19 +19921,29 @@ def __init__(self, idlObject, descriptorProvider, baseName, methods, else: realMethods.extend(self.getMethodImpls(method)) realMethods.append( - ClassMethod("operator==", "bool", - [Argument("const %s&" % self.name, "aOther")], - inline=True, bodyInHeader=True, - const=True, - body=("return %s::operator==(aOther);\n" % baseName))) - CGClass.__init__(self, self.name, - bases=[ClassBase(baseName)], - constructors=self.getConstructors(), - methods=realMethods+getters+setters) + ClassMethod( + "operator==", + "bool", + [Argument("const %s&" % self.name, "aOther")], + inline=True, + bodyInHeader=True, + const=True, + body=("return %s::operator==(aOther);\n" % baseName), + ) + ) + CGClass.__init__( + self, + self.name, + bases=[ClassBase(baseName)], + constructors=self.getConstructors(), + methods=realMethods + getters + setters, + ) def getConstructors(self): - if (not self.idlObject.isInterface() and - not self.idlObject._treatNonObjectAsNull): + if ( + not self.idlObject.isInterface() + and not self.idlObject._treatNonObjectAsNull + ): body = "MOZ_ASSERT(JS::IsCallable(mCallback));\n" else: # Not much we can assert about it, other than not being null, and @@ -17099,43 +19951,53 @@ def getConstructors(self): body = "" return [ ClassConstructor( - [Argument("JSContext*", "aCx"), - Argument("JS::Handle", "aCallback"), - Argument("JS::Handle", "aCallbackGlobal"), - Argument("nsIGlobalObject*", "aIncumbentGlobal")], + [ + Argument("JSContext*", "aCx"), + Argument("JS::Handle", "aCallback"), + Argument("JS::Handle", "aCallbackGlobal"), + Argument("nsIGlobalObject*", "aIncumbentGlobal"), + ], bodyInHeader=True, visibility="public", explicit=True, baseConstructors=[ - "%s(aCx, aCallback, aCallbackGlobal, aIncumbentGlobal)" % - self.baseName, + "%s(aCx, aCallback, aCallbackGlobal, 
aIncumbentGlobal)" + % self.baseName, ], - body=body), + body=body, + ), ClassConstructor( - [Argument("JSObject*", "aCallback"), - Argument("JSObject*", "aCallbackGlobal"), - Argument("const FastCallbackConstructor&", "")], + [ + Argument("JSObject*", "aCallback"), + Argument("JSObject*", "aCallbackGlobal"), + Argument("const FastCallbackConstructor&", ""), + ], bodyInHeader=True, visibility="public", explicit=True, baseConstructors=[ - "%s(aCallback, aCallbackGlobal, FastCallbackConstructor())" % - self.baseName, + "%s(aCallback, aCallbackGlobal, FastCallbackConstructor())" + % self.baseName, ], - body=body), + body=body, + ), ClassConstructor( - [Argument("JSObject*", "aCallback"), - Argument("JSObject*", "aCallbackGlobal"), - Argument("JSObject*", "aAsyncStack"), - Argument("nsIGlobalObject*", "aIncumbentGlobal")], + [ + Argument("JSObject*", "aCallback"), + Argument("JSObject*", "aCallbackGlobal"), + Argument("JSObject*", "aAsyncStack"), + Argument("nsIGlobalObject*", "aIncumbentGlobal"), + ], bodyInHeader=True, visibility="public", explicit=True, baseConstructors=[ - "%s(aCallback, aCallbackGlobal, aAsyncStack, aIncumbentGlobal)" % - self.baseName, + "%s(aCallback, aCallbackGlobal, aAsyncStack, aIncumbentGlobal)" + % self.baseName, ], - body=body)] + body=body, + ), + ] def getMethodImpls(self, method): assert method.needThisHandling @@ -17156,11 +20018,13 @@ def getMethodImpls(self, method): # the private method. argnames = [arg.name for arg in args] argnamesWithThis = ["s.GetCallContext()", "thisValJS"] + argnames - argnamesWithoutThis = ["s.GetCallContext()", "JS::UndefinedHandleValue"] + argnames + argnamesWithoutThis = [ + "s.GetCallContext()", + "JS::UndefinedHandleValue", + ] + argnames # Now that we've recorded the argnames for our call to our private # method, insert our optional argument for the execution reason. - args.append(Argument("const char*", "aExecutionReason", - "nullptr")) + args.append(Argument("const char*", "aExecutionReason", "nullptr")) # Make copies of the arg list for the two "without rv" overloads. Note # that those don't need aExceptionHandling or aRealm arguments because @@ -17172,8 +20036,9 @@ def getMethodImpls(self, method): # Add the potional argument for deciding whether the CallSetup should # re-throw exceptions on aRv. - args.append(Argument("ExceptionHandling", "aExceptionHandling", - "eReportExceptions")) + args.append( + Argument("ExceptionHandling", "aExceptionHandling", "eReportExceptions") + ) # And the argument for communicating when exceptions should really be # rethrown. In particular, even when aExceptionHandling is # eRethrowExceptions they won't get rethrown if aRealm is provided @@ -17182,11 +20047,11 @@ def getMethodImpls(self, method): args.append(Argument("JS::Realm*", "aRealm", "nullptr")) # And now insert our template argument. argsWithoutThis = list(args) - args.insert(0, Argument("const T&", "thisVal")) - argsWithoutRv.insert(0, Argument("const T&", "thisVal")) + args.insert(0, Argument("const T&", "thisVal")) + argsWithoutRv.insert(0, Argument("const T&", "thisVal")) argnamesWithoutThisAndRv = [arg.name for arg in argsWithoutThisAndRv] - argnamesWithoutThisAndRv.insert(rvIndex, "IgnoreErrors()"); + argnamesWithoutThisAndRv.insert(rvIndex, "IgnoreErrors()") # If we just leave things like that, and have no actual arguments in the # IDL, we will end up trying to call the templated "without rv" overload # with "rv" as the thisVal. That's no good. 
So explicitly append the @@ -17214,7 +20079,8 @@ def getMethodImpls(self, method): } """, errorReturn=errorReturn, - executionReason=method.getPrettyName()) + executionReason=method.getPrettyName(), + ) bodyWithThis = fill( """ @@ -17229,7 +20095,8 @@ def getMethodImpls(self, method): setupCall=setupCall, errorReturn=errorReturn, methodName=method.name, - callArgs=", ".join(argnamesWithThis)) + callArgs=", ".join(argnamesWithThis), + ) bodyWithoutThis = fill( """ $*{setupCall} @@ -17238,39 +20105,60 @@ def getMethodImpls(self, method): setupCall=setupCall, errorReturn=errorReturn, methodName=method.name, - callArgs=", ".join(argnamesWithoutThis)) + callArgs=", ".join(argnamesWithoutThis), + ) bodyWithThisWithoutRv = fill( """ return ${methodName}(${callArgs}); """, methodName=method.name, - callArgs=", ".join(argnamesWithoutRv)) + callArgs=", ".join(argnamesWithoutRv), + ) bodyWithoutThisAndRv = fill( """ return ${methodName}(${callArgs}); """, methodName=method.name, - callArgs=", ".join(argnamesWithoutThisAndRv)) + callArgs=", ".join(argnamesWithoutThisAndRv), + ) - return [ClassMethod(method.name, method.returnType, args, - bodyInHeader=True, - templateArgs=["typename T"], - body=bodyWithThis, - canRunScript=method.canRunScript), - ClassMethod(method.name, method.returnType, argsWithoutThis, - bodyInHeader=True, - body=bodyWithoutThis, - canRunScript=method.canRunScript), - ClassMethod(method.name, method.returnType, argsWithoutRv, - bodyInHeader=True, - templateArgs=["typename T"], - body=bodyWithThisWithoutRv, - canRunScript=method.canRunScript), - ClassMethod(method.name, method.returnType, argsWithoutThisAndRv, - bodyInHeader=True, - body=bodyWithoutThisAndRv, - canRunScript=method.canRunScript), - method] + return [ + ClassMethod( + method.name, + method.returnType, + args, + bodyInHeader=True, + templateArgs=["typename T"], + body=bodyWithThis, + canRunScript=method.canRunScript, + ), + ClassMethod( + method.name, + method.returnType, + argsWithoutThis, + bodyInHeader=True, + body=bodyWithoutThis, + canRunScript=method.canRunScript, + ), + ClassMethod( + method.name, + method.returnType, + argsWithoutRv, + bodyInHeader=True, + templateArgs=["typename T"], + body=bodyWithThisWithoutRv, + canRunScript=method.canRunScript, + ), + ClassMethod( + method.name, + method.returnType, + argsWithoutThisAndRv, + bodyInHeader=True, + body=bodyWithoutThisAndRv, + canRunScript=method.canRunScript, + ), + method, + ] def deps(self): return self._deps @@ -17280,11 +20168,12 @@ class CGCallbackFunction(CGCallback): def __init__(self, callback, descriptorProvider): self.callback = callback if callback.isConstructor(): - methods=[ConstructCallback(callback, descriptorProvider)] + methods = [ConstructCallback(callback, descriptorProvider)] else: - methods=[CallCallback(callback, descriptorProvider)] - CGCallback.__init__(self, callback, descriptorProvider, - "CallbackFunction", methods) + methods = [CallCallback(callback, descriptorProvider)] + CGCallback.__init__( + self, callback, descriptorProvider, "CallbackFunction", methods + ) def getConstructors(self): return CGCallback.getConstructors(self) + [ @@ -17293,7 +20182,9 @@ def getConstructors(self): bodyInHeader=True, visibility="public", explicit=True, - baseConstructors=["CallbackFunction(aOther)"])] + baseConstructors=["CallbackFunction(aOther)"], + ) + ] class CGFastCallback(CGClass): @@ -17301,36 +20192,45 @@ def __init__(self, idlObject): self._deps = idlObject.getDeps() baseName = idlObject.identifier.name constructor = ClassConstructor( - 
[Argument("JSObject*", "aCallback"), - Argument("JSObject*", "aCallbackGlobal")], + [ + Argument("JSObject*", "aCallback"), + Argument("JSObject*", "aCallbackGlobal"), + ], bodyInHeader=True, visibility="public", explicit=True, baseConstructors=[ - "%s(aCallback, aCallbackGlobal, FastCallbackConstructor())" % - baseName, + "%s(aCallback, aCallbackGlobal, FastCallbackConstructor())" % baseName, ], - body="") - - traceMethod = ClassMethod("Trace", "void", - [Argument("JSTracer*", "aTracer")], - inline=True, - bodyInHeader=True, - visibility="public", - body="%s::Trace(aTracer);\n" % baseName) - holdMethod = ClassMethod("FinishSlowJSInitIfMoreThanOneOwner", "void", - [Argument("JSContext*", "aCx")], - inline=True, - bodyInHeader=True, - visibility="public", - body=( - "%s::FinishSlowJSInitIfMoreThanOneOwner(aCx);\n" % - baseName)) - - CGClass.__init__(self, "Fast%s" % baseName, - bases=[ClassBase(baseName)], - constructors=[constructor], - methods=[traceMethod, holdMethod]) + body="", + ) + + traceMethod = ClassMethod( + "Trace", + "void", + [Argument("JSTracer*", "aTracer")], + inline=True, + bodyInHeader=True, + visibility="public", + body="%s::Trace(aTracer);\n" % baseName, + ) + holdMethod = ClassMethod( + "FinishSlowJSInitIfMoreThanOneOwner", + "void", + [Argument("JSContext*", "aCx")], + inline=True, + bodyInHeader=True, + visibility="public", + body=("%s::FinishSlowJSInitIfMoreThanOneOwner(aCx);\n" % baseName), + ) + + CGClass.__init__( + self, + "Fast%s" % baseName, + bases=[ClassBase(baseName)], + constructors=[constructor], + methods=[traceMethod, holdMethod], + ) def deps(self): return self._deps @@ -17339,21 +20239,42 @@ def deps(self): class CGCallbackInterface(CGCallback): def __init__(self, descriptor, spiderMonkeyInterfacesAreStructs=False): iface = descriptor.interface - attrs = [m for m in iface.members - if (m.isAttr() and not m.isStatic() and - (not m.isMaplikeOrSetlikeAttr() or - not iface.isJSImplemented()))] - getters = [CallbackGetter(a, descriptor, spiderMonkeyInterfacesAreStructs) - for a in attrs] - setters = [CallbackSetter(a, descriptor, spiderMonkeyInterfacesAreStructs) - for a in attrs if not a.readonly] - methods = [m for m in iface.members - if (m.isMethod() and not m.isStatic() and - not m.isIdentifierLess() and - (not m.isMaplikeOrSetlikeOrIterableMethod() or - not iface.isJSImplemented()))] - methods = [CallbackOperation(m, sig, descriptor, spiderMonkeyInterfacesAreStructs) - for m in methods for sig in m.signatures()] + attrs = [ + m + for m in iface.members + if ( + m.isAttr() + and not m.isStatic() + and (not m.isMaplikeOrSetlikeAttr() or not iface.isJSImplemented()) + ) + ] + getters = [ + CallbackGetter(a, descriptor, spiderMonkeyInterfacesAreStructs) + for a in attrs + ] + setters = [ + CallbackSetter(a, descriptor, spiderMonkeyInterfacesAreStructs) + for a in attrs + if not a.readonly + ] + methods = [ + m + for m in iface.members + if ( + m.isMethod() + and not m.isStatic() + and not m.isIdentifierLess() + and ( + not m.isMaplikeOrSetlikeOrIterableMethod() + or not iface.isJSImplemented() + ) + ) + ] + methods = [ + CallbackOperation(m, sig, descriptor, spiderMonkeyInterfacesAreStructs) + for m in methods + for sig in m.signatures() + ] needInitId = False if iface.isJSImplemented() and iface.ctor(): @@ -17364,37 +20285,48 @@ def __init__(self, descriptor, spiderMonkeyInterfacesAreStructs=False): needInitId = True needOnGetId = False - if (iface.isJSImplemented() and - iface.maplikeOrSetlikeOrIterable and - 
iface.maplikeOrSetlikeOrIterable.isMaplike()): + if ( + iface.isJSImplemented() + and iface.maplikeOrSetlikeOrIterable + and iface.maplikeOrSetlikeOrIterable.isMaplike() + ): methods.append(CGJSImplOnGetOperation(descriptor)) needOnGetId = True - idlist = [descriptor.binaryNameFor(m.identifier.name) - for m in iface.members - if m.isAttr() or m.isMethod()] + idlist = [ + descriptor.binaryNameFor(m.identifier.name) + for m in iface.members + if m.isAttr() or m.isMethod() + ] if needInitId: idlist.append("__init") if needOnGetId: idlist.append("__onget") - if (iface.isJSImplemented() and - iface.getExtendedAttribute("WantsEventListenerHooks")): - methods.append(CGJSImplEventHookOperation(descriptor, - "eventListenerAdded")) - methods.append(CGJSImplEventHookOperation(descriptor, - "eventListenerRemoved")) + if iface.isJSImplemented() and iface.getExtendedAttribute( + "WantsEventListenerHooks" + ): + methods.append(CGJSImplEventHookOperation(descriptor, "eventListenerAdded")) + methods.append( + CGJSImplEventHookOperation(descriptor, "eventListenerRemoved") + ) idlist.append("eventListenerAdded") idlist.append("eventListenerRemoved") if len(idlist) != 0: - methods.append(initIdsClassMethod(idlist, - iface.identifier.name + "Atoms")) - CGCallback.__init__(self, iface, descriptor, "CallbackInterface", - methods, getters=getters, setters=setters) + methods.append(initIdsClassMethod(idlist, iface.identifier.name + "Atoms")) + CGCallback.__init__( + self, + iface, + descriptor, + "CallbackInterface", + methods, + getters=getters, + setters=setters, + ) -class FakeMember(): +class FakeMember: def __init__(self, name=None): if name is not None: self.identifier = FakeIdentifier(name) @@ -17420,11 +20352,17 @@ class CallbackMember(CGNativeMember): # XXXbz It's OK to use CallbackKnownNotGray for wrapScope because # CallSetup already handled the unmark-gray bits for us. we don't have # anything better to use for 'obj', really... - def __init__(self, sig, name, descriptorProvider, needThisHandling, - rethrowContentException=False, - spiderMonkeyInterfacesAreStructs=False, - wrapScope=None, - canRunScript=False): + def __init__( + self, + sig, + name, + descriptorProvider, + needThisHandling, + rethrowContentException=False, + spiderMonkeyInterfacesAreStructs=False, + wrapScope=None, + canRunScript=False, + ): """ needThisHandling is True if we need to be able to accept a specified thisObj, False otherwise. @@ -17437,10 +20375,12 @@ def __init__(self, sig, name, descriptorProvider, needThisHandling, self.argCount = len(args) if self.argCount > 0: # Check for variadic arguments - lastArg = args[self.argCount-1] + lastArg = args[self.argCount - 1] if lastArg.variadic: - self.argCountStr = ("(%d - 1) + %s.Length()" % - (self.argCount, lastArg.identifier.name)) + self.argCountStr = "(%d - 1) + %s.Length()" % ( + self.argCount, + lastArg.identifier.name, + ) else: self.argCountStr = "%d" % self.argCount self.needThisHandling = needThisHandling @@ -17453,17 +20393,23 @@ def __init__(self, sig, name, descriptorProvider, needThisHandling, # We don't care, for callback codegen, whether our original member was # a method or attribute or whatnot. Just always pass FakeMember() # here. 
- CGNativeMember.__init__(self, descriptorProvider, FakeMember(), - name, (self.retvalType, args), - extendedAttrs={}, - passJSBitsAsNeeded=False, - visibility=visibility, - spiderMonkeyInterfacesAreStructs=spiderMonkeyInterfacesAreStructs, - canRunScript=canRunScript) + CGNativeMember.__init__( + self, + descriptorProvider, + FakeMember(), + name, + (self.retvalType, args), + extendedAttrs={}, + passJSBitsAsNeeded=False, + visibility=visibility, + spiderMonkeyInterfacesAreStructs=spiderMonkeyInterfacesAreStructs, + canRunScript=canRunScript, + ) # We have to do all the generation of our body now, because # the caller relies on us throwing if we can't manage it. - self.exceptionCode = ("aRv.Throw(NS_ERROR_UNEXPECTED);\n" - "return%s;\n" % self.getDefaultRetval()) + self.exceptionCode = ( + "aRv.Throw(NS_ERROR_UNEXPECTED);\n" "return%s;\n" % self.getDefaultRetval() + ) self.body = self.getImpl() def getImpl(self): @@ -17480,7 +20426,8 @@ def getImpl(self): } """, argCount=self.argCountStr, - errorReturn=self.getDefaultRetval()) + errorReturn=self.getDefaultRetval(), + ) else: # Avoid weird 0-sized arrays argvDecl = "" @@ -17499,7 +20446,7 @@ def getResultConversion(self, isDefinitelyObject=False): # wrapping things into our current compartment (that of mCallback) # is what we want. "obj": "nullptr", - "passedToJSImpl": "false" + "passedToJSImpl": "false", } if isJSImplementedDescriptor(self.descriptorProvider): @@ -17508,19 +20455,22 @@ def getResultConversion(self, isDefinitelyObject=False): isCallbackReturnValue = "Callback" sourceDescription = "return value of %s" % self.getPrettyName() convertType = instantiateJSToNativeConversion( - getJSToNativeConversionInfo(self.retvalType, - self.descriptorProvider, - isDefinitelyObject=isDefinitelyObject, - exceptionCode=self.exceptionCode, - isCallbackReturnValue=isCallbackReturnValue, - # Allow returning a callback type that - # allows non-callable objects. - allowTreatNonCallableAsNull=True, - sourceDescription=sourceDescription), - replacements) + getJSToNativeConversionInfo( + self.retvalType, + self.descriptorProvider, + isDefinitelyObject=isDefinitelyObject, + exceptionCode=self.exceptionCode, + isCallbackReturnValue=isCallbackReturnValue, + # Allow returning a callback type that + # allows non-callable objects. + allowTreatNonCallableAsNull=True, + sourceDescription=sourceDescription, + ), + replacements, + ) assignRetval = string.Template( - self.getRetvalInfo(self.retvalType, - False)[2]).substitute(replacements) + self.getRetvalInfo(self.retvalType, False)[2] + ).substitute(replacements) type = convertType.define() return type + assignRetval @@ -17528,8 +20478,9 @@ def getArgConversions(self): # Just reget the arglist from self.originalSig, because our superclasses # just have way to many members they like to clobber, so I can't find a # safe member name to store it in. - argConversions = [self.getArgConversion(i, arg) - for i, arg in enumerate(self.originalSig[1])] + argConversions = [ + self.getArgConversion(i, arg) for i, arg in enumerate(self.originalSig[1]) + ] if not argConversions: return "\n" @@ -17538,10 +20489,10 @@ def getArgConversions(self): argConversions.reverse() # Wrap each one in a scope so that any locals it has don't leak out, and # also so that we can just "break;" for our successCode. 
- argConversions = [CGWrapper(CGIndenter(CGGeneric(c)), - pre="do {\n", - post="} while (false);\n") - for c in argConversions] + argConversions = [ + CGWrapper(CGIndenter(CGGeneric(c)), pre="do {\n", post="} while (false);\n") + for c in argConversions + ] if self.argCount > 0: argConversions.insert(0, self.getArgcDecl()) # And slap them together. @@ -17569,21 +20520,25 @@ def getArgConversion(self, i, arg): prepend = "" if arg.type.isUnion() and self.wrapScope is None: - prepend += "JS::Rooted callbackObj(cx, CallbackKnownNotGray());\n" + prepend += ( + "JS::Rooted callbackObj(cx, CallbackKnownNotGray());\n" + ) self.wrapScope = "callbackObj" conversion = prepend + wrapForType( - arg.type, self.descriptorProvider, + arg.type, + self.descriptorProvider, { - 'result': result, - 'successCode': "continue;\n" if arg.variadic else "break;\n", - 'jsvalRef': "argv[%s]" % jsvalIndex, - 'jsvalHandle': "argv[%s]" % jsvalIndex, - 'obj': self.wrapScope, - 'returnsNewObject': False, - 'exceptionCode': self.exceptionCode, - 'spiderMonkeyInterfacesAreStructs': self.spiderMonkeyInterfacesAreStructs - }) + "result": result, + "successCode": "continue;\n" if arg.variadic else "break;\n", + "jsvalRef": "argv[%s]" % jsvalIndex, + "jsvalHandle": "argv[%s]" % jsvalIndex, + "obj": self.wrapScope, + "returnsNewObject": False, + "exceptionCode": self.exceptionCode, + "spiderMonkeyInterfacesAreStructs": self.spiderMonkeyInterfacesAreStructs, + }, + ) if arg.variadic: conversion = fill( @@ -17594,7 +20549,8 @@ def getArgConversion(self, i, arg): break; """, arg=arg.identifier.name, - conversion=conversion) + conversion=conversion, + ) elif arg.canHaveMissingValue(): conversion = fill( """ @@ -17610,7 +20566,8 @@ def getArgConversion(self, i, arg): argName=arg.identifier.name, conversion=conversion, iPlus1=i + 1, - i=i) + i=i, + ) return conversion def getDefaultRetval(self): @@ -17625,16 +20582,20 @@ def getArgs(self, returnType, argList): # Since we don't need this handling, we're the actual method that # will be called, so we need an aRethrowExceptions argument. if not self.rethrowContentException: - args.append(Argument("const char*", "aExecutionReason", - "nullptr")) - args.append(Argument("ExceptionHandling", "aExceptionHandling", - "eReportExceptions")) + args.append(Argument("const char*", "aExecutionReason", "nullptr")) + args.append( + Argument( + "ExceptionHandling", "aExceptionHandling", "eReportExceptions" + ) + ) args.append(Argument("JS::Realm*", "aRealm", "nullptr")) return args # We want to allow the caller to pass in a "this" value, as # well as a BindingCallContext. - return [Argument("BindingCallContext&", "cx"), - Argument("JS::Handle", "aThisVal")] + args + return [ + Argument("BindingCallContext&", "cx"), + Argument("JS::Handle", "aThisVal"), + ] + args def getCallSetup(self): if self.needThisHandling: @@ -17644,8 +20605,13 @@ def getCallSetup(self): if self.rethrowContentException: # getArgs doesn't add the aExceptionHandling argument but does add # aRealm for us. 
- callSetup += ', "%s", eRethrowContentExceptions, aRealm, /* aIsJSImplementedWebIDL = */ ' % self.getPrettyName() - callSetup += toStringBool(isJSImplementedDescriptor(self.descriptorProvider)) + callSetup += ( + ', "%s", eRethrowContentExceptions, aRealm, /* aIsJSImplementedWebIDL = */ ' + % self.getPrettyName() + ) + callSetup += toStringBool( + isJSImplementedDescriptor(self.descriptorProvider) + ) else: callSetup += ', "%s", aExceptionHandling, aRealm' % self.getPrettyName() callSetup += ");\n" @@ -17660,7 +20626,8 @@ def getCallSetup(self): """, callSetup=callSetup, - errorReturn=self.getDefaultRetval()) + errorReturn=self.getDefaultRetval(), + ) def getArgcDecl(self): return CGGeneric("unsigned argc = %s;\n" % self.argCountStr) @@ -17669,24 +20636,30 @@ def getArgcDecl(self): def ensureASCIIName(idlObject): type = "attribute" if idlObject.isAttr() else "operation" if re.match("[^\x20-\x7E]", idlObject.identifier.name): - raise SyntaxError('Callback %s name "%s" contains non-ASCII ' - "characters. We can't handle that. %s" % - (type, idlObject.identifier.name, - idlObject.location)) + raise SyntaxError( + 'Callback %s name "%s" contains non-ASCII ' + "characters. We can't handle that. %s" + % (type, idlObject.identifier.name, idlObject.location) + ) if re.match('"', idlObject.identifier.name): - raise SyntaxError("Callback %s name '%s' contains " - "double-quote character. We can't handle " - "that. %s" % - (type, idlObject.identifier.name, - idlObject.location)) + raise SyntaxError( + "Callback %s name '%s' contains " + "double-quote character. We can't handle " + "that. %s" % (type, idlObject.identifier.name, idlObject.location) + ) class ConstructCallback(CallbackMember): def __init__(self, callback, descriptorProvider): self.callback = callback - CallbackMember.__init__(self, callback.signatures()[0], "Construct", - descriptorProvider, needThisHandling=False, - canRunScript=True) + CallbackMember.__init__( + self, + callback.signatures()[0], + "Construct", + descriptorProvider, + needThisHandling=False, + canRunScript=True, + ) def getRvalDecl(self): # Box constructedObj for getJSToNativeConversionInfo(). 
@@ -17710,25 +20683,37 @@ def getCall(self): rval.setObject(*constructedObj); """, args=args, - errorReturn=self.getDefaultRetval()) + errorReturn=self.getDefaultRetval(), + ) def getResultConversion(self): - return CallbackMember.getResultConversion(self, - isDefinitelyObject=True); + return CallbackMember.getResultConversion(self, isDefinitelyObject=True) def getPrettyName(self): return self.callback.identifier.name class CallbackMethod(CallbackMember): - def __init__(self, sig, name, descriptorProvider, needThisHandling, - rethrowContentException=False, - spiderMonkeyInterfacesAreStructs=False, - canRunScript=False): - CallbackMember.__init__(self, sig, name, descriptorProvider, - needThisHandling, rethrowContentException, - spiderMonkeyInterfacesAreStructs=spiderMonkeyInterfacesAreStructs, - canRunScript=canRunScript) + def __init__( + self, + sig, + name, + descriptorProvider, + needThisHandling, + rethrowContentException=False, + spiderMonkeyInterfacesAreStructs=False, + canRunScript=False, + ): + CallbackMember.__init__( + self, + sig, + name, + descriptorProvider, + needThisHandling, + rethrowContentException, + spiderMonkeyInterfacesAreStructs=spiderMonkeyInterfacesAreStructs, + canRunScript=canRunScript, + ) def getRvalDecl(self): return "JS::Rooted rval(cx);\n" @@ -17754,15 +20739,21 @@ def getCall(self): callGuard=self.getCallGuard(), thisVal=self.getThisVal(), args=args, - errorReturn=self.getDefaultRetval()) + errorReturn=self.getDefaultRetval(), + ) class CallCallback(CallbackMethod): def __init__(self, callback, descriptorProvider): self.callback = callback - CallbackMethod.__init__(self, callback.signatures()[0], "Call", - descriptorProvider, needThisHandling=True, - canRunScript=not callback.isRunScriptBoundary()) + CallbackMethod.__init__( + self, + callback.signatures()[0], + "Call", + descriptorProvider, + needThisHandling=True, + canRunScript=not callback.isRunScriptBoundary(), + ) def getThisDecl(self): return "" @@ -17786,14 +20777,28 @@ class CallbackOperationBase(CallbackMethod): """ Common class for implementing various callback operations. """ - def __init__(self, signature, jsName, nativeName, descriptor, - singleOperation, rethrowContentException=False, - spiderMonkeyInterfacesAreStructs=False): + + def __init__( + self, + signature, + jsName, + nativeName, + descriptor, + singleOperation, + rethrowContentException=False, + spiderMonkeyInterfacesAreStructs=False, + ): self.singleOperation = singleOperation self.methodName = descriptor.binaryNameFor(jsName) - CallbackMethod.__init__(self, signature, nativeName, descriptor, - singleOperation, rethrowContentException, - spiderMonkeyInterfacesAreStructs=spiderMonkeyInterfacesAreStructs) + CallbackMethod.__init__( + self, + signature, + nativeName, + descriptor, + singleOperation, + rethrowContentException, + spiderMonkeyInterfacesAreStructs=spiderMonkeyInterfacesAreStructs, + ) def getThisDecl(self): if not self.singleOperation: @@ -17801,10 +20806,12 @@ def getThisDecl(self): # This relies on getCallableDecl declaring a boolean # isCallable in the case when we're a single-operation # interface. - return dedent(""" + return dedent( + """ JS::Rooted thisValue(cx, isCallable ? 
aThisVal.get() : JS::ObjectValue(*mCallback)); - """) + """ + ) def getThisVal(self): return "thisValue" @@ -17822,9 +20829,10 @@ def getCallableDecl(self): """, methodAtomName=CGDictionary.makeIdName(self.methodName), atomCacheName=self.descriptorProvider.interface.identifier.name + "Atoms", - errorReturn=self.getDefaultRetval()) + errorReturn=self.getDefaultRetval(), + ) if not self.singleOperation: - return 'JS::Rooted callable(cx);\n' + getCallableFromProp + return "JS::Rooted callable(cx);\n" + getCallableFromProp return fill( """ bool isCallable = JS::IsCallable(mCallback); @@ -17835,7 +20843,8 @@ def getCallableDecl(self): $*{getCallableFromProp} } """, - getCallableFromProp=getCallableFromProp) + getCallableFromProp=getCallableFromProp, + ) def getCallGuard(self): return "" @@ -17845,48 +20854,64 @@ class CallbackOperation(CallbackOperationBase): """ Codegen actual WebIDL operations on callback interfaces. """ - def __init__(self, method, signature, descriptor, - spiderMonkeyInterfacesAreStructs): + + def __init__(self, method, signature, descriptor, spiderMonkeyInterfacesAreStructs): self.ensureASCIIName(method) self.method = method jsName = method.identifier.name - CallbackOperationBase.__init__(self, signature, - jsName, - MakeNativeName(descriptor.binaryNameFor(jsName)), - descriptor, descriptor.interface.isSingleOperationInterface(), - rethrowContentException=descriptor.interface.isJSImplemented(), - spiderMonkeyInterfacesAreStructs=spiderMonkeyInterfacesAreStructs) + CallbackOperationBase.__init__( + self, + signature, + jsName, + MakeNativeName(descriptor.binaryNameFor(jsName)), + descriptor, + descriptor.interface.isSingleOperationInterface(), + rethrowContentException=descriptor.interface.isJSImplemented(), + spiderMonkeyInterfacesAreStructs=spiderMonkeyInterfacesAreStructs, + ) def getPrettyName(self): - return "%s.%s" % (self.descriptorProvider.interface.identifier.name, - self.method.identifier.name) + return "%s.%s" % ( + self.descriptorProvider.interface.identifier.name, + self.method.identifier.name, + ) class CallbackAccessor(CallbackMember): """ Shared superclass for CallbackGetter and CallbackSetter. 
""" - def __init__(self, attr, sig, name, descriptor, - spiderMonkeyInterfacesAreStructs): + + def __init__(self, attr, sig, name, descriptor, spiderMonkeyInterfacesAreStructs): self.ensureASCIIName(attr) self.attrName = attr.identifier.name - CallbackMember.__init__(self, sig, name, descriptor, - needThisHandling=False, - rethrowContentException=descriptor.interface.isJSImplemented(), - spiderMonkeyInterfacesAreStructs=spiderMonkeyInterfacesAreStructs) + CallbackMember.__init__( + self, + sig, + name, + descriptor, + needThisHandling=False, + rethrowContentException=descriptor.interface.isJSImplemented(), + spiderMonkeyInterfacesAreStructs=spiderMonkeyInterfacesAreStructs, + ) def getPrettyName(self): - return "%s.%s" % (self.descriptorProvider.interface.identifier.name, - self.attrName) + return "%s.%s" % ( + self.descriptorProvider.interface.identifier.name, + self.attrName, + ) class CallbackGetter(CallbackAccessor): def __init__(self, attr, descriptor, spiderMonkeyInterfacesAreStructs): - CallbackAccessor.__init__(self, attr, - (attr.type, []), - callbackGetterName(attr, descriptor), - descriptor, - spiderMonkeyInterfacesAreStructs) + CallbackAccessor.__init__( + self, + attr, + (attr.type, []), + callbackGetterName(attr, descriptor), + descriptor, + spiderMonkeyInterfacesAreStructs, + ) def getRvalDecl(self): return "JS::Rooted rval(cx);\n" @@ -17904,17 +20929,23 @@ def getCall(self): } """, atomCacheName=self.descriptorProvider.interface.identifier.name + "Atoms", - attrAtomName=CGDictionary.makeIdName(self.descriptorProvider.binaryNameFor(self.attrName)), - errorReturn=self.getDefaultRetval()) + attrAtomName=CGDictionary.makeIdName( + self.descriptorProvider.binaryNameFor(self.attrName) + ), + errorReturn=self.getDefaultRetval(), + ) class CallbackSetter(CallbackAccessor): def __init__(self, attr, descriptor, spiderMonkeyInterfacesAreStructs): - CallbackAccessor.__init__(self, attr, - (BuiltinTypes[IDLBuiltinType.Types.void], - [FakeArgument(attr.type, attr)]), - callbackSetterName(attr, descriptor), - descriptor, spiderMonkeyInterfacesAreStructs) + CallbackAccessor.__init__( + self, + attr, + (BuiltinTypes[IDLBuiltinType.Types.void], [FakeArgument(attr.type, attr)]), + callbackSetterName(attr, descriptor), + descriptor, + spiderMonkeyInterfacesAreStructs, + ) def getRvalDecl(self): # We don't need an rval @@ -17934,8 +20965,11 @@ def getCall(self): } """, atomCacheName=self.descriptorProvider.interface.identifier.name + "Atoms", - attrAtomName=CGDictionary.makeIdName(self.descriptorProvider.binaryNameFor(self.attrName)), - errorReturn=self.getDefaultRetval()) + attrAtomName=CGDictionary.makeIdName( + self.descriptorProvider.binaryNameFor(self.attrName) + ), + errorReturn=self.getDefaultRetval(), + ) def getArgcDecl(self): return None @@ -17945,13 +20979,19 @@ class CGJSImplInitOperation(CallbackOperationBase): """ Codegen the __Init() method used to pass along constructor arguments for JS-implemented WebIDL. 
""" + def __init__(self, sig, descriptor): assert sig in descriptor.interface.ctor().signatures() - CallbackOperationBase.__init__(self, (BuiltinTypes[IDLBuiltinType.Types.void], sig[1]), - "__init", "__Init", descriptor, - singleOperation=False, - rethrowContentException=True, - spiderMonkeyInterfacesAreStructs=True) + CallbackOperationBase.__init__( + self, + (BuiltinTypes[IDLBuiltinType.Types.void], sig[1]), + "__init", + "__Init", + descriptor, + singleOperation=False, + rethrowContentException=True, + spiderMonkeyInterfacesAreStructs=True, + ) def getPrettyName(self): return "__init" @@ -17963,47 +21003,59 @@ class CGJSImplOnGetOperation(CallbackOperationBase): happening on a JS-implemented maplike. This method takes two arguments (key and value) and returns nothing. """ + def __init__(self, descriptor): CallbackOperationBase.__init__( self, - (BuiltinTypes[IDLBuiltinType.Types.void], - [FakeArgument(BuiltinTypes[IDLBuiltinType.Types.any], - None, - "key"), - FakeArgument(BuiltinTypes[IDLBuiltinType.Types.any], - None, - "value")]), - "__onget", "__OnGet", + ( + BuiltinTypes[IDLBuiltinType.Types.void], + [ + FakeArgument(BuiltinTypes[IDLBuiltinType.Types.any], None, "key"), + FakeArgument(BuiltinTypes[IDLBuiltinType.Types.any], None, "value"), + ], + ), + "__onget", + "__OnGet", descriptor, singleOperation=False, rethrowContentException=True, - spiderMonkeyInterfacesAreStructs=True) + spiderMonkeyInterfacesAreStructs=True, + ) def getPrettyName(self): return "__onget" + class CGJSImplEventHookOperation(CallbackOperationBase): """ Codegen the hooks on a JS impl for adding/removing event listeners. """ + def __init__(self, descriptor, name): self.name = name CallbackOperationBase.__init__( self, - (BuiltinTypes[IDLBuiltinType.Types.void], - [FakeArgument(BuiltinTypes[IDLBuiltinType.Types.domstring], - None, - "aType")]), - name, MakeNativeName(name), + ( + BuiltinTypes[IDLBuiltinType.Types.void], + [ + FakeArgument( + BuiltinTypes[IDLBuiltinType.Types.domstring], None, "aType" + ) + ], + ), + name, + MakeNativeName(name), descriptor, singleOperation=False, rethrowContentException=False, - spiderMonkeyInterfacesAreStructs=True) + spiderMonkeyInterfacesAreStructs=True, + ) def getPrettyName(self): return self.name + def getMaplikeOrSetlikeErrorReturn(helperImpl): """ Generate return values based on whether a maplike or setlike generated @@ -18015,7 +21067,9 @@ def getMaplikeOrSetlikeErrorReturn(helperImpl): """ aRv.Throw(NS_ERROR_UNEXPECTED); return%s; - """ % helperImpl.getDefaultRetval()) + """ + % helperImpl.getDefaultRetval() + ) return "return false;\n" @@ -18039,7 +21093,8 @@ def getMaplikeOrSetlikeBackingObject(descriptor, maplikeOrSetlike, helperImpl=No slot=memberReservedSlot(maplikeOrSetlike, descriptor), func_prefix=func_prefix, errorReturn=getMaplikeOrSetlikeErrorReturn(helperImpl), - selfType=descriptor.nativeType) + selfType=descriptor.nativeType, + ) return ret @@ -18058,9 +21113,11 @@ def getMaplikeOrSetlikeSizeGetterBody(descriptor, attr): args.rval().setNumber(result); return true; """, - getBackingObj=getMaplikeOrSetlikeBackingObject(descriptor, - attr.maplikeOrSetlike), - funcPrefix=attr.maplikeOrSetlike.prefix) + getBackingObj=getMaplikeOrSetlikeBackingObject( + descriptor, attr.maplikeOrSetlike + ), + funcPrefix=attr.maplikeOrSetlike.prefix, + ) class CGMaplikeOrSetlikeMethodGenerator(CGThing): @@ -18071,8 +21128,8 @@ class CGMaplikeOrSetlikeMethodGenerator(CGThing): CGMethodCall/CGPerSignatureCall. Functionality is filled in here instead of using CGCallGenerator. 
""" - def __init__(self, descriptor, maplikeOrSetlike, methodName, - helperImpl=None): + + def __init__(self, descriptor, maplikeOrSetlike, methodName, helperImpl=None): CGThing.__init__(self) # True if this will be the body of a C++ helper function. self.helperImpl = helperImpl @@ -18085,10 +21142,13 @@ def __init__(self, descriptor, maplikeOrSetlike, methodName, # function. Truncate the double underscore so calling the right # underlying JSAPI function still works. impl_method_name = impl_method_name[2:] - self.cgRoot.append(CGGeneric( - getMaplikeOrSetlikeBackingObject(self.descriptor, - self.maplikeOrSetlike, - self.helperImpl))) + self.cgRoot.append( + CGGeneric( + getMaplikeOrSetlikeBackingObject( + self.descriptor, self.maplikeOrSetlike, self.helperImpl + ) + ) + ) self.returnStmt = getMaplikeOrSetlikeErrorReturn(self.helperImpl) # Generates required code for the method. Method descriptions included @@ -18097,9 +21157,10 @@ def __init__(self, descriptor, maplikeOrSetlike, methodName, try: methodGenerator = getattr(self, impl_method_name) except AttributeError: - raise TypeError("Missing %s method definition '%s'" % - (self.maplikeOrSetlike.maplikeOrSetlikeType, - methodName)) + raise TypeError( + "Missing %s method definition '%s'" + % (self.maplikeOrSetlike.maplikeOrSetlikeType, methodName) + ) # Method generator returns tuple, containing: # # - a list of CGThings representing setup code for preparing to call @@ -18110,21 +21171,26 @@ def __init__(self, descriptor, maplikeOrSetlike, methodName, # Create the actual method call, and then wrap it with the code to # return the value if needed. - funcName = (self.maplikeOrSetlike.prefix + - MakeNativeName(impl_method_name)) + funcName = self.maplikeOrSetlike.prefix + MakeNativeName(impl_method_name) # Append the list of setup code CGThings self.cgRoot.append(CGList(setupCode)) # Create the JS API call - self.cgRoot.append(CGWrapper( - CGGeneric(fill( - """ + self.cgRoot.append( + CGWrapper( + CGGeneric( + fill( + """ if (!JS::${funcName}(${args})) { $*{errorReturn} } """, - funcName=funcName, - args=", ".join(["cx", "backingObj"] + arguments), - errorReturn=self.returnStmt)))) + funcName=funcName, + args=", ".join(["cx", "backingObj"] + arguments), + errorReturn=self.returnStmt, + ) + ) + ) + ) # Append result conversion self.cgRoot.append(CGList(setResult)) @@ -18140,15 +21206,18 @@ def appendArgConversion(self, name): Generate code to convert arguments to JS::Values, so they can be passed into JSAPI functions. """ - return CGGeneric(fill( - """ + return CGGeneric( + fill( + """ JS::Rooted ${name}Val(cx); if (!ToJSValue(cx, ${name}, &${name}Val)) { $*{errorReturn} } """, - name=name, - errorReturn=self.returnStmt)) + name=name, + errorReturn=self.returnStmt, + ) + ) def appendKeyArgConversion(self): """ @@ -18168,9 +21237,9 @@ def appendKeyAndValueArgConversion(self): r = self.appendKeyArgConversion() if self.helperImpl: return self.mergeTuples(r, ([], ["argv[1]"], [])) - return self.mergeTuples(r, ([self.appendArgConversion("arg1")], - ["arg1Val"], - [])) + return self.mergeTuples( + r, ([self.appendArgConversion("arg1")], ["arg1Val"], []) + ) def appendIteratorResult(self): """ @@ -18179,8 +21248,9 @@ def appendIteratorResult(self): something that would return an iterator is called via Xray, fail early. """ # TODO: Bug 1173651 - Remove check once bug 1023984 is fixed. - code = CGGeneric(dedent( - """ + code = CGGeneric( + dedent( + """ // TODO (Bug 1173651): Xrays currently cannot wrap iterators. Change // after bug 1023984 is fixed. 
if (xpc::WrapperFactory::IsXrayWrapper(obj)) { @@ -18189,26 +21259,37 @@ def appendIteratorResult(self): } JS::Rooted result(cx); JS::Rooted v(cx); - """)) - arguments = "&v" - setResult = CGGeneric(dedent( """ + ) + ) + arguments = "&v" + setResult = CGGeneric( + dedent( + """ result = &v.toObject(); - """)) + """ + ) + ) return ([code], [arguments], [setResult]) def appendSelfResult(self): """ Generate code to return the interface object itself. """ - code = CGGeneric(dedent( - """ + code = CGGeneric( + dedent( + """ JS::Rooted result(cx); - """)) - setResult = CGGeneric(dedent( """ + ) + ) + setResult = CGGeneric( + dedent( + """ result = obj; - """)) + """ + ) + ) return ([code], [], [setResult]) def appendBoolResult(self): @@ -18228,9 +21309,11 @@ def forEach(self): storage slots, then use a helper function in BindingUtils to make sure the callback is called correctly. """ - assert(not self.helperImpl) - code = [CGGeneric(dedent( - """ + assert not self.helperImpl + code = [ + CGGeneric( + dedent( + """ // Create a wrapper function. JSFunction* func = js::NewFunctionWithReserved(cx, ForEachHandler, 3, 0, nullptr); if (!func) { @@ -18242,7 +21325,10 @@ def forEach(self): JS::ObjectValue(*arg0)); js::SetFunctionNativeReserved(funcObj, FOREACH_MAPLIKEORSETLIKEOBJ_SLOT, JS::ObjectValue(*obj)); - """))] + """ + ) + ) + ] arguments = ["funcVal", "arg1"] return (code, arguments, []) @@ -18281,14 +21367,21 @@ def get(self): """ assert self.maplikeOrSetlike.isMaplike() r = self.appendKeyArgConversion() - code = [CGGeneric(dedent( - """ + code = [ + CGGeneric( + dedent( + """ JS::Rooted result(cx); - """))] + """ + ) + ) + ] arguments = ["&result"] if self.descriptor.interface.isJSImplemented(): - callOnGet = [CGGeneric(dedent( - """ + callOnGet = [ + CGGeneric( + dedent( + """ { JS::ExposeValueToActiveJS(result); ErrorResult onGetResult; @@ -18297,7 +21390,10 @@ def get(self): return false; } } - """))] + """ + ) + ) + ] else: callOnGet = [] return self.mergeTuples(r, (code, arguments, callOnGet)) @@ -18309,8 +21405,7 @@ def has(self): Check if an entry exists in the backing object. Returns true if value exists in backing object, false otherwise. """ - return self.mergeTuples(self.appendKeyArgConversion(), - self.appendBoolResult()) + return self.mergeTuples(self.appendKeyArgConversion(), self.appendBoolResult()) def keys(self): """ @@ -18352,8 +21447,7 @@ def delete(self): Deletes an entry from the backing object. Returns true if value existed in backing object, false otherwise. 
""" - return self.mergeTuples(self.appendKeyArgConversion(), - self.appendBoolResult()) + return self.mergeTuples(self.appendKeyArgConversion(), self.appendBoolResult()) def define(self): return self.cgRoot.define() @@ -18373,37 +21467,50 @@ class HelperFunction(CGAbstractMethod): Generates context retrieval code and rooted JSObject for interface for CGMaplikeOrSetlikeMethodGenerator to use """ + def __init__(self, descriptor, name, args, code, needsBoolReturn=False): self.code = code - CGAbstractMethod.__init__(self, descriptor, name, - "bool" if needsBoolReturn else "void", - args) + CGAbstractMethod.__init__( + self, descriptor, name, "bool" if needsBoolReturn else "void", args + ) def definition_body(self): return self.code - def __init__(self, descriptor, maplikeOrSetlike, name, needsKeyArg=False, - needsValueArg=False, needsBoolReturn=False): + def __init__( + self, + descriptor, + maplikeOrSetlike, + name, + needsKeyArg=False, + needsValueArg=False, + needsBoolReturn=False, + ): args = [] self.maplikeOrSetlike = maplikeOrSetlike self.needsBoolReturn = needsBoolReturn if needsKeyArg: - args.append(FakeArgument(maplikeOrSetlike.keyType, None, 'aKey')) + args.append(FakeArgument(maplikeOrSetlike.keyType, None, "aKey")) if needsValueArg: assert needsKeyArg - args.append(FakeArgument(maplikeOrSetlike.valueType, None, 'aValue')) + args.append(FakeArgument(maplikeOrSetlike.valueType, None, "aValue")) # Run CallbackMember init function to generate argument conversion code. # wrapScope is set to 'obj' when generating maplike or setlike helper # functions, as we don't have access to the CallbackPreserveColor # method. - CallbackMember.__init__(self, - [BuiltinTypes[IDLBuiltinType.Types.void], args], - name, descriptor, False, - wrapScope='obj') + CallbackMember.__init__( + self, + [BuiltinTypes[IDLBuiltinType.Types.void], args], + name, + descriptor, + False, + wrapScope="obj", + ) # Wrap CallbackMember body code into a CGAbstractMethod to make # generation easier. self.implMethod = CGMaplikeOrSetlikeHelperFunctionGenerator.HelperFunction( - descriptor, name, self.args, self.body, needsBoolReturn) + descriptor, name, self.args, self.body, needsBoolReturn + ) def getCallSetup(self): return dedent( @@ -18426,7 +21533,9 @@ def getCallSetup(self): JS::Rooted obj(cx); obj = js::UncheckedUnwrap(&v.toObject(), /* stopAtWindowProxy = */ false); JSAutoRealm reflectorRealm(cx, obj); - """ % self.getDefaultRetval()) + """ + % self.getDefaultRetval() + ) def getArgs(self, returnType, argList): # We don't need the context or the value. We'll generate those instead. @@ -18454,10 +21563,12 @@ def getDefaultRetval(self): return "" def getCall(self): - return CGMaplikeOrSetlikeMethodGenerator(self.descriptorProvider, - self.maplikeOrSetlike, - self.name.lower(), - helperImpl=self).define() + return CGMaplikeOrSetlikeMethodGenerator( + self.descriptorProvider, + self.maplikeOrSetlike, + self.name.lower(), + helperImpl=self, + ).define() def getPrettyName(self): return self.name @@ -18475,41 +21586,52 @@ class CGMaplikeOrSetlikeHelperGenerator(CGNamespace): setlike/maplike interface. Generates function signatures, un/packs backing objects from slot, etc. """ + def __init__(self, descriptor, maplikeOrSetlike): self.descriptor = descriptor # Since iterables are folded in with maplike/setlike, make sure we've # got the right type here. 
assert maplikeOrSetlike.isMaplike() or maplikeOrSetlike.isSetlike() self.maplikeOrSetlike = maplikeOrSetlike - self.namespace = "%sHelpers" % (self.maplikeOrSetlike.maplikeOrSetlikeOrIterableType.title()) + self.namespace = "%sHelpers" % ( + self.maplikeOrSetlike.maplikeOrSetlikeOrIterableType.title() + ) self.helpers = [ - CGMaplikeOrSetlikeHelperFunctionGenerator(descriptor, - maplikeOrSetlike, - "Clear"), - CGMaplikeOrSetlikeHelperFunctionGenerator(descriptor, - maplikeOrSetlike, - "Delete", - needsKeyArg=True, - needsBoolReturn=True), - CGMaplikeOrSetlikeHelperFunctionGenerator(descriptor, - maplikeOrSetlike, - "Has", - needsKeyArg=True, - needsBoolReturn=True)] + CGMaplikeOrSetlikeHelperFunctionGenerator( + descriptor, maplikeOrSetlike, "Clear" + ), + CGMaplikeOrSetlikeHelperFunctionGenerator( + descriptor, + maplikeOrSetlike, + "Delete", + needsKeyArg=True, + needsBoolReturn=True, + ), + CGMaplikeOrSetlikeHelperFunctionGenerator( + descriptor, + maplikeOrSetlike, + "Has", + needsKeyArg=True, + needsBoolReturn=True, + ), + ] if self.maplikeOrSetlike.isMaplike(): self.helpers.append( - CGMaplikeOrSetlikeHelperFunctionGenerator(descriptor, - maplikeOrSetlike, - "Set", - needsKeyArg=True, - needsValueArg=True)) + CGMaplikeOrSetlikeHelperFunctionGenerator( + descriptor, + maplikeOrSetlike, + "Set", + needsKeyArg=True, + needsValueArg=True, + ) + ) else: - assert(self.maplikeOrSetlike.isSetlike()) + assert self.maplikeOrSetlike.isSetlike() self.helpers.append( - CGMaplikeOrSetlikeHelperFunctionGenerator(descriptor, - maplikeOrSetlike, - "Add", - needsKeyArg=True)) + CGMaplikeOrSetlikeHelperFunctionGenerator( + descriptor, maplikeOrSetlike, "Add", needsKeyArg=True + ) + ) CGNamespace.__init__(self, self.namespace, CGList(self.helpers)) @@ -18520,10 +21642,13 @@ class CGIterableMethodGenerator(CGGeneric): CGMethodCall/CGPerSignatureCall. Functionality is filled in here instead of using CGCallGenerator. """ + def __init__(self, descriptor, iterable, methodName): if methodName == "forEach": - CGGeneric.__init__(self, fill( - """ + CGGeneric.__init__( + self, + fill( + """ if (!JS::IsCallable(arg0)) { cx.ThrowErrorMessage("Argument 1"); return false; @@ -18548,22 +21673,28 @@ def __init__(self, descriptor, iterable, methodName): } } """, - ifaceName=descriptor.interface.identifier.name, - selfType=descriptor.nativeType)) + ifaceName=descriptor.interface.identifier.name, + selfType=descriptor.nativeType, + ), + ) return - CGGeneric.__init__(self, fill( - """ + CGGeneric.__init__( + self, + fill( + """ typedef ${iterClass} itrType; RefPtr result(new itrType(self, itrType::IterableIteratorType::${itrMethod}, &${ifaceName}Iterator_Binding::Wrap)); """, - iterClass=iteratorNativeType(descriptor), - ifaceName=descriptor.interface.identifier.name, - itrMethod=methodName.title())) + iterClass=iteratorNativeType(descriptor), + ifaceName=descriptor.interface.identifier.name, + itrMethod=methodName.title(), + ), + ) -class GlobalGenRoots(): +class GlobalGenRoots: """ Roots for global codegen. 
@@ -18580,18 +21711,24 @@ def GeneratedAtomList(config): def memberToAtomCacheMember(binaryNameFor, m): binaryMemberName = binaryNameFor(m.identifier.name) - return ClassMember(CGDictionary.makeIdName(binaryMemberName), - "PinnedStringId", visibility="public") + return ClassMember( + CGDictionary.makeIdName(binaryMemberName), + "PinnedStringId", + visibility="public", + ) def buildAtomCacheStructure(idlobj, binaryNameFor, members): - classMembers = [memberToAtomCacheMember(binaryNameFor, m) - for m in members] + classMembers = [memberToAtomCacheMember(binaryNameFor, m) for m in members] structName = idlobj.identifier.name + "Atoms" - return (structName, - CGWrapper(CGClass(structName, - bases=None, - isStruct=True, - members=classMembers), post='\n')) + return ( + structName, + CGWrapper( + CGClass( + structName, bases=None, isStruct=True, members=classMembers + ), + post="\n", + ), + ) for dict in dictionaries: if len(dict.members) == 0: @@ -18599,51 +21736,59 @@ def buildAtomCacheStructure(idlobj, binaryNameFor, members): structs.append(buildAtomCacheStructure(dict, lambda x: x, dict.members)) - for d in (config.getDescriptors(isJSImplemented=True) + - config.getDescriptors(isCallback=True)): + for d in config.getDescriptors(isJSImplemented=True) + config.getDescriptors( + isCallback=True + ): members = [m for m in d.interface.members if m.isAttr() or m.isMethod()] if d.interface.isJSImplemented() and d.interface.ctor(): # We'll have an __init() method. - members.append(FakeMember('__init')) - if (d.interface.isJSImplemented() and - d.interface.maplikeOrSetlikeOrIterable and - d.interface.maplikeOrSetlikeOrIterable.isMaplike()): + members.append(FakeMember("__init")) + if ( + d.interface.isJSImplemented() + and d.interface.maplikeOrSetlikeOrIterable + and d.interface.maplikeOrSetlikeOrIterable.isMaplike() + ): # We'll have an __onget() method. - members.append(FakeMember('__onget')) - if (d.interface.isJSImplemented() and - d.interface.getExtendedAttribute("WantsEventListenerHooks")): - members.append(FakeMember('eventListenerAdded')) - members.append(FakeMember('eventListenerRemoved')) + members.append(FakeMember("__onget")) + if d.interface.isJSImplemented() and d.interface.getExtendedAttribute( + "WantsEventListenerHooks" + ): + members.append(FakeMember("eventListenerAdded")) + members.append(FakeMember("eventListenerRemoved")) if len(members) == 0: continue - structs.append(buildAtomCacheStructure(d.interface, - lambda x: d.binaryNameFor(x), - members)) + structs.append( + buildAtomCacheStructure( + d.interface, lambda x: d.binaryNameFor(x), members + ) + ) structs.sort() generatedStructs = [struct for structName, struct in structs] structNames = [structName for structName, struct in structs] - mainStruct = CGWrapper(CGClass("PerThreadAtomCache", - bases=[ClassBase(structName) for structName in structNames], - isStruct=True), - post='\n') + mainStruct = CGWrapper( + CGClass( + "PerThreadAtomCache", + bases=[ClassBase(structName) for structName in structNames], + isStruct=True, + ), + post="\n", + ) structs = CGList(generatedStructs + [mainStruct]) # Wrap all of that in our namespaces. - curr = CGNamespace.build(['mozilla', 'dom'], - CGWrapper(structs, pre='\n')) - curr = CGWrapper(curr, post='\n') + curr = CGNamespace.build(["mozilla", "dom"], CGWrapper(structs, pre="\n")) + curr = CGWrapper(curr, post="\n") # Add include statement for PinnedStringId. 
-        declareIncludes = ['mozilla/dom/PinnedStringId.h']
-        curr = CGHeaders([], [], [], [], declareIncludes, [], 'GeneratedAtomList',
-                         curr)
+        declareIncludes = ["mozilla/dom/PinnedStringId.h"]
+        curr = CGHeaders([], [], [], [], declareIncludes, [], "GeneratedAtomList", curr)
 
         # Add include guards.
-        curr = CGIncludeGuard('GeneratedAtomList', curr)
+        curr = CGIncludeGuard("GeneratedAtomList", curr)
 
         # Add the auto-generated comment.
         curr = CGWrapper(curr, pre=AUTOGENERATED_WARNING_COMMENT)
@@ -18655,87 +21800,125 @@ def buildAtomCacheStructure(idlobj, binaryNameFor, members):
     def GeneratedEventList(config):
         eventList = CGList([])
         for generatedEvent in config.generatedEvents:
-            eventList.append(CGGeneric(declare=("GENERATED_EVENT(%s)\n" % generatedEvent)))
+            eventList.append(
+                CGGeneric(declare=("GENERATED_EVENT(%s)\n" % generatedEvent))
+            )
         return eventList
 
     @staticmethod
     def PrototypeList(config):
         # Prototype ID enum.
-        descriptorsWithPrototype = config.getDescriptors(hasInterfacePrototypeObject=True)
+        descriptorsWithPrototype = config.getDescriptors(
+            hasInterfacePrototypeObject=True
+        )
         protos = [d.name for d in descriptorsWithPrototype]
-        idEnum = CGNamespacedEnum('id', 'ID', ['_ID_Start'] + protos,
-                                  [0, '_ID_Start'])
+        idEnum = CGNamespacedEnum("id", "ID", ["_ID_Start"] + protos, [0, "_ID_Start"])
         idEnum = CGList([idEnum])
 
         def fieldSizeAssert(amount, jitInfoField, message):
-            maxFieldValue = "(uint64_t(1) << (sizeof(std::declval<JSJitInfo>().%s) * 8))" % jitInfoField
-            return CGGeneric(define="static_assert(%s < %s, \"%s\");\n\n"
-                             % (amount, maxFieldValue, message))
-
-        idEnum.append(fieldSizeAssert("id::_ID_Count", "protoID",
-                                      "Too many prototypes!"))
+            maxFieldValue = (
+                "(uint64_t(1) << (sizeof(std::declval<JSJitInfo>().%s) * 8))"
+                % jitInfoField
+            )
+            return CGGeneric(
+                define='static_assert(%s < %s, "%s");\n\n'
+                % (amount, maxFieldValue, message)
+            )
+
+        idEnum.append(
+            fieldSizeAssert("id::_ID_Count", "protoID", "Too many prototypes!")
+        )
 
         # Wrap all of that in our namespaces.
-        idEnum = CGNamespace.build(['mozilla', 'dom', 'prototypes'],
-                                   CGWrapper(idEnum, pre='\n'))
-        idEnum = CGWrapper(idEnum, post='\n')
+        idEnum = CGNamespace.build(
+            ["mozilla", "dom", "prototypes"], CGWrapper(idEnum, pre="\n")
+        )
+        idEnum = CGWrapper(idEnum, post="\n")
 
-        curr = CGList([CGGeneric(define="#include <stdint.h>\n"),
-                       CGGeneric(define="#include <type_traits>\n\n"),
-                       CGGeneric(define='#include "js/experimental/JitInfo.h"\n\n'),
-                       CGGeneric(define='#include "mozilla/dom/PrototypeList.h"\n\n'),
-                       idEnum])
+        curr = CGList(
+            [
+                CGGeneric(define="#include <stdint.h>\n"),
+                CGGeneric(define="#include <type_traits>\n\n"),
+                CGGeneric(define='#include "js/experimental/JitInfo.h"\n\n'),
+                CGGeneric(define='#include "mozilla/dom/PrototypeList.h"\n\n'),
+                idEnum,
+            ]
+        )
 
         # Let things know the maximum length of the prototype chain.
         maxMacroName = "MAX_PROTOTYPE_CHAIN_LENGTH"
-        maxMacro = CGGeneric(declare="#define " + maxMacroName + " " + str(config.maxProtoChainLength))
-        curr.append(CGWrapper(maxMacro, post='\n\n'))
-        curr.append(fieldSizeAssert(maxMacroName, "depth",
-                                    "Some inheritance chain is too long!"))
+        maxMacro = CGGeneric(
+            declare="#define " + maxMacroName + " " + str(config.maxProtoChainLength)
+        )
+        curr.append(CGWrapper(maxMacro, post="\n\n"))
+        curr.append(
+            fieldSizeAssert(
+                maxMacroName, "depth", "Some inheritance chain is too long!"
+            )
+        )
 
         # Constructor ID enum.
         constructors = [d.name for d in config.getDescriptors(hasInterfaceObject=True)]
-        idEnum = CGNamespacedEnum('id', 'ID', ['_ID_Start'] + constructors,
-                                  ['prototypes::id::_ID_Count', '_ID_Start'])
+        idEnum = CGNamespacedEnum(
+            "id",
+            "ID",
+            ["_ID_Start"] + constructors,
+            ["prototypes::id::_ID_Count", "_ID_Start"],
+        )
 
         # Wrap all of that in our namespaces.
-        idEnum = CGNamespace.build(['mozilla', 'dom', 'constructors'],
-                                   CGWrapper(idEnum, pre='\n'))
-        idEnum = CGWrapper(idEnum, post='\n')
+        idEnum = CGNamespace.build(
+            ["mozilla", "dom", "constructors"], CGWrapper(idEnum, pre="\n")
+        )
+        idEnum = CGWrapper(idEnum, post="\n")
 
         curr.append(idEnum)
 
         # Named properties object enum.
-        namedPropertiesObjects = [d.name for d in config.getDescriptors(hasNamedPropertiesObject=True)]
-        idEnum = CGNamespacedEnum('id', 'ID', ['_ID_Start'] + namedPropertiesObjects,
-                                  ['constructors::id::_ID_Count', '_ID_Start'])
+        namedPropertiesObjects = [
+            d.name for d in config.getDescriptors(hasNamedPropertiesObject=True)
+        ]
+        idEnum = CGNamespacedEnum(
+            "id",
+            "ID",
+            ["_ID_Start"] + namedPropertiesObjects,
+            ["constructors::id::_ID_Count", "_ID_Start"],
+        )
 
         # Wrap all of that in our namespaces.
-        idEnum = CGNamespace.build(['mozilla', 'dom', 'namedpropertiesobjects'],
-                                   CGWrapper(idEnum, pre='\n'))
-        idEnum = CGWrapper(idEnum, post='\n')
+        idEnum = CGNamespace.build(
+            ["mozilla", "dom", "namedpropertiesobjects"], CGWrapper(idEnum, pre="\n")
+        )
+        idEnum = CGWrapper(idEnum, post="\n")
 
         curr.append(idEnum)
 
-        traitsDecls = [CGGeneric(declare=dedent("""
+        traitsDecls = [
+            CGGeneric(
+                declare=dedent(
+                    """
             template <prototypes::ID PrototypeID>
             struct PrototypeTraits;
-            """))]
+            """
+                )
+            )
+        ]
         traitsDecls.extend(CGPrototypeTraitsClass(d) for d in descriptorsWithPrototype)
 
-        ifaceNamesWithProto = [d.interface.identifier.name
-                               for d in descriptorsWithPrototype]
-        traitsDecls.append(CGStringTable("NamesOfInterfacesWithProtos",
-                                         ifaceNamesWithProto))
+        ifaceNamesWithProto = [
+            d.interface.identifier.name for d in descriptorsWithPrototype
+        ]
+        traitsDecls.append(
+            CGStringTable("NamesOfInterfacesWithProtos", ifaceNamesWithProto)
+        )
 
-        traitsDecl = CGNamespace.build(['mozilla', 'dom'],
-                                       CGList(traitsDecls))
+        traitsDecl = CGNamespace.build(["mozilla", "dom"], CGList(traitsDecls))
 
         curr.append(traitsDecl)
 
         # Add include guards.
-        curr = CGIncludeGuard('PrototypeList', curr)
+        curr = CGIncludeGuard("PrototypeList", curr)
 
         # Add the auto-generated comment.
curr = CGWrapper(curr, pre=AUTOGENERATED_WARNING_COMMENT) @@ -18746,23 +21929,24 @@ def fieldSizeAssert(amount, jitInfoField, message): @staticmethod def RegisterBindings(config): - curr = CGNamespace.build(['mozilla', 'dom'], CGGlobalNames(config)) - curr = CGWrapper(curr, post='\n') + curr = CGNamespace.build(["mozilla", "dom"], CGGlobalNames(config)) + curr = CGWrapper(curr, post="\n") # Add the includes - defineIncludes = [CGHeaders.getDeclarationFilename(desc.interface) - for desc in config.getDescriptors(hasInterfaceObject=True, - isExposedInWindow=True, - register=True)] - defineIncludes.append('mozilla/dom/WebIDLGlobalNameHash.h') - defineIncludes.append('mozilla/dom/PrototypeList.h') - defineIncludes.append('mozilla/PerfectHash.h') - defineIncludes.append('js/String.h') - curr = CGHeaders([], [], [], [], [], defineIncludes, 'RegisterBindings', - curr) + defineIncludes = [ + CGHeaders.getDeclarationFilename(desc.interface) + for desc in config.getDescriptors( + hasInterfaceObject=True, isExposedInWindow=True, register=True + ) + ] + defineIncludes.append("mozilla/dom/WebIDLGlobalNameHash.h") + defineIncludes.append("mozilla/dom/PrototypeList.h") + defineIncludes.append("mozilla/PerfectHash.h") + defineIncludes.append("js/String.h") + curr = CGHeaders([], [], [], [], [], defineIncludes, "RegisterBindings", curr) # Add include guards. - curr = CGIncludeGuard('RegisterBindings', curr) + curr = CGIncludeGuard("RegisterBindings", curr) # Done. return curr @@ -18773,21 +21957,23 @@ def RegisterWorkerBindings(config): curr = CGRegisterWorkerBindings(config) # Wrap all of that in our namespaces. - curr = CGNamespace.build(['mozilla', 'dom'], - CGWrapper(curr, post='\n')) - curr = CGWrapper(curr, post='\n') + curr = CGNamespace.build(["mozilla", "dom"], CGWrapper(curr, post="\n")) + curr = CGWrapper(curr, post="\n") # Add the includes - defineIncludes = [CGHeaders.getDeclarationFilename(desc.interface) - for desc in config.getDescriptors(hasInterfaceObject=True, - register=True, - isExposedInAnyWorker=True)] + defineIncludes = [ + CGHeaders.getDeclarationFilename(desc.interface) + for desc in config.getDescriptors( + hasInterfaceObject=True, register=True, isExposedInAnyWorker=True + ) + ] - curr = CGHeaders([], [], [], [], [], defineIncludes, - 'RegisterWorkerBindings', curr) + curr = CGHeaders( + [], [], [], [], [], defineIncludes, "RegisterWorkerBindings", curr + ) # Add include guards. - curr = CGIncludeGuard('RegisterWorkerBindings', curr) + curr = CGIncludeGuard("RegisterWorkerBindings", curr) # Done. return curr @@ -18798,21 +21984,23 @@ def RegisterWorkerDebuggerBindings(config): curr = CGRegisterWorkerDebuggerBindings(config) # Wrap all of that in our namespaces. - curr = CGNamespace.build(['mozilla', 'dom'], - CGWrapper(curr, post='\n')) - curr = CGWrapper(curr, post='\n') + curr = CGNamespace.build(["mozilla", "dom"], CGWrapper(curr, post="\n")) + curr = CGWrapper(curr, post="\n") # Add the includes - defineIncludes = [CGHeaders.getDeclarationFilename(desc.interface) - for desc in config.getDescriptors(hasInterfaceObject=True, - register=True, - isExposedInWorkerDebugger=True)] + defineIncludes = [ + CGHeaders.getDeclarationFilename(desc.interface) + for desc in config.getDescriptors( + hasInterfaceObject=True, register=True, isExposedInWorkerDebugger=True + ) + ] - curr = CGHeaders([], [], [], [], [], defineIncludes, - 'RegisterWorkerDebuggerBindings', curr) + curr = CGHeaders( + [], [], [], [], [], defineIncludes, "RegisterWorkerDebuggerBindings", curr + ) # Add include guards. 
- curr = CGIncludeGuard('RegisterWorkerDebuggerBindings', curr) + curr = CGIncludeGuard("RegisterWorkerDebuggerBindings", curr) # Done. return curr @@ -18823,21 +22011,23 @@ def RegisterWorkletBindings(config): curr = CGRegisterWorkletBindings(config) # Wrap all of that in our namespaces. - curr = CGNamespace.build(['mozilla', 'dom'], - CGWrapper(curr, post='\n')) - curr = CGWrapper(curr, post='\n') + curr = CGNamespace.build(["mozilla", "dom"], CGWrapper(curr, post="\n")) + curr = CGWrapper(curr, post="\n") # Add the includes - defineIncludes = [CGHeaders.getDeclarationFilename(desc.interface) - for desc in config.getDescriptors(hasInterfaceObject=True, - register=True, - isExposedInAnyWorklet=True)] + defineIncludes = [ + CGHeaders.getDeclarationFilename(desc.interface) + for desc in config.getDescriptors( + hasInterfaceObject=True, register=True, isExposedInAnyWorklet=True + ) + ] - curr = CGHeaders([], [], [], [], [], defineIncludes, - 'RegisterWorkletBindings', curr) + curr = CGHeaders( + [], [], [], [], [], defineIncludes, "RegisterWorkletBindings", curr + ) # Add include guards. - curr = CGIncludeGuard('RegisterWorkletBindings', curr) + curr = CGIncludeGuard("RegisterWorkletBindings", curr) # Done. return curr @@ -18845,15 +22035,22 @@ def RegisterWorkletBindings(config): @staticmethod def UnionTypes(config): unionTypes = UnionsForFile(config, None) - (includes, implincludes, declarations, - traverseMethods, unlinkMethods, - unionStructs) = UnionTypes(unionTypes, config) - - unions = CGList(traverseMethods + - unlinkMethods + - [CGUnionStruct(t, config) for t in unionStructs] + - [CGUnionStruct(t, config, True) for t in unionStructs], - "\n") + ( + includes, + implincludes, + declarations, + traverseMethods, + unlinkMethods, + unionStructs, + ) = UnionTypes(unionTypes, config) + + unions = CGList( + traverseMethods + + unlinkMethods + + [CGUnionStruct(t, config) for t in unionStructs] + + [CGUnionStruct(t, config, True) for t in unionStructs], + "\n", + ) includes.add("mozilla/OwningNonNull.h") includes.add("mozilla/dom/UnionMember.h") @@ -18864,9 +22061,9 @@ def UnionTypes(config): includes.add("mozilla/dom/BindingUtils.h") # Wrap all of that in our namespaces. - curr = CGNamespace.build(['mozilla', 'dom'], unions) + curr = CGNamespace.build(["mozilla", "dom"], unions) - curr = CGWrapper(curr, post='\n') + curr = CGWrapper(curr, post="\n") builder = ForwardDeclarationBuilder() for className, isStruct in declarations: @@ -18874,11 +22071,10 @@ def UnionTypes(config): curr = CGList([builder.build(), curr], "\n") - curr = CGHeaders([], [], [], [], includes, implincludes, 'UnionTypes', - curr) + curr = CGHeaders([], [], [], [], includes, implincludes, "UnionTypes", curr) # Add include guards. - curr = CGIncludeGuard('UnionTypes', curr) + curr = CGIncludeGuard("UnionTypes", curr) # Done. return curr @@ -18889,19 +22085,18 @@ def UnionConversions(config): for l in six.itervalues(config.unionsPerFilename): unionTypes.extend(l) unionTypes.sort(key=lambda u: u.name) - headers, unions = UnionConversions(unionTypes, - config) + headers, unions = UnionConversions(unionTypes, config) # Wrap all of that in our namespaces. 
- curr = CGNamespace.build(['mozilla', 'dom'], unions) + curr = CGNamespace.build(["mozilla", "dom"], unions) - curr = CGWrapper(curr, post='\n') + curr = CGWrapper(curr, post="\n") headers.update(["nsDebug.h", "mozilla/dom/UnionTypes.h"]) - curr = CGHeaders([], [], [], [], headers, [], 'UnionConversions', curr) + curr = CGHeaders([], [], [], [], headers, [], "UnionConversions", curr) # Add include guards. - curr = CGIncludeGuard('UnionConversions', curr) + curr = CGIncludeGuard("UnionConversions", curr) # Done. return curr @@ -18927,7 +22122,8 @@ def WebIDLPrefs(config): extern const WebIDLPrefFunc sWebIDLPrefs[${len}]; """, prefs=",\n".join(map(lambda p: "// " + p[0] + "\n" + p[1], prefs)) + "\n", - len=len(prefs) + 1) + len=len(prefs) + 1, + ) define = fill( """ const WebIDLPrefFunc sWebIDLPrefs[] = { @@ -18935,18 +22131,22 @@ def WebIDLPrefs(config): $*{prefs} }; """, - prefs=",\n".join(map(lambda p: "// " + p[0] + "\nStaticPrefs::" + p[1], prefs)) + "\n") + prefs=",\n".join( + map(lambda p: "// " + p[0] + "\nStaticPrefs::" + p[1], prefs) + ) + + "\n", + ) prefFunctions = CGGeneric(declare=declare, define=define) # Wrap all of that in our namespaces. - curr = CGNamespace.build(['mozilla', 'dom'], prefFunctions) + curr = CGNamespace.build(["mozilla", "dom"], prefFunctions) - curr = CGWrapper(curr, post='\n') + curr = CGWrapper(curr, post="\n") - curr = CGHeaders([], [], [], [], [], headers, 'WebIDLPrefs', curr) + curr = CGHeaders([], [], [], [], [], headers, "WebIDLPrefs", curr) # Add include guards. - curr = CGIncludeGuard('WebIDLPrefs', curr) + curr = CGIncludeGuard("WebIDLPrefs", curr) # Done. return curr @@ -18954,11 +22154,16 @@ def WebIDLPrefs(config): @staticmethod def WebIDLSerializable(config): # We need a declaration of StructuredCloneTags in the header. - declareIncludes = set(["mozilla/dom/DOMJSClass.h", - "mozilla/dom/StructuredCloneTags.h", - "js/TypeDecls.h"]) - defineIncludes = set(["mozilla/dom/WebIDLSerializable.h", - "mozilla/PerfectHash.h"]) + declareIncludes = set( + [ + "mozilla/dom/DOMJSClass.h", + "mozilla/dom/StructuredCloneTags.h", + "js/TypeDecls.h", + ] + ) + defineIncludes = set( + ["mozilla/dom/WebIDLSerializable.h", "mozilla/PerfectHash.h"] + ) names = list() for d in config.getDescriptors(isSerializable=True): names.append(d.name) @@ -18968,7 +22173,7 @@ def WebIDLSerializable(config): # We can't really create a PerfectHash out of this, but also there's # not much point to this file if we have no [Serializable] objects. # Just spit out an empty file. - return CGIncludeGuard('WebIDLSerializable', CGGeneric("")) + return CGIncludeGuard("WebIDLSerializable", CGGeneric("")) # If we had a lot of serializable things, it might be worth it to use a # PerfectHash here, or an array ordered by sctag value and binary @@ -18987,29 +22192,34 @@ def WebIDLSerializable(config): # holes filled in. Again, worth checking whether this still happens if # we have too many serializable things. if len(names) > 20: - raise TypeError("We now have %s serializable interfaces. " - "Double-check that the compiler is still " - "generating a jump table." % - len(names)); + raise TypeError( + "We now have %s serializable interfaces. " + "Double-check that the compiler is still " + "generating a jump table." % len(names) + ) entries = list() # Make sure we have stable ordering. for name in sorted(names): # Strip off trailing newline to make our formatting look right. 
- entries.append(fill( - """ + entries.append( + fill( + """ { /* mTag */ ${tag}, /* mDeserialize */ ${name}_Binding::Deserialize } """, - tag=StructuredCloneTag(name), - name=name)[:-1]) + tag=StructuredCloneTag(name), + name=name, + )[:-1] + ) declare = dedent( """ WebIDLDeserializer LookupDeserializer(StructuredCloneTags aTag); - """) + """ + ) define = fill( """ struct WebIDLSerializableEntry { @@ -19030,42 +22240,46 @@ def WebIDLSerializable(config): return nullptr; } """, - entries=",\n".join(entries) + "\n") + entries=",\n".join(entries) + "\n", + ) code = CGGeneric(declare=declare, define=define) # Wrap all of that in our namespaces. - curr = CGNamespace.build(['mozilla', 'dom'], code) + curr = CGNamespace.build(["mozilla", "dom"], code) - curr = CGWrapper(curr, post='\n') + curr = CGWrapper(curr, post="\n") - curr = CGHeaders([], [], [], [], declareIncludes, defineIncludes, - 'WebIDLSerializable', curr) + curr = CGHeaders( + [], [], [], [], declareIncludes, defineIncludes, "WebIDLSerializable", curr + ) # Add include guards. - curr = CGIncludeGuard('WebIDLSerializable', curr) + curr = CGIncludeGuard("WebIDLSerializable", curr) # Done. return curr - # Code generator for simple events class CGEventGetter(CGNativeMember): def __init__(self, descriptor, attr): ea = descriptor.getExtendedAttributes(attr, getter=True) - CGNativeMember.__init__(self, descriptor, attr, - CGSpecializedGetter.makeNativeName(descriptor, - attr), - (attr.type, []), - ea, - resultNotAddRefed=not attr.type.isSequence()) + CGNativeMember.__init__( + self, + descriptor, + attr, + CGSpecializedGetter.makeNativeName(descriptor, attr), + (attr.type, []), + ea, + resultNotAddRefed=not attr.type.isSequence(), + ) self.body = self.getMethodBody() def getArgs(self, returnType, argList): - if 'infallible' not in self.extendedAttrs: + if "infallible" not in self.extendedAttrs: raise TypeError("Event code generator does not support [Throws]!") - if 'canOOM' in self.extendedAttrs: + if "canOOM" in self.extendedAttrs: raise TypeError("Event code generator does not support [CanOOM]!") if not self.member.isAttr(): raise TypeError("Event code generator does not support methods") @@ -19076,15 +22290,22 @@ def getArgs(self, returnType, argList): def getMethodBody(self): type = self.member.type memberName = CGDictionary.makeMemberName(self.member.identifier.name) - if ((type.isPrimitive() and type.tag() in builtinNames) or - type.isEnum() or - type.isPromise() or - type.isGeckoInterface()): + if ( + (type.isPrimitive() and type.tag() in builtinNames) + or type.isEnum() + or type.isPromise() + or type.isGeckoInterface() + ): return "return " + memberName + ";\n" if type.isJSString(): # https://bugzilla.mozilla.org/show_bug.cgi?id=1580167 raise TypeError("JSString not supported as member of a generated event") - if type.isDOMString() or type.isByteString() or type.isUSVString() or type.isUTF8String(): + if ( + type.isDOMString() + or type.isByteString() + or type.isUSVString() + or type.isUTF8String() + ): return "aRetVal = " + memberName + ";\n" if type.isSpiderMonkeyInterface() or type.isObject(): return fill( @@ -19095,31 +22316,43 @@ def getMethodBody(self): aRetVal.set(${memberName}); return; """, - memberName=memberName) + memberName=memberName, + ) if type.isAny(): return fill( """ ${selfName}(aRetVal); """, - selfName=self.name) + selfName=self.name, + ) if type.isUnion(): return "aRetVal = " + memberName + ";\n" if type.isSequence(): if type.nullable(): - return "if (" + memberName + ".IsNull()) { aRetVal.SetNull(); } else 
{ aRetVal.SetValue(" + memberName + ".Value().Clone()); }\n" + return ( + "if (" + + memberName + + ".IsNull()) { aRetVal.SetNull(); } else { aRetVal.SetValue(" + + memberName + + ".Value().Clone()); }\n" + ) else: return "aRetVal = " + memberName + ".Clone();\n" raise TypeError("Event code generator does not support this type!") def declare(self, cgClass): - if getattr(self.member, "originatingInterface", - cgClass.descriptor.interface) != cgClass.descriptor.interface: + if ( + getattr(self.member, "originatingInterface", cgClass.descriptor.interface) + != cgClass.descriptor.interface + ): return "" return CGNativeMember.declare(self, cgClass) def define(self, cgClass): - if getattr(self.member, "originatingInterface", - cgClass.descriptor.interface) != cgClass.descriptor.interface: + if ( + getattr(self.member, "originatingInterface", cgClass.descriptor.interface) + != cgClass.descriptor.interface + ): return "" return CGNativeMember.define(self, cgClass) @@ -19133,13 +22366,16 @@ class CGEventMethod(CGNativeMember): def __init__(self, descriptor, method, signature, isConstructor, breakAfter=True): self.isInit = False - CGNativeMember.__init__(self, descriptor, method, - CGSpecializedMethod.makeNativeName(descriptor, - method), - signature, - descriptor.getExtendedAttributes(method), - breakAfter=breakAfter, - variadicIsSequence=True) + CGNativeMember.__init__( + self, + descriptor, + method, + CGSpecializedMethod.makeNativeName(descriptor, method), + signature, + descriptor.getExtendedAttributes(method), + breakAfter=breakAfter, + variadicIsSequence=True, + ) self.originalArgs = list(self.args) iface = descriptor.interface @@ -19147,15 +22383,18 @@ def __init__(self, descriptor, method, signature, isConstructor, breakAfter=True if not allowed and iface.getExtendedAttribute("LegacyEventInit"): # Allow it, only if it fits the initFooEvent profile exactly # We could check the arg types but it's not worth the effort. - if (method.identifier.name == "init" + iface.identifier.name and - signature[1][0].type.isDOMString() and - signature[1][1].type.isBoolean() and - signature[1][2].type.isBoolean() and + if ( + method.identifier.name == "init" + iface.identifier.name + and signature[1][0].type.isDOMString() + and signature[1][1].type.isBoolean() + and signature[1][2].type.isBoolean() + and # -3 on the left to ignore the type, bubbles, and cancelable parameters # -1 on the right to ignore the .trusted property which bleeds through # here because it is [Unforgeable]. - len(signature[1]) - 3 == len( - [x for x in iface.members if x.isAttr()]) - 1): + len(signature[1]) - 3 + == len([x for x in iface.members if x.isAttr()]) - 1 + ): allowed = True self.isInit = True @@ -19167,9 +22406,9 @@ def getArgs(self, returnType, argList): return args def getArg(self, arg): - decl, ref = self.getArgType(arg.type, - arg.canHaveMissingValue(), - "Variadic" if arg.variadic else False) + decl, ref = self.getArgType( + arg.type, arg.canHaveMissingValue(), "Variadic" if arg.variadic else False + ) if ref: decl = CGWrapper(decl, pre="const ", post="&") @@ -19202,8 +22441,10 @@ def defineInit(self, cgClass): if m.isAttr(): # We need to initialize all the member variables that do # not come from Event. 
- if getattr(m, "originatingInterface", - iface).identifier.name == "Event": + if ( + getattr(m, "originatingInterface", iface).identifier.name + == "Event" + ): continue name = CGDictionary.makeMemberName(m.identifier.name) members += "%s = %s;\n" % (name, self.args[i].name) @@ -19218,7 +22459,8 @@ def defineInit(self, cgClass): typeArg=self.args[0].name, bubblesArg=self.args[1].name, cancelableArg=self.args[2].name, - members=members) + members=members, + ) return CGNativeMember.define(self, cgClass) @@ -19230,12 +22472,18 @@ def define(self, cgClass): holdJS = "" iface = self.descriptorProvider.interface while iface.identifier.name != "Event": - for m in self.descriptorProvider.getDescriptor(iface.identifier.name).interface.members: + for m in self.descriptorProvider.getDescriptor( + iface.identifier.name + ).interface.members: if m.isAttr(): # We initialize all the other member variables in the # Constructor except those ones coming from the Event. - if getattr(m, "originatingInterface", - cgClass.descriptor.interface).identifier.name == "Event": + if ( + getattr( + m, "originatingInterface", cgClass.descriptor.interface + ).identifier.name + == "Event" + ): continue name = CGDictionary.makeMemberName(m.identifier.name) if m.type.isSequence(): @@ -19252,8 +22500,8 @@ def define(self, cgClass): sequenceCopy = "e->%s.AppendElements(%s);\n" if m.type.nullable(): sequenceCopy = CGIfWrapper( - CGGeneric(sequenceCopy), - "!%s.IsNull()" % source).define() + CGGeneric(sequenceCopy), "!%s.IsNull()" % source + ).define() target += ".SetValue()" source += ".Value()" members += sequenceCopy % (target, source) @@ -19269,16 +22517,23 @@ def define(self, cgClass): } """, varname=name, - srcname=srcname) + srcname=srcname, + ) else: members += fill( """ e->${varname}.set(${srcname}.Obj()); """, - varname=name, srcname=srcname) + varname=name, + srcname=srcname, + ) else: members += "e->%s = %s.%s;\n" % (name, self.args[1].name, name) - if m.type.isAny() or m.type.isObject() or m.type.isSpiderMonkeyInterface(): + if ( + m.type.isAny() + or m.type.isObject() + or m.type.isSpiderMonkeyInterface() + ): holdJS = "mozilla::HoldJSObjects(e.get());\n" iface = iface.parent @@ -19293,11 +22548,12 @@ def define(self, cgClass): $*{holdJS} return e.forget(); """, - nativeType=self.descriptorProvider.nativeType.split('::')[-1], + nativeType=self.descriptorProvider.nativeType.split("::")[-1], eventType=self.args[0].name, eventInit=self.args[1].name, members=members, - holdJS=holdJS) + holdJS=holdJS, + ) self.args.insert(0, Argument("mozilla::dom::EventTarget*", "aOwner")) constructorForNativeCaller = CGNativeMember.define(self, cgClass) + "\n" @@ -19308,7 +22564,8 @@ def define(self, cgClass): return Constructor(owner, ${arg0}, ${arg1}); """, arg0=self.args[0].name, - arg1=self.args[1].name) + arg1=self.args[1].name, + ) if needCx(None, self.arguments(), [], considerTypes=True, static=True): self.args.insert(0, Argument("JSContext*", "aCx")) self.args.insert(0, Argument("const GlobalObject&", "aGlobal")) @@ -19319,16 +22576,27 @@ class CGEventClass(CGBindingImplClass): """ Codegen for the actual Event class implementation for this descriptor """ + def __init__(self, descriptor): - CGBindingImplClass.__init__(self, descriptor, CGEventMethod, CGEventGetter, CGEventSetter, False, "WrapObjectInternal") + CGBindingImplClass.__init__( + self, + descriptor, + CGEventMethod, + CGEventGetter, + CGEventSetter, + False, + "WrapObjectInternal", + ) members = [] extraMethods = [] self.membersNeedingCC = [] 
self.membersNeedingTrace = [] for m in descriptor.interface.members: - if getattr(m, "originatingInterface", - descriptor.interface) != descriptor.interface: + if ( + getattr(m, "originatingInterface", descriptor.interface) + != descriptor.interface + ): continue if m.isAttr(): @@ -19343,34 +22611,43 @@ def __init__(self, descriptor): ClassMethod( CGSpecializedGetter.makeNativeName(descriptor, m), "void", - [Argument("JS::MutableHandle", - "aRetVal")], + [Argument("JS::MutableHandle", "aRetVal")], const=True, body=fill( """ JS::ExposeValueToActiveJS(${memberName}); aRetVal.set(${memberName}); """, - memberName=CGDictionary.makeMemberName(m.identifier.name)))) - elif (m.type.isObject() or - m.type.isSpiderMonkeyInterface()): + memberName=CGDictionary.makeMemberName( + m.identifier.name + ), + ), + ) + ) + elif m.type.isObject() or m.type.isSpiderMonkeyInterface(): self.membersNeedingTrace.append(m) elif typeNeedsRooting(m.type): raise TypeError( - "Need to implement tracing for event member of type %s" % - m.type) + "Need to implement tracing for event member of type %s" % m.type + ) elif idlTypeNeedsCycleCollection(m.type): self.membersNeedingCC.append(m) nativeType = self.getNativeTypeForIDLType(m.type).define() - members.append(ClassMember(CGDictionary.makeMemberName(m.identifier.name), - nativeType, - visibility="private", - body="body")) + members.append( + ClassMember( + CGDictionary.makeMemberName(m.identifier.name), + nativeType, + visibility="private", + body="body", + ) + ) parent = self.descriptor.interface.parent - self.parentType = self.descriptor.getDescriptor(parent.identifier.name).nativeType.split('::')[-1] - self.nativeType = self.descriptor.nativeType.split('::')[-1] + self.parentType = self.descriptor.getDescriptor( + parent.identifier.name + ).nativeType.split("::")[-1] + self.nativeType = self.descriptor.nativeType.split("::")[-1] if self.needCC(): isupportsDecl = fill( @@ -19379,14 +22656,16 @@ def __init__(self, descriptor): NS_DECL_CYCLE_COLLECTION_SCRIPT_HOLDER_CLASS_INHERITED(${nativeType}, ${parentType}) """, nativeType=self.nativeType, - parentType=self.parentType) + parentType=self.parentType, + ) else: isupportsDecl = fill( """ NS_INLINE_DECL_REFCOUNTING_INHERITED(${nativeType}, ${parentType}) """, nativeType=self.nativeType, - parentType=self.parentType) + parentType=self.parentType, + ) baseDeclarations = fill( """ @@ -19400,43 +22679,54 @@ def __init__(self, descriptor): """, isupportsDecl=isupportsDecl, nativeType=self.nativeType, - parentType=self.parentType) + parentType=self.parentType, + ) className = self.nativeType - asConcreteTypeMethod = ClassMethod("As%s" % className, - "%s*" % className, - [], - virtual=True, - body="return this;\n", - breakAfterReturnDecl=" ", - override=True) + asConcreteTypeMethod = ClassMethod( + "As%s" % className, + "%s*" % className, + [], + virtual=True, + body="return this;\n", + breakAfterReturnDecl=" ", + override=True, + ) extraMethods.append(asConcreteTypeMethod) - CGClass.__init__(self, className, - bases=[ClassBase(self.parentType)], - methods=extraMethods+self.methodDecls, - members=members, - extradeclarations=baseDeclarations) + CGClass.__init__( + self, + className, + bases=[ClassBase(self.parentType)], + methods=extraMethods + self.methodDecls, + members=members, + extradeclarations=baseDeclarations, + ) def getWrapObjectBody(self): - return "return %s_Binding::Wrap(aCx, this, aGivenProto);\n" % self.descriptor.name + return ( + "return %s_Binding::Wrap(aCx, this, aGivenProto);\n" % self.descriptor.name + ) 
def needCC(self): - return (len(self.membersNeedingCC) != 0 or - len(self.membersNeedingTrace) != 0) + return len(self.membersNeedingCC) != 0 or len(self.membersNeedingTrace) != 0 def implTraverse(self): retVal = "" for m in self.membersNeedingCC: - retVal += (" NS_IMPL_CYCLE_COLLECTION_TRAVERSE(%s)\n" % - CGDictionary.makeMemberName(m.identifier.name)) + retVal += ( + " NS_IMPL_CYCLE_COLLECTION_TRAVERSE(%s)\n" + % CGDictionary.makeMemberName(m.identifier.name) + ) return retVal def implUnlink(self): retVal = "" for m in self.membersNeedingCC: - retVal += (" NS_IMPL_CYCLE_COLLECTION_UNLINK(%s)\n" % - CGDictionary.makeMemberName(m.identifier.name)) + retVal += ( + " NS_IMPL_CYCLE_COLLECTION_UNLINK(%s)\n" + % CGDictionary.makeMemberName(m.identifier.name) + ) for m in self.membersNeedingTrace: name = CGDictionary.makeMemberName(m.identifier.name) if m.type.isAny(): @@ -19450,8 +22740,10 @@ def implUnlink(self): def implTrace(self): retVal = "" for m in self.membersNeedingTrace: - retVal += (" NS_IMPL_CYCLE_COLLECTION_TRACE_JS_MEMBER_CALLBACK(%s)\n" % - CGDictionary.makeMemberName(m.identifier.name)) + retVal += ( + " NS_IMPL_CYCLE_COLLECTION_TRACE_JS_MEMBER_CALLBACK(%s)\n" + % CGDictionary.makeMemberName(m.identifier.name) + ) return retVal def define(self): @@ -19468,8 +22760,9 @@ def define(self): if dropJS != "": dropJS += "mozilla::DropJSObjects(this);\n" # Just override CGClass and do our own thing - ctorParams = ("aOwner, nullptr, nullptr" if self.parentType == "Event" - else "aOwner") + ctorParams = ( + "aOwner, nullptr, nullptr" if self.parentType == "Event" else "aOwner" + ) if self.needCC(): classImpl = fill( @@ -19499,7 +22792,8 @@ def define(self): parentType=self.parentType, traverse=self.implTraverse(), unlink=self.implUnlink(), - trace=self.implTrace()) + trace=self.implTrace(), + ) else: classImpl = "" @@ -19520,7 +22814,8 @@ def define(self): nativeType=self.nativeType, ctorParams=ctorParams, parentType=self.parentType, - dropJS=dropJS) + dropJS=dropJS, + ) return classImpl + CGBindingImplClass.define(self) @@ -19543,15 +22838,16 @@ def getNativeTypeForIDLType(self, type): nativeType = CGGeneric("RefPtr") elif type.isGeckoInterface(): iface = type.unroll().inner - nativeType = self.descriptor.getDescriptor( - iface.identifier.name).nativeType + nativeType = self.descriptor.getDescriptor(iface.identifier.name).nativeType # Now trim off unnecessary namespaces nativeType = nativeType.split("::") if nativeType[0] == "mozilla": nativeType.pop(0) if nativeType[0] == "dom": nativeType.pop(0) - nativeType = CGWrapper(CGGeneric("::".join(nativeType)), pre="RefPtr<", post=">") + nativeType = CGWrapper( + CGGeneric("::".join(nativeType)), pre="RefPtr<", post=">" + ) elif type.isAny(): nativeType = CGGeneric("JS::Heap") elif type.isObject() or type.isSpiderMonkeyInterface(): @@ -19563,21 +22859,26 @@ def getNativeTypeForIDLType(self, type): innerType = type.inner.inner else: innerType = type.inner - if (not innerType.isPrimitive() and not innerType.isEnum() and - not innerType.isDOMString() and not innerType.isByteString() and - not innerType.isUTF8String() and not innerType.isPromise() and - not innerType.isGeckoInterface()): - raise TypeError("Don't know how to properly manage GC/CC for " - "event member of type %s" % - type) + if ( + not innerType.isPrimitive() + and not innerType.isEnum() + and not innerType.isDOMString() + and not innerType.isByteString() + and not innerType.isUTF8String() + and not innerType.isPromise() + and not innerType.isGeckoInterface() + ): + raise 
TypeError( + "Don't know how to properly manage GC/CC for " + "event member of type %s" % type + ) nativeType = CGTemplatedType( - "nsTArray", - self.getNativeTypeForIDLType(innerType)) + "nsTArray", self.getNativeTypeForIDLType(innerType) + ) if type.nullable(): nativeType = CGTemplatedType("Nullable", nativeType) else: - raise TypeError("Don't know how to declare event member of type %s" % - type) + raise TypeError("Don't know how to declare event member of type %s" % type) return nativeType @@ -19585,13 +22886,13 @@ class CGEventRoot(CGThing): def __init__(self, config, interfaceName): descriptor = config.getDescriptor(interfaceName) - self.root = CGWrapper(CGEventClass(descriptor), - pre="\n", post="\n") + self.root = CGWrapper(CGEventClass(descriptor), pre="\n", post="\n") self.root = CGNamespace.build(["mozilla", "dom"], self.root) - self.root = CGList([CGClassForwardDeclare("JSContext", isStruct=True), - self.root]) + self.root = CGList( + [CGClassForwardDeclare("JSContext", isStruct=True), self.root] + ) parent = descriptor.interface.parent.identifier.name @@ -19606,31 +22907,42 @@ def __init__(self, config, interfaceName): "mozilla/Attributes.h", "mozilla/ErrorResult.h", "mozilla/dom/%sBinding.h" % interfaceName, - 'mozilla/dom/BindingUtils.h', - ], + "mozilla/dom/BindingUtils.h", + ], [ "%s.h" % interfaceName, "js/GCAPI.h", - 'mozilla/dom/Nullable.h', + "mozilla/dom/Nullable.h", ], - "", self.root, config) + "", + self.root, + config, + ) # And now some include guards self.root = CGIncludeGuard(interfaceName, self.root) self.root = CGWrapper( self.root, - pre=(AUTOGENERATED_WITH_SOURCE_WARNING_COMMENT % - os.path.basename(descriptor.interface.filename()))) + pre=( + AUTOGENERATED_WITH_SOURCE_WARNING_COMMENT + % os.path.basename(descriptor.interface.filename()) + ), + ) - self.root = CGWrapper(self.root, pre=dedent(""" + self.root = CGWrapper( + self.root, + pre=dedent( + """ /* -*- Mode: C++; tab-width: 2; indent-tabs-mode: nil; c-basic-offset: 2 -*- */ /* vim:set ts=2 sw=2 sts=2 et cindent: */ /* This Source Code Form is subject to the terms of the Mozilla Public * License, v. 2.0. If a copy of the MPL was not distributed with this * file, You can obtain one at http://mozilla.org/MPL/2.0/. */ - """)) + """ + ), + ) def declare(self): return self.root.declare() diff --git a/dom/bindings/Configuration.py b/dom/bindings/Configuration.py index 522eb9d478a190..aacbffa68ad190 100644 --- a/dom/bindings/Configuration.py +++ b/dom/bindings/Configuration.py @@ -19,6 +19,7 @@ class DescriptorProvider: Subclasses must also have a getConfig() method that returns a Configuration. """ + def __init__(self): pass @@ -33,15 +34,17 @@ class Configuration(DescriptorProvider): Represents global configuration state based on IDL parse data and the configuration file. """ + def __init__(self, filename, webRoots, parseData, generatedEvents=[]): DescriptorProvider.__init__(self) # Read the configuration file. glbl = {} - exec(io.open(filename, encoding='utf-8').read(), glbl) - config = glbl['DOMInterfaces'] + exec(io.open(filename, encoding="utf-8").read(), glbl) + config = glbl["DOMInterfaces"] webRoots = tuple(map(os.path.normpath, webRoots)) + def isInWebIDLRoot(path): return any(isChildPath(path, root) for root in webRoots) @@ -61,20 +64,23 @@ def isInWebIDLRoot(path): # different .webidl file than their LHS interface. Make sure we # don't have any of those. See similar block below for partial # interfaces! 
- if (thing.interface.filename() != thing.filename()): + if thing.interface.filename() != thing.filename(): raise TypeError( "The binding build system doesn't really support " "'includes' statements which don't appear in the " "file in which the left-hand side of the statement is " "defined.\n" "%s\n" - "%s" % - (thing.location, thing.interface.location)) + "%s" % (thing.location, thing.interface.location) + ) assert not thing.isType() - if (not thing.isInterface() and not thing.isNamespace() and - not thing.isInterfaceMixin()): + if ( + not thing.isInterface() + and not thing.isNamespace() + and not thing.isInterfaceMixin() + ): continue # Our build system doesn't support dep builds involving # addition/removal of partial interfaces/namespaces/mixins that @@ -92,8 +98,8 @@ def isInWebIDLRoot(path): "interface/namespace/mixin they are extending is " "defined. Don't do this.\n" "%s\n" - "%s" % - (partial.location, thing.location)) + "%s" % (partial.location, thing.location) + ) # The rest of the logic doesn't apply to mixins. if thing.isInterfaceMixin(): @@ -101,18 +107,21 @@ def isInWebIDLRoot(path): iface = thing if not iface.isExternal(): - if not (iface.getExtendedAttribute("ChromeOnly") or - iface.getExtendedAttribute("Func") == ["nsContentUtils::IsCallerChromeOrFuzzingEnabled"] or - not iface.hasInterfaceObject() or - isInWebIDLRoot(iface.filename())): + if not ( + iface.getExtendedAttribute("ChromeOnly") + or iface.getExtendedAttribute("Func") + == ["nsContentUtils::IsCallerChromeOrFuzzingEnabled"] + or not iface.hasInterfaceObject() + or isInWebIDLRoot(iface.filename()) + ): raise TypeError( "Interfaces which are exposed to the web may only be " "defined in a DOM WebIDL root %r. Consider marking " "the interface [ChromeOnly] or " "[Func='nsContentUtils::IsCallerChromeOrFuzzingEnabled'] " "if you do not want it exposed to the web.\n" - "%s" % - (webRoots, iface.location)) + "%s" % (webRoots, iface.location) + ) self.interfaces[iface.identifier.name] = iface @@ -130,19 +139,18 @@ def isInWebIDLRoot(path): # Keep the descriptor list sorted for determinism. self.descriptors.sort(key=lambda x: x.name) - self.descriptorsByFile = {} for d in self.descriptors: - self.descriptorsByFile.setdefault(d.interface.filename(), - []).append(d) + self.descriptorsByFile.setdefault(d.interface.filename(), []).append(d) self.enums = [e for e in parseData if e.isEnum()] self.dictionaries = [d for d in parseData if d.isDictionary()] - self.dictionariesByName = { d.identifier.name: d for d in self.dictionaries } + self.dictionariesByName = {d.identifier.name: d for d in self.dictionaries} - self.callbacks = [c for c in parseData if - c.isCallback() and not c.isInterface()] + self.callbacks = [ + c for c in parseData if c.isCallback() and not c.isInterface() + ] # Dictionary mapping from a union type name to a set of filenames where # union types with that name are used. @@ -184,7 +192,8 @@ def isInWebIDLRoot(path): # unions for the file where we previously found # them. 
unionsForFilename = [ - u for u in self.unionsPerFilename[f] + u + for u in self.unionsPerFilename[f] if u.name != t.name ] if len(unionsForFilename) == 0: @@ -197,12 +206,14 @@ def isInWebIDLRoot(path): self.unionsPerFilename[uniqueFilenameForUnion].append(t) filenamesForUnion.add(t.filename()) - for d in getDictionariesConvertedToJS(self.descriptors, self.dictionaries, - self.callbacks): + for d in getDictionariesConvertedToJS( + self.descriptors, self.dictionaries, self.callbacks + ): d.needsConversionToJS = True - for d in getDictionariesConvertedFromJS(self.descriptors, self.dictionaries, - self.callbacks): + for d in getDictionariesConvertedFromJS( + self.descriptors, self.dictionaries, self.callbacks + ): d.needsConversionFromJS = True def getInterface(self, ifname): @@ -213,34 +224,34 @@ def getDescriptors(self, **filters): curr = self.descriptors # Collect up our filters, because we may have a webIDLFile filter that # we always want to apply first. - tofilter = [ (lambda x: x.interface.isExternal(), False) ] + tofilter = [(lambda x: x.interface.isExternal(), False)] for key, val in six.iteritems(filters): - if key == 'webIDLFile': + if key == "webIDLFile": # Special-case this part to make it fast, since most of our # getDescriptors calls are conditioned on a webIDLFile. We may # not have this key, in which case we have no descriptors # either. curr = self.descriptorsByFile.get(val, []) continue - elif key == 'hasInterfaceObject': + elif key == "hasInterfaceObject": getter = lambda x: x.interface.hasInterfaceObject() - elif key == 'hasInterfacePrototypeObject': + elif key == "hasInterfacePrototypeObject": getter = lambda x: x.interface.hasInterfacePrototypeObject() - elif key == 'hasInterfaceOrInterfacePrototypeObject': + elif key == "hasInterfaceOrInterfacePrototypeObject": getter = lambda x: x.hasInterfaceOrInterfacePrototypeObject() - elif key == 'isCallback': + elif key == "isCallback": getter = lambda x: x.interface.isCallback() - elif key == 'isJSImplemented': + elif key == "isJSImplemented": getter = lambda x: x.interface.isJSImplemented() - elif key == 'isExposedInAnyWorker': + elif key == "isExposedInAnyWorker": getter = lambda x: x.interface.isExposedInAnyWorker() - elif key == 'isExposedInWorkerDebugger': + elif key == "isExposedInWorkerDebugger": getter = lambda x: x.interface.isExposedInWorkerDebugger() - elif key == 'isExposedInAnyWorklet': + elif key == "isExposedInAnyWorklet": getter = lambda x: x.interface.isExposedInAnyWorklet() - elif key == 'isExposedInWindow': + elif key == "isExposedInWindow": getter = lambda x: x.interface.isExposedInWindow() - elif key == 'isSerializable': + elif key == "isSerializable": getter = lambda x: x.interface.isSerializable() else: # Have to watch out: just closing over "key" is not enough, @@ -308,23 +319,27 @@ def MemberIsUnforgeable(member, descriptor): # necessarily booleans. Make sure to return a boolean from this # method, because callers will compare its return value to # booleans. - return bool((member.isAttr() or member.isMethod()) and - not member.isStatic() and - (member.isUnforgeable() or - descriptor.interface.getExtendedAttribute("Unforgeable"))) + return bool( + (member.isAttr() or member.isMethod()) + and not member.isStatic() + and ( + member.isUnforgeable() + or descriptor.interface.getExtendedAttribute("Unforgeable") + ) + ) class Descriptor(DescriptorProvider): """ Represents a single descriptor for an interface. See Bindings.conf. 
""" + def __init__(self, config, interface, desc): DescriptorProvider.__init__(self) self.config = config self.interface = interface - self.wantsXrays = (not interface.isExternal() and - interface.isExposedInWindow()) + self.wantsXrays = not interface.isExternal() and interface.isExposedInWindow() if self.wantsXrays: # We could try to restrict self.wantsXrayExpandoClass further. For @@ -334,7 +349,7 @@ def __init__(self, config, interface, desc): # of not only the members of "interface" but also of all its # ancestors, because those can have members living in our slots too. # For now, do the simple thing. - self.wantsXrayExpandoClass = (interface.totalMembersInSlots != 0) + self.wantsXrayExpandoClass = interface.totalMembersInSlots != 0 # Read the desc, and fill in the relevant defaults. ifaceName = self.interface.identifier.name @@ -353,7 +368,7 @@ def __init__(self, config, interface, desc): else: nativeTypeDefault = "mozilla::dom::" + ifaceName - self.nativeType = desc.get('nativeType', nativeTypeDefault) + self.nativeType = desc.get("nativeType", nativeTypeDefault) # Now create a version of nativeType that doesn't have extra # mozilla::dom:: at the beginning. prettyNativeType = self.nativeType.split("::") @@ -363,7 +378,7 @@ def __init__(self, config, interface, desc): prettyNativeType.pop(0) self.prettyNativeType = "::".join(prettyNativeType) - self.jsImplParent = desc.get('jsImplParent', self.nativeType) + self.jsImplParent = desc.get("jsImplParent", self.nativeType) # Do something sane for JSObject if self.nativeType == "JSObject": @@ -374,51 +389,58 @@ def __init__(self, config, interface, desc): # Use our local version of the header, not the exported one, so that # test bindings, which don't export, will work correctly. basename = os.path.basename(self.interface.filename()) - headerDefault = basename.replace('.webidl', 'Binding.h') + headerDefault = basename.replace(".webidl", "Binding.h") else: - if not self.interface.isExternal() and self.interface.getExtendedAttribute("HeaderFile"): + if not self.interface.isExternal() and self.interface.getExtendedAttribute( + "HeaderFile" + ): headerDefault = self.interface.getExtendedAttribute("HeaderFile")[0] elif self.interface.isIteratorInterface(): headerDefault = "mozilla/dom/IterableIterator.h" else: headerDefault = self.nativeType headerDefault = headerDefault.replace("::", "/") + ".h" - self.headerFile = desc.get('headerFile', headerDefault) + self.headerFile = desc.get("headerFile", headerDefault) self.headerIsDefault = self.headerFile == headerDefault if self.jsImplParent == self.nativeType: self.jsImplParentHeader = self.headerFile else: self.jsImplParentHeader = self.jsImplParent.replace("::", "/") + ".h" - self.notflattened = desc.get('notflattened', False) - self.register = desc.get('register', True) + self.notflattened = desc.get("notflattened", False) + self.register = desc.get("register", True) # If we're concrete, we need to crawl our ancestor interfaces and mark # them as having a concrete descendant. - concreteDefault = (not self.interface.isExternal() and - not self.interface.isCallback() and - not self.interface.isNamespace() and - # We're going to assume that leaf interfaces are - # concrete; otherwise what's the point? Also - # interfaces with constructors had better be - # concrete; otherwise how can you construct them? 
- (not self.interface.hasChildInterfaces() or - self.interface.ctor() is not None)) - - self.concrete = desc.get('concrete', concreteDefault) - self.hasUnforgeableMembers = (self.concrete and - any(MemberIsUnforgeable(m, self) for m in - self.interface.members)) + concreteDefault = ( + not self.interface.isExternal() + and not self.interface.isCallback() + and not self.interface.isNamespace() + and + # We're going to assume that leaf interfaces are + # concrete; otherwise what's the point? Also + # interfaces with constructors had better be + # concrete; otherwise how can you construct them? + ( + not self.interface.hasChildInterfaces() + or self.interface.ctor() is not None + ) + ) + + self.concrete = desc.get("concrete", concreteDefault) + self.hasUnforgeableMembers = self.concrete and any( + MemberIsUnforgeable(m, self) for m in self.interface.members + ) self.operations = { - 'IndexedGetter': None, - 'IndexedSetter': None, - 'IndexedDeleter': None, - 'NamedGetter': None, - 'NamedSetter': None, - 'NamedDeleter': None, - 'Stringifier': None, - 'LegacyCaller': None, - } + "IndexedGetter": None, + "IndexedSetter": None, + "IndexedDeleter": None, + "NamedGetter": None, + "NamedSetter": None, + "NamedDeleter": None, + "Stringifier": None, + "LegacyCaller": None, + } self.hasDefaultToJSON = False @@ -437,7 +459,7 @@ def addOperation(operation, m): if not self.interface.isExternal(): for m in self.interface.members: if m.isMethod() and m.isStringifier(): - addOperation('Stringifier', m) + addOperation("Stringifier", m) if m.isMethod() and m.isDefaultToJSON(): self.hasDefaultToJSON = True @@ -452,10 +474,15 @@ def addOperation(operation, m): # Check that we don't have duplicated instrumented props. uniqueInstrumentedProps = set(self.instrumentedProps) if len(uniqueInstrumentedProps) != len(self.instrumentedProps): - duplicates = [p for p in uniqueInstrumentedProps if - self.instrumentedProps.count(p) > 1] - raise TypeError("Duplicated instrumented properties: %s.\n%s" % - (duplicates, self.interface.location)) + duplicates = [ + p + for p in uniqueInstrumentedProps + if self.instrumentedProps.count(p) > 1 + ] + raise TypeError( + "Duplicated instrumented properties: %s.\n%s" + % (duplicates, self.interface.location) + ) if self.concrete: self.proxy = False @@ -465,12 +492,16 @@ def addOperation(operation, m): # practice these are on most-derived prototypes. 
if m.isMethod() and m.isLegacycaller(): if not m.isIdentifierLess(): - raise TypeError("We don't support legacycaller with " - "identifier.\n%s" % m.location) + raise TypeError( + "We don't support legacycaller with " + "identifier.\n%s" % m.location + ) if len(m.signatures()) != 1: - raise TypeError("We don't support overloaded " - "legacycaller.\n%s" % m.location) - addOperation('LegacyCaller', m) + raise TypeError( + "We don't support overloaded " + "legacycaller.\n%s" % m.location + ) + addOperation("LegacyCaller", m) while iface: for m in iface.members: @@ -479,97 +510,110 @@ def addOperation(operation, m): def addIndexedOrNamedOperation(operation, m): if m.isIndexed(): - operation = 'Indexed' + operation + operation = "Indexed" + operation else: assert m.isNamed() - operation = 'Named' + operation + operation = "Named" + operation addOperation(operation, m) if m.isGetter(): - addIndexedOrNamedOperation('Getter', m) + addIndexedOrNamedOperation("Getter", m) if m.isSetter(): - addIndexedOrNamedOperation('Setter', m) + addIndexedOrNamedOperation("Setter", m) if m.isDeleter(): - addIndexedOrNamedOperation('Deleter', m) + addIndexedOrNamedOperation("Deleter", m) if m.isLegacycaller() and iface != self.interface: - raise TypeError("We don't support legacycaller on " - "non-leaf interface %s.\n%s" % - (iface, iface.location)) + raise TypeError( + "We don't support legacycaller on " + "non-leaf interface %s.\n%s" % (iface, iface.location) + ) - iface.setUserData('hasConcreteDescendant', True) + iface.setUserData("hasConcreteDescendant", True) iface = iface.parent - self.proxy = (self.supportsIndexedProperties() or - (self.supportsNamedProperties() and - not self.hasNamedPropertiesObject) or - self.isMaybeCrossOriginObject()) + self.proxy = ( + self.supportsIndexedProperties() + or ( + self.supportsNamedProperties() and not self.hasNamedPropertiesObject + ) + or self.isMaybeCrossOriginObject() + ) if self.proxy: - if (self.isMaybeCrossOriginObject() and - (self.supportsIndexedProperties() or - self.supportsNamedProperties())): - raise TypeError("We don't support named or indexed " - "properties on maybe-cross-origin objects. " - "This lets us assume that their proxy " - "hooks are never called via Xrays. " - "Fix %s.\n%s" % - (self.interface, self.interface.location)) - - if (not self.operations['IndexedGetter'] and - (self.operations['IndexedSetter'] or - self.operations['IndexedDeleter'])): - raise SyntaxError("%s supports indexed properties but does " - "not have an indexed getter.\n%s" % - (self.interface, self.interface.location)) - if (not self.operations['NamedGetter'] and - (self.operations['NamedSetter'] or - self.operations['NamedDeleter'])): - raise SyntaxError("%s supports named properties but does " - "not have a named getter.\n%s" % - (self.interface, self.interface.location)) + if self.isMaybeCrossOriginObject() and ( + self.supportsIndexedProperties() or self.supportsNamedProperties() + ): + raise TypeError( + "We don't support named or indexed " + "properties on maybe-cross-origin objects. " + "This lets us assume that their proxy " + "hooks are never called via Xrays. 
" + "Fix %s.\n%s" % (self.interface, self.interface.location) + ) + + if not self.operations["IndexedGetter"] and ( + self.operations["IndexedSetter"] + or self.operations["IndexedDeleter"] + ): + raise SyntaxError( + "%s supports indexed properties but does " + "not have an indexed getter.\n%s" + % (self.interface, self.interface.location) + ) + if not self.operations["NamedGetter"] and ( + self.operations["NamedSetter"] or self.operations["NamedDeleter"] + ): + raise SyntaxError( + "%s supports named properties but does " + "not have a named getter.\n%s" + % (self.interface, self.interface.location) + ) iface = self.interface while iface: - iface.setUserData('hasProxyDescendant', True) + iface.setUserData("hasProxyDescendant", True) iface = iface.parent - if desc.get('wantsQI', None) is not None: - self._wantsQI = desc.get('wantsQI', None) - self.wrapperCache = (not self.interface.isCallback() and - not self.interface.isIteratorInterface() and - desc.get('wrapperCache', True)) + if desc.get("wantsQI", None) is not None: + self._wantsQI = desc.get("wantsQI", None) + self.wrapperCache = ( + not self.interface.isCallback() + and not self.interface.isIteratorInterface() + and desc.get("wrapperCache", True) + ) self.name = interface.identifier.name # self.implicitJSContext is a list of names of methods and attributes # that need a JSContext. if self.interface.isJSImplemented(): - self.implicitJSContext = ['constructor'] + self.implicitJSContext = ["constructor"] else: - self.implicitJSContext = desc.get('implicitJSContext', []) + self.implicitJSContext = desc.get("implicitJSContext", []) assert isinstance(self.implicitJSContext, list) self._binaryNames = {} if not self.interface.isExternal(): + def maybeAddBinaryName(member): binaryName = member.getExtendedAttribute("BinaryName") if binaryName: assert isinstance(binaryName, list) assert len(binaryName) == 1 - self._binaryNames.setdefault(member.identifier.name, - binaryName[0]) + self._binaryNames.setdefault(member.identifier.name, binaryName[0]) + for member in self.interface.members: if not member.isAttr() and not member.isMethod(): continue - maybeAddBinaryName(member); + maybeAddBinaryName(member) ctor = self.interface.ctor() if ctor: maybeAddBinaryName(ctor) # Some default binary names for cases when nothing else got set. - self._binaryNames.setdefault('__legacycaller', 'LegacyCall') - self._binaryNames.setdefault('__stringifier', 'Stringify') + self._binaryNames.setdefault("__legacycaller", "LegacyCall") + self._binaryNames.setdefault("__stringifier", "Stringify") # Build the prototype chain. 
        self.prototypeChain = []
@@ -577,12 +621,14 @@ def maybeAddBinaryName(member):
        parent = interface
        while parent:
            self.needsMissingPropUseCounters = (
-                self.needsMissingPropUseCounters or
-                parent.getExtendedAttribute("InstrumentedProps"))
+                self.needsMissingPropUseCounters
+                or parent.getExtendedAttribute("InstrumentedProps")
+            )
            self.prototypeChain.insert(0, parent.identifier.name)
            parent = parent.parent
-        config.maxProtoChainLength = max(config.maxProtoChainLength,
-                                         len(self.prototypeChain))
+        config.maxProtoChainLength = max(
+            config.maxProtoChainLength, len(self.prototypeChain)
+        )

    def binaryNameFor(self, name):
        return self._binaryNames.get(name, name)
@@ -598,8 +644,10 @@ def parentPrototypeName(self):
        return self.getDescriptor(self.prototypeChain[-2]).name

    def hasInterfaceOrInterfacePrototypeObject(self):
-        return (self.interface.hasInterfaceObject() or
-                self.interface.hasInterfacePrototypeObject())
+        return (
+            self.interface.hasInterfaceObject()
+            or self.interface.hasInterfacePrototypeObject()
+        )

    @property
    def hasNamedPropertiesObject(self):
@@ -607,7 +655,7 @@ def hasNamedPropertiesObject(self):

    def getExtendedAttributes(self, member, getter=False, setter=False):
        def ensureValidBoolExtendedAttribute(attr, name):
-            if (attr is not None and attr is not True):
+            if attr is not None and attr is not True:
                raise TypeError("Unknown value for '%s': %s" % (name, attr[0]))

        def ensureValidThrowsExtendedAttribute(attr):
@@ -630,11 +678,15 @@ def maybeAppendCanOOMToAttrs(attrs, canOOM):
                attrs.append("canOOM")

        def maybeAppendNeedsSubjectPrincipalToAttrs(attrs, needsSubjectPrincipal):
-            if (needsSubjectPrincipal is not None and
-                needsSubjectPrincipal is not True and
-                needsSubjectPrincipal != ["NonSystem"]):
-                raise TypeError("Unknown value for 'NeedsSubjectPrincipal': %s" %
-                                needsSubjectPrincipal[0])
+            if (
+                needsSubjectPrincipal is not None
+                and needsSubjectPrincipal is not True
+                and needsSubjectPrincipal != ["NonSystem"]
+            ):
+                raise TypeError(
+                    "Unknown value for 'NeedsSubjectPrincipal': %s"
+                    % needsSubjectPrincipal[0]
+                )

            if needsSubjectPrincipal is not None:
                attrs.append("needsSubjectPrincipal")
@@ -642,12 +694,14 @@ def maybeAppendNeedsSubjectPrincipalToAttrs(attrs, needsSubjectPrincipal):
                    attrs.append("needsNonSystemSubjectPrincipal")

        name = member.identifier.name
-        throws = self.interface.isJSImplemented() or member.getExtendedAttribute("Throws")
+        throws = self.interface.isJSImplemented() or member.getExtendedAttribute(
+            "Throws"
+        )
        canOOM = member.getExtendedAttribute("CanOOM")
        needsSubjectPrincipal = member.getExtendedAttribute("NeedsSubjectPrincipal")
        attrs = []
        if name in self.implicitJSContext:
-            attrs.append('implicitJSContext')
+            attrs.append("implicitJSContext")
        if member.isMethod():
            # JSObject-returning [NewObject] methods must be fallible,
            # since they have to (fallibly) allocate the new JSObject.
@@ -658,8 +712,7 @@ def maybeAppendNeedsSubjectPrincipalToAttrs(attrs, needsSubjectPrincipal): canOOM = True maybeAppendInfallibleToAttrs(attrs, throws) maybeAppendCanOOMToAttrs(attrs, canOOM) - maybeAppendNeedsSubjectPrincipalToAttrs(attrs, - needsSubjectPrincipal) + maybeAppendNeedsSubjectPrincipalToAttrs(attrs, needsSubjectPrincipal) return attrs assert member.isAttr() @@ -674,14 +727,18 @@ def maybeAppendNeedsSubjectPrincipalToAttrs(attrs, needsSubjectPrincipal): maybeAppendCanOOMToAttrs(attrs, canOOM) if needsSubjectPrincipal is None: needsSubjectPrincipalAttr = ( - "GetterNeedsSubjectPrincipal" if getter else "SetterNeedsSubjectPrincipal") + "GetterNeedsSubjectPrincipal" + if getter + else "SetterNeedsSubjectPrincipal" + ) needsSubjectPrincipal = member.getExtendedAttribute( - needsSubjectPrincipalAttr) + needsSubjectPrincipalAttr + ) maybeAppendNeedsSubjectPrincipalToAttrs(attrs, needsSubjectPrincipal) return attrs def supportsIndexedProperties(self): - return self.operations['IndexedGetter'] is not None + return self.operations["IndexedGetter"] is not None def lengthNeedsCallerType(self): """ @@ -691,11 +748,11 @@ def lengthNeedsCallerType(self): calls need one too. """ assert self.supportsIndexedProperties() - indexedGetter = self.operations['IndexedGetter'] + indexedGetter = self.operations["IndexedGetter"] return indexedGetter.getExtendedAttribute("NeedsCallerType") def supportsNamedProperties(self): - return self.operations['NamedGetter'] is not None + return self.operations["NamedGetter"] is not None def supportedNamesNeedCallerType(self): """ @@ -704,13 +761,17 @@ def supportedNamesNeedCallerType(self): GetSupportedNames. """ assert self.supportsNamedProperties() - namedGetter = self.operations['NamedGetter'] + namedGetter = self.operations["NamedGetter"] return namedGetter.getExtendedAttribute("NeedsCallerType") def isMaybeCrossOriginObject(self): # If we're isGlobal and have cross-origin members, we're a Window, and # that's not a cross-origin object. The WindowProxy is. - return self.concrete and self.interface.hasCrossOriginMembers and not self.isGlobal() + return ( + self.concrete + and self.interface.hasCrossOriginMembers + and not self.isGlobal() + ) def needsHeaderInclude(self): """ @@ -721,26 +782,36 @@ def needsHeaderInclude(self): have a parent. """ - return (self.interface.isExternal() or self.concrete or - self.interface.hasInterfacePrototypeObject() or - any((m.isAttr() or m.isMethod()) and m.isStatic() for m in self.interface.members) or - self.interface.parent) + return ( + self.interface.isExternal() + or self.concrete + or self.interface.hasInterfacePrototypeObject() + or any( + (m.isAttr() or m.isMethod()) and m.isStatic() + for m in self.interface.members + ) + or self.interface.parent + ) def hasThreadChecks(self): # isExposedConditionally does not necessarily imply thread checks # (since at least [SecureContext] is independent of them), but we're # only used to decide whether to include nsThreadUtils.h, so we don't # worry about that. 
- return ((self.isExposedConditionally() and - not self.interface.isExposedInWindow()) or - self.interface.isExposedInSomeButNotAllWorkers()) + return ( + self.isExposedConditionally() and not self.interface.isExposedInWindow() + ) or self.interface.isExposedInSomeButNotAllWorkers() def hasCEReactions(self): - return any(m.getExtendedAttribute("CEReactions") for m in self.interface.members) + return any( + m.getExtendedAttribute("CEReactions") for m in self.interface.members + ) def isExposedConditionally(self): - return (self.interface.isExposedConditionally() or - self.interface.isExposedInSomeButNotAllWorkers()) + return ( + self.interface.isExposedConditionally() + or self.interface.isExposedInSomeButNotAllWorkers() + ) def needsXrayResolveHooks(self): """ @@ -751,9 +822,13 @@ def needsXrayResolveHooks(self): due to chrome touching them and that's all those hooks do on those elements. So we special-case those here. """ - return (self.interface.getExtendedAttribute("NeedResolve") and - self.interface.identifier.name not in ["HTMLObjectElement", - "HTMLEmbedElement"]) + return self.interface.getExtendedAttribute( + "NeedResolve" + ) and self.interface.identifier.name not in [ + "HTMLObjectElement", + "HTMLEmbedElement", + ] + def needsXrayNamedDeleterHook(self): return self.operations["NamedDeleter"] is not None @@ -780,9 +855,11 @@ def namedPropertiesEnumerable(self): @property def registersGlobalNamesOnWindow(self): - return (self.interface.hasInterfaceObject() and - self.interface.isExposedInWindow() and - self.register) + return ( + self.interface.hasInterfaceObject() + and self.interface.isExposedInWindow() + and self.register + ) def getDescriptor(self, interfaceName): """ @@ -801,7 +878,7 @@ def getTypesFromDescriptor(descriptor, includeArgs=True, includeReturns=True): default returns all argument types (which includes types of writable attributes) and all return types (which includes types of all attributes). """ - assert(includeArgs or includeReturns) # Must want _something_. + assert includeArgs or includeReturns # Must want _something_. members = [m for m in descriptor.interface.members] if descriptor.interface.ctor(): members.append(descriptor.interface.ctor()) @@ -816,9 +893,11 @@ def getTypesFromDescriptor(descriptor, includeArgs=True, includeReturns=True): if includeArgs: types.extend(a.type for a in arguments) - types.extend(a.type for a in members if - (a.isAttr() and (includeReturns or - (includeArgs and not a.readonly)))) + types.extend( + a.type + for a in members + if (a.isAttr() and (includeReturns or (includeArgs and not a.readonly))) + ) if descriptor.interface.maplikeOrSetlikeOrIterable: maplikeOrSetlikeOrIterable = descriptor.interface.maplikeOrSetlikeOrIterable @@ -841,7 +920,10 @@ def getTypesFromDescriptor(descriptor, includeArgs=True, includeReturns=True): elif maplikeOrSetlikeOrIterable.isSetlike(): assert maplikeOrSetlikeOrIterable.hasKeyType() assert maplikeOrSetlikeOrIterable.hasValueType() - assert maplikeOrSetlikeOrIterable.keyType == maplikeOrSetlikeOrIterable.valueType + assert ( + maplikeOrSetlikeOrIterable.keyType + == maplikeOrSetlikeOrIterable.valueType + ) # As in the maplike case, we don't always declare our return values # quite correctly. 
if includeReturns: @@ -901,6 +983,7 @@ def getAllTypes(descriptors, dictionaries, callbacks): for t in getTypesFromCallback(callback): yield (t, None) + def iteratorNativeType(descriptor): assert descriptor.interface.isIterable() iterableDecl = descriptor.interface.maplikeOrSetlikeOrIterable diff --git a/dom/bindings/GenerateCSS2PropertiesWebIDL.py b/dom/bindings/GenerateCSS2PropertiesWebIDL.py index 8d45c0158697ac..8bb71f53ffdad0 100644 --- a/dom/bindings/GenerateCSS2PropertiesWebIDL.py +++ b/dom/bindings/GenerateCSS2PropertiesWebIDL.py @@ -11,8 +11,12 @@ # (whether camelCase, _underscorePrefixed, etc.) and the given array of # extended attributes. def generateLine(propName, extendedAttrs): - return " [%s] attribute [TreatNullAs=EmptyString] DOMString %s;\n" % (", ".join(extendedAttrs), - propName) + return " [%s] attribute [TreatNullAs=EmptyString] DOMString %s;\n" % ( + ", ".join(extendedAttrs), + propName, + ) + + def generate(output, idlFilename, dataFile): propList = runpy.run_path(dataFile)["data"] props = "" @@ -21,8 +25,11 @@ def generate(output, idlFilename, dataFile): continue # Unfortunately, even some of the getters here are fallible # (e.g. on nsComputedDOMStyle). - extendedAttrs = ["CEReactions", "Throws", - "SetterNeedsSubjectPrincipal=NonSystem"] + extendedAttrs = [ + "CEReactions", + "Throws", + "SetterNeedsSubjectPrincipal=NonSystem", + ] if p.pref != "": # BackdropFilter is a special case where we want WebIDL to check @@ -36,7 +43,7 @@ def generate(output, idlFilename, dataFile): # webkit properties get a camelcase "webkitFoo" accessor # as well as a capitalized "WebkitFoo" alias (added here). - if (prop.startswith("Webkit")): + if prop.startswith("Webkit"): extendedAttrs.append('BindingAlias="%s"' % prop) # Generate a name with camelCase spelling of property-name (or capitalized, @@ -65,20 +72,26 @@ def generate(output, idlFilename, dataFile): props += generateLine(prop, extendedAttrs) - idlFile = open(idlFilename, "r") idlTemplate = idlFile.read() idlFile.close() - output.write("/* THIS IS AN AUTOGENERATED FILE. DO NOT EDIT */\n\n" + - string.Template(idlTemplate).substitute({"props": props}) + '\n') + output.write( + "/* THIS IS AN AUTOGENERATED FILE. 
DO NOT EDIT */\n\n" + + string.Template(idlTemplate).substitute({"props": props}) + + "\n" + ) + def main(): parser = argparse.ArgumentParser() - parser.add_argument('idlFilename', help='IDL property file template') - parser.add_argument('preprocessorHeader', help='Header file to pass through the preprocessor') + parser.add_argument("idlFilename", help="IDL property file template") + parser.add_argument( + "preprocessorHeader", help="Header file to pass through the preprocessor" + ) args = parser.parse_args() generate(sys.stdout, args.idlFilename, args.preprocessorHeader) -if __name__ == '__main__': + +if __name__ == "__main__": main() diff --git a/dom/bindings/mach_commands.py b/dom/bindings/mach_commands.py index 0ee7cc4b4acd3b..28d278015691a6 100644 --- a/dom/bindings/mach_commands.py +++ b/dom/bindings/mach_commands.py @@ -16,16 +16,23 @@ from mozbuild.base import MachCommandBase from mozbuild.util import mkdir + def get_test_parser(): import runtests + return runtests.get_parser + @CommandProvider class WebIDLProvider(MachCommandBase): - @Command('webidl-example', category='misc', - description='Generate example files for a WebIDL interface.') - @CommandArgument('interface', nargs='+', - help='Interface(s) whose examples to generate.') + @Command( + "webidl-example", + category="misc", + description="Generate example files for a WebIDL interface.", + ) + @CommandArgument( + "interface", nargs="+", help="Interface(s) whose examples to generate." + ) def webidl_example(self, interface): from mozwebidlcodegen import BuildSystemWebIDL @@ -33,11 +40,14 @@ def webidl_example(self, interface): for i in interface: manager.generate_example_files(i) - @Command('webidl-parser-test', category='testing', parser=get_test_parser, - description='Run WebIDL tests (Interface Browser parser).') + @Command( + "webidl-parser-test", + category="testing", + parser=get_test_parser, + description="Run WebIDL tests (Interface Browser parser).", + ) def webidl_test(self, **kwargs): - sys.path.insert(0, os.path.join(self.topsrcdir, 'other-licenses', - 'ply')) + sys.path.insert(0, os.path.join(self.topsrcdir, "other-licenses", "ply")) # Ensure the topobjdir exists. On a Taskcluster test run there won't be # an objdir yet. 
@@ -57,4 +67,5 @@ def webidl_test(self, **kwargs):
        sys.path.insert(0, self.topobjdir)

        import runtests
+
        return runtests.run_tests(kwargs["tests"], verbose=kwargs["verbose"])
diff --git a/dom/bindings/moz.build b/dom/bindings/moz.build
index 4ea0fce174f105..37f6f3e00af893 100644
--- a/dom/bindings/moz.build
+++ b/dom/bindings/moz.build
@@ -7,57 +7,55 @@
with Files("**"):
    BUG_COMPONENT = ("Core", "DOM: Bindings (WebIDL)")

-TEST_DIRS += ['test']
+TEST_DIRS += ["test"]

-XPIDL_SOURCES += [
-    'nsIScriptError.idl'
-]
+XPIDL_SOURCES += ["nsIScriptError.idl"]

-XPIDL_MODULE = 'dom_bindings'
+XPIDL_MODULE = "dom_bindings"

EXPORTS.ipc += [
-    'ErrorIPCUtils.h',
+    "ErrorIPCUtils.h",
]

EXPORTS.mozilla += [
-    'ErrorResult.h',
-    'RootedOwningNonNull.h',
-    'RootedRefPtr.h',
+    "ErrorResult.h",
+    "RootedOwningNonNull.h",
+    "RootedRefPtr.h",
]

EXPORTS.mozilla.dom += [
-    'AtomList.h',
-    'BindingCallContext.h',
-    'BindingDeclarations.h',
-    'BindingIPCUtils.h',
-    'BindingUtils.h',
-    'CallbackFunction.h',
-    'CallbackInterface.h',
-    'CallbackObject.h',
-    'DOMExceptionNames.h',
-    'DOMJSClass.h',
-    'DOMJSProxyHandler.h',
-    'DOMString.h',
-    'Errors.msg',
-    'Exceptions.h',
-    'FakeString.h',
-    'IterableIterator.h',
-    'JSSlots.h',
-    'NonRefcountedDOMObject.h',
-    'Nullable.h',
-    'PinnedStringId.h',
-    'PrimitiveConversions.h',
-    'ReadableStream.h',
-    'Record.h',
-    'RemoteObjectProxy.h',
-    'RootedDictionary.h',
-    'SimpleGlobalObject.h',
-    'SpiderMonkeyInterface.h',
-    'ToJSValue.h',
-    'TypedArray.h',
-    'UnionMember.h',
-    'WebIDLGlobalNameHash.h',
-    'XrayExpandoClass.h',
+    "AtomList.h",
+    "BindingCallContext.h",
+    "BindingDeclarations.h",
+    "BindingIPCUtils.h",
+    "BindingUtils.h",
+    "CallbackFunction.h",
+    "CallbackInterface.h",
+    "CallbackObject.h",
+    "DOMExceptionNames.h",
+    "DOMJSClass.h",
+    "DOMJSProxyHandler.h",
+    "DOMString.h",
+    "Errors.msg",
+    "Exceptions.h",
+    "FakeString.h",
+    "IterableIterator.h",
+    "JSSlots.h",
+    "NonRefcountedDOMObject.h",
+    "Nullable.h",
+    "PinnedStringId.h",
+    "PrimitiveConversions.h",
+    "ReadableStream.h",
+    "Record.h",
+    "RemoteObjectProxy.h",
+    "RootedDictionary.h",
+    "SimpleGlobalObject.h",
+    "SpiderMonkeyInterface.h",
+    "ToJSValue.h",
+    "TypedArray.h",
+    "UnionMember.h",
+    "WebIDLGlobalNameHash.h",
+    "XrayExpandoClass.h",
]

# Generated bindings reference *Binding.h, not mozilla/dom/*Binding.h. And,
@@ -67,57 +65,57 @@ EXPORTS.mozilla.dom += [
# Ideally, binding generation uses the prefixed header file names.
# Bug 932082 tracks.
LOCAL_INCLUDES += [ - '!/dist/include/mozilla/dom', + "!/dist/include/mozilla/dom", ] LOCAL_INCLUDES += [ - '/dom/base', - '/dom/battery', - '/dom/canvas', - '/dom/geolocation', - '/dom/html', - '/dom/indexedDB', - '/dom/media/webaudio', - '/dom/media/webrtc', - '/dom/media/webrtc/common/time_profiling', - '/dom/media/webrtc/jsapi', - '/dom/media/webrtc/libwebrtcglue', - '/dom/media/webrtc/transport', - '/dom/media/webspeech/recognition', - '/dom/svg', - '/dom/xml', - '/dom/xslt/base', - '/dom/xslt/xpath', - '/dom/xul', - '/js/xpconnect/src', - '/js/xpconnect/wrappers', - '/layout/generic', - '/layout/style', - '/layout/xul/tree', - '/media/webrtc/', - '/netwerk/base/', - '/third_party/libwebrtc', - '/third_party/libwebrtc/webrtc', + "/dom/base", + "/dom/battery", + "/dom/canvas", + "/dom/geolocation", + "/dom/html", + "/dom/indexedDB", + "/dom/media/webaudio", + "/dom/media/webrtc", + "/dom/media/webrtc/common/time_profiling", + "/dom/media/webrtc/jsapi", + "/dom/media/webrtc/libwebrtcglue", + "/dom/media/webrtc/transport", + "/dom/media/webspeech/recognition", + "/dom/svg", + "/dom/xml", + "/dom/xslt/base", + "/dom/xslt/xpath", + "/dom/xul", + "/js/xpconnect/src", + "/js/xpconnect/wrappers", + "/layout/generic", + "/layout/style", + "/layout/xul/tree", + "/media/webrtc/", + "/netwerk/base/", + "/third_party/libwebrtc", + "/third_party/libwebrtc/webrtc", ] -LOCAL_INCLUDES += ['/third_party/msgpack/include'] +LOCAL_INCLUDES += ["/third_party/msgpack/include"] -DEFINES['GOOGLE_PROTOBUF_NO_RTTI'] = True -DEFINES['GOOGLE_PROTOBUF_NO_STATIC_INITIALIZER'] = True +DEFINES["GOOGLE_PROTOBUF_NO_RTTI"] = True +DEFINES["GOOGLE_PROTOBUF_NO_STATIC_INITIALIZER"] = True UNIFIED_SOURCES += [ - 'BindingUtils.cpp', - 'CallbackInterface.cpp', - 'CallbackObject.cpp', - 'DOMJSProxyHandler.cpp', - 'Exceptions.cpp', - 'IterableIterator.cpp', - 'nsScriptError.cpp', - 'nsScriptErrorWithStack.cpp', - 'RemoteObjectProxy.cpp', - 'SimpleGlobalObject.cpp', - 'ToJSValue.cpp', - 'WebIDLGlobalNameHash.cpp', + "BindingUtils.cpp", + "CallbackInterface.cpp", + "CallbackObject.cpp", + "DOMJSProxyHandler.cpp", + "Exceptions.cpp", + "IterableIterator.cpp", + "nsScriptError.cpp", + "nsScriptErrorWithStack.cpp", + "RemoteObjectProxy.cpp", + "SimpleGlobalObject.cpp", + "ToJSValue.cpp", + "WebIDLGlobalNameHash.cpp", ] # Some tests, including those for for maplike and setlike, require bindings @@ -125,7 +123,7 @@ UNIFIED_SOURCES += [ # "no test classes are exported" rule stated in the test/ directory, but it's # the only way this will work. Test classes are only built in debug mode, and # all tests requiring use of them are only run in debug mode. 
-if CONFIG['MOZ_DEBUG'] and CONFIG['ENABLE_TESTS']: +if CONFIG["MOZ_DEBUG"] and CONFIG["ENABLE_TESTS"]: EXPORTS.mozilla.dom += [ "test/TestFunctions.h", "test/TestInterfaceIterableDouble.h", @@ -136,7 +134,7 @@ if CONFIG['MOZ_DEBUG'] and CONFIG['ENABLE_TESTS']: "test/TestInterfaceSetlike.h", "test/TestInterfaceSetlikeNode.h", "test/WrapperCachedNonISupportsTestInterface.h", - ] + ] UNIFIED_SOURCES += [ "test/TestFunctions.cpp", "test/TestInterfaceIterableDouble.cpp", @@ -147,43 +145,45 @@ if CONFIG['MOZ_DEBUG'] and CONFIG['ENABLE_TESTS']: "test/TestInterfaceSetlike.cpp", "test/TestInterfaceSetlikeNode.cpp", "test/WrapperCachedNonISupportsTestInterface.cpp", - ] + ] -include('/ipc/chromium/chromium-config.mozbuild') +include("/ipc/chromium/chromium-config.mozbuild") -FINAL_LIBRARY = 'xul' +FINAL_LIBRARY = "xul" -SPHINX_TREES['webidl'] = 'docs' +SPHINX_TREES["webidl"] = "docs" -with Files('docs/**'): - SCHEDULES.exclusive = ['docs'] +with Files("docs/**"): + SCHEDULES.exclusive = ["docs"] -SPHINX_PYTHON_PACKAGE_DIRS += ['mozwebidlcodegen'] +SPHINX_PYTHON_PACKAGE_DIRS += ["mozwebidlcodegen"] -with Files('mozwebidlcodegen/**.py'): - SCHEDULES.inclusive += ['docs'] +with Files("mozwebidlcodegen/**.py"): + SCHEDULES.inclusive += ["docs"] -if CONFIG['MOZ_BUILD_APP'] in ['browser', 'mobile/android', 'xulrunner']: +if CONFIG["MOZ_BUILD_APP"] in ["browser", "mobile/android", "xulrunner"]: # This is needed for Window.webidl - DEFINES['HAVE_SIDEBAR'] = True + DEFINES["HAVE_SIDEBAR"] = True PYTHON_UNITTEST_MANIFESTS += [ - 'mozwebidlcodegen/test/python.ini', + "mozwebidlcodegen/test/python.ini", ] -if CONFIG['CC_TYPE'] in ('clang', 'gcc'): - CXXFLAGS += ['-Wno-error=shadow'] +if CONFIG["CC_TYPE"] in ("clang", "gcc"): + CXXFLAGS += ["-Wno-error=shadow"] # Suppress warnings in third-party code. CXXFLAGS += [ - '-Wno-maybe-uninitialized', + "-Wno-maybe-uninitialized", ] -if CONFIG['COMPILE_ENVIRONMENT']: - GeneratedFile('CSS2Properties.webidl', - script='GenerateCSS2PropertiesWebIDL.py', - entry_point='generate', - inputs=[ - '/dom/webidl/CSS2Properties.webidl.in', - '!/layout/style/ServoCSSPropList.py', - ]) +if CONFIG["COMPILE_ENVIRONMENT"]: + GeneratedFile( + "CSS2Properties.webidl", + script="GenerateCSS2PropertiesWebIDL.py", + entry_point="generate", + inputs=[ + "/dom/webidl/CSS2Properties.webidl.in", + "!/layout/style/ServoCSSPropList.py", + ], + ) diff --git a/dom/bindings/mozwebidlcodegen/__init__.py b/dom/bindings/mozwebidlcodegen/__init__.py index 6bc2609596cb92..31e0da4a36c53c 100644 --- a/dom/bindings/mozwebidlcodegen/__init__.py +++ b/dom/bindings/mozwebidlcodegen/__init__.py @@ -92,48 +92,52 @@ class WebIDLCodegenManagerState(dict): VERSION = 3 def __init__(self, fh=None): - self['version'] = self.VERSION - self['webidls'] = {} - self['global_depends'] = {} + self["version"] = self.VERSION + self["webidls"] = {} + self["global_depends"] = {} if not fh: return state = json.load(fh) - if state['version'] != self.VERSION: - raise Exception('Unknown state version: %s' % state['version']) + if state["version"] != self.VERSION: + raise Exception("Unknown state version: %s" % state["version"]) - self['version'] = state['version'] - self['global_depends'] = state['global_depends'] + self["version"] = state["version"] + self["global_depends"] = state["global_depends"] - for k, v in state['webidls'].items(): - self['webidls'][k] = v + for k, v in state["webidls"].items(): + self["webidls"][k] = v # Sets are converted to lists for serialization because JSON # doesn't support sets. 
- self['webidls'][k]['inputs'] = set(v['inputs']) - self['webidls'][k]['outputs'] = set(v['outputs']) + self["webidls"][k]["inputs"] = set(v["inputs"]) + self["webidls"][k]["outputs"] = set(v["outputs"]) - self['dictionaries_convertible_to_js'] = set( - state['dictionaries_convertible_to_js']) + self["dictionaries_convertible_to_js"] = set( + state["dictionaries_convertible_to_js"] + ) - self['dictionaries_convertible_from_js'] = set( - state['dictionaries_convertible_from_js']) + self["dictionaries_convertible_from_js"] = set( + state["dictionaries_convertible_from_js"] + ) def dump(self, fh): """Dump serialized state to a file handle.""" normalized = deepcopy(self) - for k, v in self['webidls'].items(): + for k, v in self["webidls"].items(): # Convert sets to lists because JSON doesn't support sets. - normalized['webidls'][k]['outputs'] = sorted(v['outputs']) - normalized['webidls'][k]['inputs'] = sorted(v['inputs']) + normalized["webidls"][k]["outputs"] = sorted(v["outputs"]) + normalized["webidls"][k]["inputs"] = sorted(v["inputs"]) - normalized['dictionaries_convertible_to_js'] = sorted( - self['dictionaries_convertible_to_js']) + normalized["dictionaries_convertible_to_js"] = sorted( + self["dictionaries_convertible_to_js"] + ) - normalized['dictionaries_convertible_from_js'] = sorted( - self['dictionaries_convertible_from_js']) + normalized["dictionaries_convertible_from_js"] = sorted( + self["dictionaries_convertible_from_js"] + ) json.dump(normalized, fh, sort_keys=True) @@ -147,34 +151,43 @@ class WebIDLCodegenManager(LoggingMixin): # Global parser derived declaration files. GLOBAL_DECLARE_FILES = { - 'GeneratedAtomList.h', - 'GeneratedEventList.h', - 'PrototypeList.h', - 'RegisterBindings.h', - 'RegisterWorkerBindings.h', - 'RegisterWorkerDebuggerBindings.h', - 'RegisterWorkletBindings.h', - 'UnionConversions.h', - 'UnionTypes.h', - 'WebIDLPrefs.h', - 'WebIDLSerializable.h', + "GeneratedAtomList.h", + "GeneratedEventList.h", + "PrototypeList.h", + "RegisterBindings.h", + "RegisterWorkerBindings.h", + "RegisterWorkerDebuggerBindings.h", + "RegisterWorkletBindings.h", + "UnionConversions.h", + "UnionTypes.h", + "WebIDLPrefs.h", + "WebIDLSerializable.h", } # Global parser derived definition files. GLOBAL_DEFINE_FILES = { - 'RegisterBindings.cpp', - 'RegisterWorkerBindings.cpp', - 'RegisterWorkerDebuggerBindings.cpp', - 'RegisterWorkletBindings.cpp', - 'UnionTypes.cpp', - 'PrototypeList.cpp', - 'WebIDLPrefs.cpp', - 'WebIDLSerializable.cpp', + "RegisterBindings.cpp", + "RegisterWorkerBindings.cpp", + "RegisterWorkerDebuggerBindings.cpp", + "RegisterWorkletBindings.cpp", + "UnionTypes.cpp", + "PrototypeList.cpp", + "WebIDLPrefs.cpp", + "WebIDLSerializable.cpp", } - def __init__(self, config_path, webidl_root, inputs, exported_header_dir, - codegen_dir, state_path, cache_dir=None, make_deps_path=None, - make_deps_target=None): + def __init__( + self, + config_path, + webidl_root, + inputs, + exported_header_dir, + codegen_dir, + state_path, + cache_dir=None, + make_deps_path=None, + make_deps_target=None, + ): """Create an instance that manages WebIDLs in the build system. config_path refers to a WebIDL config file (e.g. Bindings.conf). 
@@ -211,22 +224,29 @@ def __init__(self, config_path, webidl_root, inputs, exported_header_dir, self._make_deps_path = make_deps_path self._make_deps_target = make_deps_target - if ((make_deps_path and not make_deps_target) or - (not make_deps_path and make_deps_target)): - raise Exception('Must define both make_deps_path and make_deps_target ' - 'if one is defined.') + if (make_deps_path and not make_deps_target) or ( + not make_deps_path and make_deps_target + ): + raise Exception( + "Must define both make_deps_path and make_deps_target " + "if one is defined." + ) self._parser_results = None self._config = None self._state = WebIDLCodegenManagerState() if os.path.exists(state_path): - with io.open(state_path, 'r') as fh: + with io.open(state_path, "r") as fh: try: self._state = WebIDLCodegenManagerState(fh=fh) except Exception as e: - self.log(logging.WARN, 'webidl_bad_state', {'msg': str(e)}, - 'Bad WebIDL state: {msg}') + self.log( + logging.WARN, + "webidl_bad_state", + {"msg": str(e)}, + "Bad WebIDL state: {msg}", + ) @property def config(self): @@ -287,11 +307,13 @@ def generate_build_files(self): else: changed_inputs = self._compute_changed_inputs() - self._state['global_depends'] = global_hashes - self._state['dictionaries_convertible_to_js'] = set( - d.identifier.name for d in self._config.getDictionariesConvertibleToJS()) - self._state['dictionaries_convertible_from_js'] = set( - d.identifier.name for d in self._config.getDictionariesConvertibleFromJS()) + self._state["global_depends"] = global_hashes + self._state["dictionaries_convertible_to_js"] = set( + d.identifier.name for d in self._config.getDictionariesConvertibleToJS() + ) + self._state["dictionaries_convertible_from_js"] = set( + d.identifier.name for d in self._config.getDictionariesConvertibleFromJS() + ) # Generate bindings from .webidl files. 
        for filename in sorted(changed_inputs):
@@ -302,7 +324,7 @@ def generate_build_files(self):
            result.updated |= written[1]
            result.unchanged |= written[2]

-            self._state['webidls'][basename] = dict(
+            self._state["webidls"][basename] = dict(
                filename=filename,
                outputs=written[0] | written[1] | written[2],
                inputs=set(deps),
@@ -320,10 +342,10 @@ def generate_build_files(self):
        if self._make_deps_path:
            mk = Makefile()
            codegen_rule = mk.create_rule([self._make_deps_target])
-            codegen_rule.add_dependencies(six.ensure_text(s) for s in
-                                          global_hashes.keys())
-            codegen_rule.add_dependencies(six.ensure_text(p) for p in
-                                          self._input_paths)
+            codegen_rule.add_dependencies(
+                six.ensure_text(s) for s in global_hashes.keys()
+            )
+            codegen_rule.add_dependencies(six.ensure_text(p) for p in self._input_paths)

            with FileAvoidWrite(self._make_deps_path) as fh:
                mk.dump(fh)
@@ -348,15 +370,18 @@ def _parse_webidl(self):
        import WebIDL
        from Configuration import Configuration

-        self.log(logging.INFO, 'webidl_parse',
-                 {'count': len(self._input_paths)},
-                 'Parsing {count} WebIDL files.')
+        self.log(
+            logging.INFO,
+            "webidl_parse",
+            {"count": len(self._input_paths)},
+            "Parsing {count} WebIDL files.",
+        )

        hashes = {}
        parser = WebIDL.Parser(self._cache_dir)

        for path in sorted(self._input_paths):
-            with io.open(path, 'r', encoding='utf-8') as fh:
+            with io.open(path, "r", encoding="utf-8") as fh:
                data = fh.read()
                hashes[path] = hashlib.sha1(six.ensure_binary(data)).hexdigest()
                parser.parse(data, path)
@@ -379,16 +404,19 @@ def _parse_webidl(self):
        )

        self._parser_results = parser.finish()

-        self._config = Configuration(self._config_path, web_roots,
-                                     self._parser_results,
-                                     self._generated_events_stems_as_array)
+        self._config = Configuration(
+            self._config_path,
+            web_roots,
+            self._parser_results,
+            self._generated_events_stems_as_array,
+        )
        self._input_hashes = hashes

    def _write_global_derived(self):
        from Codegen import GlobalGenRoots

-        things = [('declare', f) for f in self.GLOBAL_DECLARE_FILES]
-        things.extend(('define', f) for f in self.GLOBAL_DEFINE_FILES)
+        things = [("declare", f) for f in self.GLOBAL_DECLARE_FILES]
+        things.extend(("define", f) for f in self.GLOBAL_DEFINE_FILES)

        result = (set(), set(), set())

@@ -396,14 +424,14 @@ def _write_global_derived(self):
            stem = mozpath.splitext(filename)[0]
            root = getattr(GlobalGenRoots, stem)(self._config)

-            if what == 'declare':
+            if what == "declare":
                code = root.declare()
                output_root = self._exported_header_dir
-            elif what == 'define':
+            elif what == "define":
                code = root.define()
                output_root = self._codegen_dir
            else:
-                raise Exception('Unknown global gen type: %s' % what)
+                raise Exception("Unknown global gen type: %s" % what)

            output_path = mozpath.join(output_root, filename)
            self._maybe_write_file(output_path, code, result)
@@ -425,8 +453,7 @@ def _compute_changed_inputs(self):
        # a lot of extra work and most build systems don't do that anyway.

        # Now we move on to the input files.
-        old_hashes = {v['filename']: v['sha1']
-                      for v in self._state['webidls'].values()}
+        old_hashes = {v["filename"]: v["sha1"] for v in self._state["webidls"].values()}

        old_filenames = set(old_hashes.keys())
        new_filenames = self._input_paths
@@ -447,19 +474,23 @@ def _compute_changed_inputs(self):
        # Inherit dependencies from previous run. The full set of dependencies
        # is associated with each record, so we don't need to perform any fancy
        # graph traversal.
- for v in self._state['webidls'].values(): - if any(dep for dep in v['inputs'] if dep in changed_inputs): - changed_inputs.add(v['filename']) + for v in self._state["webidls"].values(): + if any(dep for dep in v["inputs"] if dep in changed_inputs): + changed_inputs.add(v["filename"]) # Now check for changes to the set of dictionaries that are convertible to JS - oldDictionariesConvertibleToJS = self._state['dictionaries_convertible_to_js'] + oldDictionariesConvertibleToJS = self._state["dictionaries_convertible_to_js"] newDictionariesConvertibleToJS = self._config.getDictionariesConvertibleToJS() newNames = set(d.identifier.name for d in newDictionariesConvertibleToJS) changedDictionaryNames = oldDictionariesConvertibleToJS ^ newNames # Now check for changes to the set of dictionaries that are convertible from JS - oldDictionariesConvertibleFromJS = self._state['dictionaries_convertible_from_js'] - newDictionariesConvertibleFromJS = self._config.getDictionariesConvertibleFromJS() + oldDictionariesConvertibleFromJS = self._state[ + "dictionaries_convertible_from_js" + ] + newDictionariesConvertibleFromJS = ( + self._config.getDictionariesConvertibleFromJS() + ) newNames = set(d.identifier.name for d in newDictionariesConvertibleFromJS) changedDictionaryNames |= oldDictionariesConvertibleFromJS ^ newNames @@ -486,7 +517,7 @@ def _binding_info(self, p): """ basename = mozpath.basename(p) stem = mozpath.splitext(basename)[0] - binding_stem = '%sBinding' % stem + binding_stem = "%sBinding" % stem if stem in self._exported_stems: header_dir = self._exported_header_dir @@ -496,18 +527,19 @@ def _binding_info(self, p): is_event = stem in self._generated_events_stems files = ( - mozpath.join(header_dir, '%s.h' % binding_stem), - mozpath.join(self._codegen_dir, '%s.cpp' % binding_stem), - mozpath.join(header_dir, '%s.h' % stem) if is_event else None, - mozpath.join(self._codegen_dir, '%s.cpp' % stem) if is_event else None, + mozpath.join(header_dir, "%s.h" % binding_stem), + mozpath.join(self._codegen_dir, "%s.cpp" % binding_stem), + mozpath.join(header_dir, "%s.h" % stem) if is_event else None, + mozpath.join(self._codegen_dir, "%s.cpp" % stem) if is_event else None, ) return stem, binding_stem, is_event, header_dir, files def _example_paths(self, interface): return ( - mozpath.join(self._codegen_dir, '%s-example.h' % interface), - mozpath.join(self._codegen_dir, '%s-example.cpp' % interface)) + mozpath.join(self._codegen_dir, "%s-example.h" % interface), + mozpath.join(self._codegen_dir, "%s-example.cpp" % interface), + ) def expected_build_output_files(self): """Obtain the set of files generate_build_files() should write.""" @@ -535,9 +567,12 @@ def _generate_build_files_for_webidl(self, filename): CGEventRoot, ) - self.log(logging.INFO, 'webidl_generate_build_for_input', - {'filename': filename}, - 'Generating WebIDL files derived from {filename}') + self.log( + logging.INFO, + "webidl_generate_build_for_input", + {"filename": filename}, + "Generating WebIDL files derived from {filename}", + ) stem, binding_stem, is_event, header_dir, files = self._binding_info(filename) root = CGBindingRoot(self._config, binding_stem, filename) @@ -546,8 +581,9 @@ def _generate_build_files_for_webidl(self, filename): if is_event: generated_event = CGEventRoot(self._config, stem) - result = self._maybe_write_codegen(generated_event, files[2], - files[3], result) + result = self._maybe_write_codegen( + generated_event, files[2], files[3], result + ) return result, root.deps() @@ -565,22 +601,22 @@ def 
_global_dependencies_changed(self): for f in current_files: # This will fail if the file doesn't exist. If a current global # dependency doesn't exist, something else is wrong. - with io.open(f, 'rb') as fh: + with io.open(f, "rb") as fh: current_hashes[f] = hashlib.sha1(fh.read()).hexdigest() # The set of files has changed. - if current_files ^ set(self._state['global_depends'].keys()): + if current_files ^ set(self._state["global_depends"].keys()): return True, current_hashes # Compare hashes. for f, sha1 in current_hashes.items(): - if sha1 != self._state['global_depends'][f]: + if sha1 != self._state["global_depends"][f]: return True, current_hashes return False, current_hashes def _save_state(self): - with io.open(self._state_path, 'w', newline='\n') as fh: + with io.open(self._state_path, "w", newline="\n") as fh: self._state.dump(fh) def _maybe_write_codegen(self, obj, declare_path, define_path, result=None): @@ -608,17 +644,21 @@ def _maybe_write_file(self, path, content, result): def create_build_system_manager(topsrcdir, topobjdir, dist_dir): """Create a WebIDLCodegenManager for use by the build system.""" - src_dir = os.path.join(topsrcdir, 'dom', 'bindings') - obj_dir = os.path.join(topobjdir, 'dom', 'bindings') - webidl_root = os.path.join(topsrcdir, 'dom', 'webidl') + src_dir = os.path.join(topsrcdir, "dom", "bindings") + obj_dir = os.path.join(topobjdir, "dom", "bindings") + webidl_root = os.path.join(topsrcdir, "dom", "webidl") - with io.open(os.path.join(obj_dir, 'file-lists.json'), 'r') as fh: + with io.open(os.path.join(obj_dir, "file-lists.json"), "r") as fh: files = json.load(fh) - inputs = (files['webidls'], files['exported_stems'], - files['generated_events_stems'], files['example_interfaces']) + inputs = ( + files["webidls"], + files["exported_stems"], + files["generated_events_stems"], + files["example_interfaces"], + ) - cache_dir = os.path.join(obj_dir, '_cache') + cache_dir = os.path.join(obj_dir, "_cache") try: os.makedirs(cache_dir) except OSError as e: @@ -626,24 +666,25 @@ def create_build_system_manager(topsrcdir, topobjdir, dist_dir): raise return WebIDLCodegenManager( - os.path.join(src_dir, 'Bindings.conf'), + os.path.join(src_dir, "Bindings.conf"), webidl_root, inputs, - os.path.join(dist_dir, 'include', 'mozilla', 'dom'), + os.path.join(dist_dir, "include", "mozilla", "dom"), obj_dir, - os.path.join(obj_dir, 'codegen.json'), + os.path.join(obj_dir, "codegen.json"), cache_dir=cache_dir, # The make rules include a codegen.pp file containing dependencies. 
- make_deps_path=os.path.join(obj_dir, 'codegen.pp'), - make_deps_target='webidl.stub', + make_deps_path=os.path.join(obj_dir, "codegen.pp"), + make_deps_target="webidl.stub", ) class BuildSystemWebIDL(MozbuildObject): @property def manager(self): - if not hasattr(self, '_webidl_manager'): + if not hasattr(self, "_webidl_manager"): self._webidl_manager = create_build_system_manager( - self.topsrcdir, self.topobjdir, self.distdir) + self.topsrcdir, self.topobjdir, self.distdir + ) return self._webidl_manager diff --git a/dom/bindings/mozwebidlcodegen/test/test_mozwebidlcodegen.py b/dom/bindings/mozwebidlcodegen/test/test_mozwebidlcodegen.py index 972fe201332a37..c0733e86153e2a 100644 --- a/dom/bindings/mozwebidlcodegen/test/test_mozwebidlcodegen.py +++ b/dom/bindings/mozwebidlcodegen/test/test_mozwebidlcodegen.py @@ -29,27 +29,30 @@ OUR_DIR = mozpath.abspath(mozpath.dirname(__file__)) -TOPSRCDIR = mozpath.normpath(mozpath.join(OUR_DIR, '..', '..', '..', '..')) +TOPSRCDIR = mozpath.normpath(mozpath.join(OUR_DIR, "..", "..", "..", "..")) class TestWebIDLCodegenManager(unittest.TestCase): TEST_STEMS = { - 'Child', - 'Parent', - 'ExampleBinding', - 'TestEvent', + "Child", + "Parent", + "ExampleBinding", + "TestEvent", } @property def _static_input_paths(self): - s = {mozpath.join(OUR_DIR, p) for p in os.listdir(OUR_DIR) - if p.endswith('.webidl')} + s = { + mozpath.join(OUR_DIR, p) + for p in os.listdir(OUR_DIR) + if p.endswith(".webidl") + } return s @property def _config_path(self): - config = mozpath.join(TOPSRCDIR, 'dom', 'bindings', 'Bindings.conf') + config = mozpath.join(TOPSRCDIR, "dom", "bindings", "Bindings.conf") self.assertTrue(os.path.exists(config)) return config @@ -58,7 +61,7 @@ def _get_manager_args(self): tmp = tempfile.mkdtemp() self.addCleanup(shutil.rmtree, tmp) - cache_dir = mozpath.join(tmp, 'cache') + cache_dir = mozpath.join(tmp, "cache") os.mkdir(cache_dir) ip = self._static_input_paths @@ -74,11 +77,11 @@ def _get_manager_args(self): config_path=self._config_path, webidl_root=cache_dir, inputs=inputs, - exported_header_dir=mozpath.join(tmp, 'exports'), - codegen_dir=mozpath.join(tmp, 'codegen'), - state_path=mozpath.join(tmp, 'state.json'), - make_deps_path=mozpath.join(tmp, 'codegen.pp'), - make_deps_target='codegen.pp', + exported_header_dir=mozpath.join(tmp, "exports"), + codegen_dir=mozpath.join(tmp, "codegen"), + state_path=mozpath.join(tmp, "state.json"), + make_deps_path=mozpath.join(tmp, "codegen.pp"), + make_deps_target="codegen.pp", cache_dir=cache_dir, ) @@ -89,19 +92,21 @@ def test_unknown_state_version(self): """Loading a state file with a too new version resets state.""" args = self._get_manager_args() - p = args['state_path'] + p = args["state_path"] - with io.open(p, 'w', newline='\n') as fh: - json.dump({ - 'version': WebIDLCodegenManagerState.VERSION + 1, - 'foobar': '1', - }, fh) + with io.open(p, "w", newline="\n") as fh: + json.dump( + { + "version": WebIDLCodegenManagerState.VERSION + 1, + "foobar": "1", + }, + fh, + ) manager = WebIDLCodegenManager(**args) - self.assertEqual(manager._state['version'], - WebIDLCodegenManagerState.VERSION) - self.assertNotIn('foobar', manager._state) + self.assertEqual(manager._state["version"], WebIDLCodegenManagerState.VERSION) + self.assertNotIn("foobar", manager._state) def test_generate_build_files(self): """generate_build_files() does the right thing from empty.""" @@ -124,32 +129,36 @@ def test_generate_build_files(self): self.assertIn(mozpath.join(manager._codegen_dir, f), output) for s in self.TEST_STEMS: 
- self.assertTrue(os.path.isfile(mozpath.join( - manager._exported_header_dir, '%sBinding.h' % s))) - self.assertTrue(os.path.isfile(mozpath.join( - manager._codegen_dir, '%sBinding.cpp' % s))) + self.assertTrue( + os.path.isfile( + mozpath.join(manager._exported_header_dir, "%sBinding.h" % s) + ) + ) + self.assertTrue( + os.path.isfile(mozpath.join(manager._codegen_dir, "%sBinding.cpp" % s)) + ) self.assertTrue(os.path.isfile(manager._state_path)) - with io.open(manager._state_path, 'r') as fh: + with io.open(manager._state_path, "r") as fh: state = json.load(fh) - self.assertEqual(state['version'], 3) - self.assertIn('webidls', state) + self.assertEqual(state["version"], 3) + self.assertIn("webidls", state) - child = state['webidls']['Child.webidl'] - self.assertEqual(len(child['inputs']), 2) - self.assertEqual(len(child['outputs']), 2) - self.assertEqual(child['sha1'], 'c41527cad3bc161fa6e7909e48fa11f9eca0468b') + child = state["webidls"]["Child.webidl"] + self.assertEqual(len(child["inputs"]), 2) + self.assertEqual(len(child["outputs"]), 2) + self.assertEqual(child["sha1"], "c41527cad3bc161fa6e7909e48fa11f9eca0468b") def test_generate_build_files_load_state(self): """State should be equivalent when instantiating a new instance.""" args = self._get_manager_args() m1 = WebIDLCodegenManager(**args) - self.assertEqual(len(m1._state['webidls']), 0) + self.assertEqual(len(m1._state["webidls"]), 0) m1.generate_build_files() m2 = WebIDLCodegenManager(**args) - self.assertGreater(len(m2._state['webidls']), 2) + self.assertGreater(len(m2._state["webidls"]), 2) self.assertEqual(m1._state, m2._state) def test_no_change_no_writes(self): @@ -172,7 +181,7 @@ def test_output_file_regenerated(self): m1.generate_build_files() rm_count = 0 - for p in m1._state['webidls']['Child.webidl']['outputs']: + for p in m1._state["webidls"]["Child.webidl"]["outputs"]: rm_count += 1 os.unlink(p) @@ -192,14 +201,14 @@ def test_only_rebuild_self(self): child_path = None for p in m1._input_paths: - if p.endswith('Child.webidl'): + if p.endswith("Child.webidl"): child_path = p break self.assertIsNotNone(child_path) - child_content = io.open(child_path, 'r').read() + child_content = io.open(child_path, "r").read() - with MockedOpen({child_path: child_content + '\n/* */'}): + with MockedOpen({child_path: child_content + "\n/* */"}): m2 = WebIDLCodegenManager(**args) result = m2.generate_build_files() self.assertEqual(result.inputs, set([child_path])) @@ -215,15 +224,15 @@ def test_rebuild_dependencies(self): parent_path = None child_path = None for p in m1._input_paths: - if p.endswith('Parent.webidl'): + if p.endswith("Parent.webidl"): parent_path = p - elif p.endswith('Child.webidl'): + elif p.endswith("Child.webidl"): child_path = p self.assertIsNotNone(parent_path) - parent_content = io.open(parent_path, 'r').read() + parent_content = io.open(parent_path, "r").read() - with MockedOpen({parent_path: parent_content + '\n/* */'}): + with MockedOpen({parent_path: parent_content + "\n/* */"}): m2 = WebIDLCodegenManager(**args) result = m2.generate_build_files() self.assertEqual(result.inputs, {child_path, parent_path}) @@ -241,22 +250,22 @@ def test_python_change_regenerate_everything(self): # Hacking imp to accept a MockedFile doesn't appear possible. So for # the first iteration we read from a temp file. The second iteration # doesn't need to import, so we are fine with a mocked file. 
- fake_path = mozpath.join(OUR_DIR, 'fakemodule.py') - with NamedTemporaryFile('wt') as fh: - fh.write('# Original content') + fake_path = mozpath.join(OUR_DIR, "fakemodule.py") + with NamedTemporaryFile("wt") as fh: + fh.write("# Original content") fh.flush() - mod = imp.load_source('mozwebidlcodegen.fakemodule', fh.name) + mod = imp.load_source("mozwebidlcodegen.fakemodule", fh.name) mod.__file__ = fake_path args = self._get_manager_args() m1 = WebIDLCodegenManager(**args) - with MockedOpen({fake_path: '# Original content'}): + with MockedOpen({fake_path: "# Original content"}): try: result = m1.generate_build_files() l = len(result.inputs) - with io.open(fake_path, 'wt', newline='\n') as fh: - fh.write('# Modified content') + with io.open(fake_path, "wt", newline="\n") as fh: + fh.write("# Modified content") m2 = WebIDLCodegenManager(**args) result = m2.generate_build_files() @@ -265,7 +274,7 @@ def test_python_change_regenerate_everything(self): result = m2.generate_build_files() self.assertEqual(len(result.inputs), 0) finally: - del sys.modules['mozwebidlcodegen.fakemodule'] + del sys.modules["mozwebidlcodegen.fakemodule"] def test_copy_input(self): """Ensure a copied .webidl file is handled properly.""" @@ -279,22 +288,22 @@ def test_copy_input(self): m1.generate_build_files() old_path = None - for p in args['inputs'][0]: - if p.endswith('Parent.webidl'): + for p in args["inputs"][0]: + if p.endswith("Parent.webidl"): old_path = p break self.assertIsNotNone(old_path) - new_path = mozpath.join(args['cache_dir'], 'Parent.webidl') + new_path = mozpath.join(args["cache_dir"], "Parent.webidl") shutil.copy2(old_path, new_path) - args['inputs'][0].remove(old_path) - args['inputs'][0].add(new_path) + args["inputs"][0].remove(old_path) + args["inputs"][0].add(new_path) m2 = WebIDLCodegenManager(**args) result = m2.generate_build_files() self.assertEqual(len(result.updated), 0) -if __name__ == '__main__': +if __name__ == "__main__": main() diff --git a/dom/bindings/parser/WebIDL.py b/dom/bindings/parser/WebIDL.py index 574e644bced356..b30666e53f950d 100644 --- a/dom/bindings/parser/WebIDL.py +++ b/dom/bindings/parser/WebIDL.py @@ -22,14 +22,14 @@ def parseInt(literal): sign = 0 base = 0 - if string[0] == '-': + if string[0] == "-": sign = -1 string = string[1:] else: sign = 1 - if string[0] == '0' and len(string) > 1: - if string[1] == 'x' or string[1] == 'X': + if string[0] == "0" and len(string) > 1: + if string[1] == "x" or string[1] == "X": base = 16 string = string[2:] else: @@ -45,13 +45,16 @@ def parseInt(literal): def enum(*names, **kw): class Foo(object): attrs = OrderedDict() + def __init__(self, names): for v, k in enumerate(names): self.attrs[k] = v + def __getattr__(self, attr): if attr in self.attrs: return self.attrs[attr] raise AttributeError + def __setattr__(self, name, value): # this makes it read-only raise NotImplementedError @@ -67,10 +70,12 @@ def __init__(self, message, locations, warning=False): self.warning = warning def __str__(self): - return "%s: %s%s%s" % (self.warning and 'warning' or 'error', - self.message, - ", " if len(self.locations) != 0 else "", - "\n".join(self.locations)) + return "%s: %s%s%s" % ( + self.warning and "warning" or "error", + self.message, + ", " if len(self.locations) != 0 else "", + "\n".join(self.locations), + ) class Location(object): @@ -82,8 +87,7 @@ def __init__(self, lexer, lineno, lexpos, filename): self._file = filename if filename else "" def __eq__(self, other): - return (self._lexpos == other._lexpos and - self._file == 
other._file) + return self._lexpos == other._lexpos and self._file == other._file def filename(self): return self._file @@ -92,8 +96,8 @@ def resolve(self): if self._line: return - startofline = self._lexdata.rfind('\n', 0, self._lexpos) + 1 - endofline = self._lexdata.find('\n', self._lexpos, self._lexpos + 80) + startofline = self._lexdata.rfind("\n", 0, self._lexpos) + 1 + endofline = self._lexdata.find("\n", self._lexpos, self._lexpos + 80) if endofline != -1: self._line = self._lexdata[startofline:endofline] else: @@ -101,7 +105,7 @@ def resolve(self): self._colno = self._lexpos - startofline # Our line number seems to point to the start of self._lexdata - self._lineno += self._lexdata.count('\n', 0, startofline) + self._lineno += self._lexdata.count("\n", 0, startofline) def get(self): self.resolve() @@ -112,8 +116,13 @@ def _pointerline(self): def __str__(self): self.resolve() - return "%s line %s:%s\n%s\n%s" % (self._file, self._lineno, self._colno, - self._line, self._pointerline()) + return "%s line %s:%s\n%s\n%s" % ( + self._file, + self._lineno, + self._colno, + self._line, + self._pointerline(), + ) class BuiltinLocation(object): @@ -121,11 +130,10 @@ def __init__(self, text): self.msg = text + "\n" def __eq__(self, other): - return (isinstance(other, BuiltinLocation) and - self.msg == other.msg) + return isinstance(other, BuiltinLocation) and self.msg == other.msg def filename(self): - return '' + return "" def resolve(self): pass @@ -191,13 +199,13 @@ def _getDependentObjects(self): assert False # Override me! def getDeps(self, visited=None): - """ Return a set of files that this object depends on. If any of - these files are changed the parser needs to be rerun to regenerate - a new IDLObject. + """Return a set of files that this object depends on. If any of + these files are changed the parser needs to be rerun to regenerate + a new IDLObject. - The visited argument is a set of all the objects already visited. - We must test to see if we are in it, and if so, do nothing. This - prevents infinite recursion.""" + The visited argument is a set of all the objects already visited. + We must test to see if we are in it, and if so, do nothing. This + prevents infinite recursion.""" # NB: We can't use visited=set() above because the default value is # evaluated when the def statement is evaluated, not when the function @@ -253,9 +261,9 @@ def QName(self): def ensureUnique(self, identifier, object): """ - Ensure that there is at most one 'identifier' in scope ('self'). - Note that object can be None. This occurs if we end up here for an - interface type we haven't seen yet. + Ensure that there is at most one 'identifier' in scope ('self'). + Note that object can be None. This occurs if we end up here for an + interface type we haven't seen yet. 
""" assert isinstance(identifier, IDLUnresolvedIdentifier) assert not object or isinstance(object, IDLObjectWithIdentifier) @@ -268,9 +276,9 @@ def ensureUnique(self, identifier, object): # ensureUnique twice with the same object is not allowed assert id(object) != id(self._dict[identifier.name]) - replacement = self.resolveIdentifierConflict(self, identifier, - self._dict[identifier.name], - object) + replacement = self.resolveIdentifierConflict( + self, identifier, self._dict[identifier.name], object + ) self._dict[identifier.name] = replacement return @@ -279,44 +287,53 @@ def ensureUnique(self, identifier, object): self._dict[identifier.name] = object def resolveIdentifierConflict(self, scope, identifier, originalObject, newObject): - if (isinstance(originalObject, IDLExternalInterface) and - isinstance(newObject, IDLExternalInterface) and - originalObject.identifier.name == newObject.identifier.name): + if ( + isinstance(originalObject, IDLExternalInterface) + and isinstance(newObject, IDLExternalInterface) + and originalObject.identifier.name == newObject.identifier.name + ): return originalObject - if (isinstance(originalObject, IDLExternalInterface) or - isinstance(newObject, IDLExternalInterface)): + if isinstance(originalObject, IDLExternalInterface) or isinstance( + newObject, IDLExternalInterface + ): raise WebIDLError( "Name collision between " "interface declarations for identifier '%s' at '%s' and '%s'" - % (identifier.name, - originalObject.location, newObject.location), []) + % (identifier.name, originalObject.location, newObject.location), + [], + ) - if (isinstance(originalObject, IDLDictionary) or - isinstance(newObject, IDLDictionary)): + if isinstance(originalObject, IDLDictionary) or isinstance( + newObject, IDLDictionary + ): raise WebIDLError( "Name collision between dictionary declarations for " "identifier '%s'.\n%s\n%s" - % (identifier.name, - originalObject.location, newObject.location), []) + % (identifier.name, originalObject.location, newObject.location), + [], + ) # We do the merging of overloads here as opposed to in IDLInterface # because we need to merge overloads of NamedConstructors and we need to # detect conflicts in those across interfaces. See also the comment in # IDLInterface.addExtendedAttributes for "NamedConstructor". - if (isinstance(originalObject, IDLMethod) and - isinstance(newObject, IDLMethod)): + if isinstance(originalObject, IDLMethod) and isinstance(newObject, IDLMethod): return originalObject.addOverload(newObject) # Default to throwing, derived classes can override. 
- conflictdesc = "\n\t%s at %s\n\t%s at %s" % (originalObject, - originalObject.location, - newObject, - newObject.location) + conflictdesc = "\n\t%s at %s\n\t%s at %s" % ( + originalObject, + originalObject.location, + newObject, + newObject.location, + ) raise WebIDLError( "Multiple unresolvable definitions of identifier '%s' in scope '%s'%s" - % (identifier.name, str(self), conflictdesc), []) + % (identifier.name, str(self), conflictdesc), + [], + ) def _lookupIdentifier(self, identifier): return self._dict[identifier.name] @@ -359,8 +376,9 @@ def object(self): class IDLUnresolvedIdentifier(IDLObject): - def __init__(self, location, name, allowDoubleUnderscore=False, - allowForbidden=False): + def __init__( + self, location, name, allowDoubleUnderscore=False, allowForbidden=False + ): IDLObject.__init__(self, location) assert len(name) > 0 @@ -369,14 +387,13 @@ def __init__(self, location, name, allowDoubleUnderscore=False, raise WebIDLError("__noSuchMethod__ is deprecated", [location]) if name[:2] == "__" and name != "__content" and not allowDoubleUnderscore: - raise WebIDLError("Identifiers beginning with __ are reserved", - [location]) - if name[0] == '_' and not allowDoubleUnderscore: + raise WebIDLError("Identifiers beginning with __ are reserved", [location]) + if name[0] == "_" and not allowDoubleUnderscore: name = name[1:] - if (name in ["constructor", "toString"] and - not allowForbidden): - raise WebIDLError("Cannot use reserved identifier '%s'" % (name), - [location]) + if name in ["constructor", "toString"] and not allowForbidden: + raise WebIDLError( + "Cannot use reserved identifier '%s'" % (name), [location] + ) self.name = name @@ -436,14 +453,15 @@ def finish(self, scope): try: scope._lookupIdentifier(self.identifier) except: - raise WebIDLError("Unresolved type '%s'." % self.identifier, - [self.location]) + raise WebIDLError( + "Unresolved type '%s'." % self.identifier, [self.location] + ) obj = self.identifier.resolve(scope, None) return scope.lookupIdentifier(obj) -class IDLExposureMixins(): +class IDLExposureMixins: def __init__(self, location): # _exposureGlobalNames are the global names listed in our [Exposed] # extended attribute. exposureSet is the exposure set as defined in the @@ -460,8 +478,9 @@ def finish(self, scope): # Verify that our [Exposed] value, if any, makes sense. for globalName in self._exposureGlobalNames: if globalName not in scope.globalNames: - raise WebIDLError("Unknown [Exposed] value %s" % globalName, - [self._location]) + raise WebIDLError( + "Unknown [Exposed] value %s" % globalName, [self._location] + ) # Verify that we are exposed _somwhere_ if we have some place to be # exposed. We don't want to assert that we're definitely exposed @@ -470,16 +489,20 @@ def finish(self, scope): # and add global interfaces and [Exposed] annotations to all those # tests. 
if len(scope.globalNames) != 0: - if (len(self._exposureGlobalNames) == 0): - raise WebIDLError(("'%s' is not exposed anywhere even though we have " - "globals to be exposed to") % self, - [self.location]) + if len(self._exposureGlobalNames) == 0: + raise WebIDLError( + ( + "'%s' is not exposed anywhere even though we have " + "globals to be exposed to" + ) + % self, + [self.location], + ) - globalNameSetToExposureSet(scope, self._exposureGlobalNames, - self.exposureSet) + globalNameSetToExposureSet(scope, self._exposureGlobalNames, self.exposureSet) def isExposedInWindow(self): - return 'Window' in self.exposureSet + return "Window" in self.exposureSet def isExposedInAnyWorker(self): return len(self.getWorkerExposureSet()) > 0 @@ -540,9 +563,11 @@ def isInterface(self): def addExtendedAttributes(self, attrs): if len(attrs) != 0: - raise WebIDLError("There are no extended attributes that are " - "allowed on external interfaces", - [attrs[0].location, self.location]) + raise WebIDLError( + "There are no extended attributes that are " + "allowed on external interfaces", + [attrs[0].location, self.location], + ) def resolve(self, parentScope): pass @@ -614,42 +639,61 @@ def addExtendedAttributes(self, attrs): # This gets propagated to all our members. for member in self.members: if member.getExtendedAttribute("SecureContext"): - raise WebIDLError("[SecureContext] specified on both a " - "partial interface member and on the " - "partial interface itself", - [member.location, attr.location]) + raise WebIDLError( + "[SecureContext] specified on both a " + "partial interface member and on the " + "partial interface itself", + [member.location, attr.location], + ) member.addExtendedAttributes([attr]) elif identifier == "Exposed": # This just gets propagated to all our members. for member in self.members: if len(member._exposureGlobalNames) != 0: - raise WebIDLError("[Exposed] specified on both a " - "partial interface member and on the " - "partial interface itself", - [member.location, attr.location]) + raise WebIDLError( + "[Exposed] specified on both a " + "partial interface member and on the " + "partial interface itself", + [member.location, attr.location], + ) member.addExtendedAttributes([attr]) else: - raise WebIDLError("Unknown extended attribute %s on partial " - "interface" % identifier, - [attr.location]) + raise WebIDLError( + "Unknown extended attribute %s on partial " + "interface" % identifier, + [attr.location], + ) def finish(self, scope): if self._finished: return self._finished = True - if (not self._haveSecureContextExtendedAttribute and - self._nonPartialInterfaceOrNamespace.getExtendedAttribute("SecureContext")): + if ( + not self._haveSecureContextExtendedAttribute + and self._nonPartialInterfaceOrNamespace.getExtendedAttribute( + "SecureContext" + ) + ): # This gets propagated to all our members. 
for member in self.members: if member.getExtendedAttribute("SecureContext"): - raise WebIDLError("[SecureContext] specified on both a " - "partial interface member and on the " - "non-partial interface", - [member.location, - self._nonPartialInterfaceOrNamespace.location]) + raise WebIDLError( + "[SecureContext] specified on both a " + "partial interface member and on the " + "non-partial interface", + [ + member.location, + self._nonPartialInterfaceOrNamespace.location, + ], + ) member.addExtendedAttributes( - [IDLExtendedAttribute(self._nonPartialInterfaceOrNamespace.location, - ("SecureContext",))]) + [ + IDLExtendedAttribute( + self._nonPartialInterfaceOrNamespace.location, + ("SecureContext",), + ) + ] + ) # Need to make sure our non-partial interface or namespace gets # finished so it can report cases when we only have partial # interfaces/namespaces. @@ -672,6 +716,7 @@ def globalNameSetToExposureSet(globalScope, nameSet, exposureSet): for name in nameSet: exposureSet.update(globalScope.globalNameMapping[name]) + class IDLInterfaceOrInterfaceMixinOrNamespace(IDLObjectWithScope, IDLExposureMixins): def __init__(self, location, parentScope, name): assert isinstance(parentScope, IDLScope) @@ -688,8 +733,10 @@ def __init__(self, location, parentScope, name): def finish(self, scope): if not self._isKnownNonPartial: - raise WebIDLError("%s does not have a non-partial declaration" % - str(self), [self.location]) + raise WebIDLError( + "%s does not have a non-partial declaration" % str(self), + [self.location], + ) IDLExposureMixins.finish(self, scope) @@ -704,8 +751,9 @@ def resolveIdentifierConflict(self, scope, identifier, originalObject, newObject assert isinstance(originalObject, IDLInterfaceMember) assert isinstance(newObject, IDLInterfaceMember) - retval = IDLScope.resolveIdentifierConflict(self, scope, identifier, - originalObject, newObject) + retval = IDLScope.resolveIdentifierConflict( + self, scope, identifier, originalObject, newObject + ) # Might be a ctor, which isn't in self.members if newObject in self.members: @@ -725,9 +773,10 @@ def getExtendedAttribute(self, name): def setNonPartial(self, location, members): if self._isKnownNonPartial: - raise WebIDLError("Two non-partial definitions for the " - "same %s" % self.typeName(), - [location, self.location]) + raise WebIDLError( + "Two non-partial definitions for the " "same %s" % self.typeName(), + [location, self.location], + ) self._isKnownNonPartial = True # Now make it look like we were parsed at this new location, since # that's the place where the interface is "really" defined @@ -772,9 +821,11 @@ def finishMembers(self, scope): # sets, make sure they aren't exposed in places where we are not. 
for member in self.members: if not member.exposureSet.issubset(self.exposureSet): - raise WebIDLError("Interface or interface mixin member has " - "larger exposure set than its container", - [member.location, self.location]) + raise WebIDLError( + "Interface or interface mixin member has " + "larger exposure set than its container", + [member.location, self.location], + ) def isExternal(self): return False @@ -785,7 +836,9 @@ def __init__(self, location, parentScope, name, members, isKnownNonPartial): self.actualExposureGlobalNames = set() assert isKnownNonPartial or len(members) == 0 - IDLInterfaceOrInterfaceMixinOrNamespace.__init__(self, location, parentScope, name) + IDLInterfaceOrInterfaceMixinOrNamespace.__init__( + self, location, parentScope, name + ) if isKnownNonPartial: self.setNonPartial(location, members) @@ -821,26 +874,33 @@ def validate(self): if member.isAttr(): if member.inherit: - raise WebIDLError("Interface mixin member cannot include " - "an inherited attribute", - [member.location, self.location]) + raise WebIDLError( + "Interface mixin member cannot include " + "an inherited attribute", + [member.location, self.location], + ) if member.isStatic(): - raise WebIDLError("Interface mixin member cannot include " - "a static member", - [member.location, self.location]) + raise WebIDLError( + "Interface mixin member cannot include " "a static member", + [member.location, self.location], + ) if member.isMethod(): if member.isStatic(): - raise WebIDLError("Interface mixin member cannot include " - "a static operation", - [member.location, self.location]) - if (member.isGetter() or - member.isSetter() or - member.isDeleter() or - member.isLegacycaller()): - raise WebIDLError("Interface mixin member cannot include a " - "special operation", - [member.location, self.location]) + raise WebIDLError( + "Interface mixin member cannot include " "a static operation", + [member.location, self.location], + ) + if ( + member.isGetter() + or member.isSetter() + or member.isDeleter() + or member.isLegacycaller() + ): + raise WebIDLError( + "Interface mixin member cannot include a " "special operation", + [member.location, self.location], + ) def addExtendedAttributes(self, attrs): for attr in attrs: @@ -848,22 +908,26 @@ def addExtendedAttributes(self, attrs): if identifier == "SecureContext": if not attr.noArguments(): - raise WebIDLError("[%s] must take no arguments" % identifier, - [attr.location]) + raise WebIDLError( + "[%s] must take no arguments" % identifier, [attr.location] + ) # This gets propagated to all our members. 
for member in self.members: if member.getExtendedAttribute("SecureContext"): - raise WebIDLError("[SecureContext] specified on both " - "an interface mixin member and on" - "the interface mixin itself", - [member.location, attr.location]) + raise WebIDLError( + "[SecureContext] specified on both " + "an interface mixin member and on" + "the interface mixin itself", + [member.location, attr.location], + ) member.addExtendedAttributes([attr]) elif identifier == "Exposed": - convertExposedAttrToGlobalNameSet(attr, - self._exposureGlobalNames) + convertExposedAttrToGlobalNameSet(attr, self._exposureGlobalNames) else: - raise WebIDLError("Unknown extended attribute %s on interface" % identifier, - [attr.location]) + raise WebIDLError( + "Unknown extended attribute %s on interface" % identifier, + [attr.location], + ) attrlist = attr.listValue() self._extendedAttrDict[identifier] = attrlist if len(attrlist) else True @@ -873,8 +937,7 @@ def _getDependentObjects(self): class IDLInterfaceOrNamespace(IDLInterfaceOrInterfaceMixinOrNamespace): - def __init__(self, location, parentScope, name, parent, members, - isKnownNonPartial): + def __init__(self, location, parentScope, name, parent, members, isKnownNonPartial): assert isKnownNonPartial or not parent assert isKnownNonPartial or len(members) == 0 @@ -906,22 +969,27 @@ def __init__(self, location, parentScope, name, parent, members, # True if some descendant (including ourselves) has cross-origin members self.hasDescendantWithCrossOriginMembers = False - IDLInterfaceOrInterfaceMixinOrNamespace.__init__(self, location, parentScope, name) + IDLInterfaceOrInterfaceMixinOrNamespace.__init__( + self, location, parentScope, name + ) if isKnownNonPartial: self.setNonPartial(location, parent, members) def ctor(self): - identifier = IDLUnresolvedIdentifier(self.location, "constructor", - allowForbidden=True) + identifier = IDLUnresolvedIdentifier( + self.location, "constructor", allowForbidden=True + ) try: return self._lookupIdentifier(identifier) except: return None def isIterable(self): - return (self.maplikeOrSetlikeOrIterable and - self.maplikeOrSetlikeOrIterable.isIterable()) + return ( + self.maplikeOrSetlikeOrIterable + and self.maplikeOrSetlikeOrIterable.isIterable() + ) def isIteratorInterface(self): return self.iterableInterface is not None @@ -936,13 +1004,17 @@ def finish(self, scope): if len(self.legacyWindowAliases) > 0: if not self.hasInterfaceObject(): - raise WebIDLError("Interface %s unexpectedly has [LegacyWindowAlias] " - "and [NoInterfaceObject] together" % self.identifier.name, - [self.location]) + raise WebIDLError( + "Interface %s unexpectedly has [LegacyWindowAlias] " + "and [NoInterfaceObject] together" % self.identifier.name, + [self.location], + ) if not self.isExposedInWindow(): - raise WebIDLError("Interface %s has [LegacyWindowAlias] " - "but not exposed in Window" % self.identifier.name, - [self.location]) + raise WebIDLError( + "Interface %s has [LegacyWindowAlias] " + "but not exposed in Window" % self.identifier.name, + [self.location], + ) # Generate maplike/setlike interface members. 
Since generated members # need to be treated like regular interface members, do this before @@ -953,31 +1025,37 @@ def finish(self, scope): # there can only be one maplike/setlike declaration per # interface) if self.maplikeOrSetlikeOrIterable: - raise WebIDLError("%s declaration used on " - "interface that already has %s " - "declaration" % - (member.maplikeOrSetlikeOrIterableType, - self.maplikeOrSetlikeOrIterable.maplikeOrSetlikeOrIterableType), - [self.maplikeOrSetlikeOrIterable.location, - member.location]) + raise WebIDLError( + "%s declaration used on " + "interface that already has %s " + "declaration" + % ( + member.maplikeOrSetlikeOrIterableType, + self.maplikeOrSetlikeOrIterable.maplikeOrSetlikeOrIterableType, + ), + [self.maplikeOrSetlikeOrIterable.location, member.location], + ) self.maplikeOrSetlikeOrIterable = member # If we've got a maplike or setlike declaration, we'll be building all of # our required methods in Codegen. Generate members now. - self.maplikeOrSetlikeOrIterable.expand(self.members, self.isJSImplemented()) + self.maplikeOrSetlikeOrIterable.expand( + self.members, self.isJSImplemented() + ) assert not self.parent or isinstance(self.parent, IDLIdentifierPlaceholder) parent = self.parent.finish(scope) if self.parent else None if parent and isinstance(parent, IDLExternalInterface): - raise WebIDLError("%s inherits from %s which does not have " - "a definition" % - (self.identifier.name, - self.parent.identifier.name), - [self.location]) + raise WebIDLError( + "%s inherits from %s which does not have " + "a definition" % (self.identifier.name, self.parent.identifier.name), + [self.location], + ) if parent and not isinstance(parent, IDLInterface): - raise WebIDLError("%s inherits from %s which is not an interface " % - (self.identifier.name, - self.parent.identifier.name), - [self.location, parent.location]) + raise WebIDLError( + "%s inherits from %s which is not an interface " + % (self.identifier.name, self.parent.identifier.name), + [self.location, parent.location], + ) self.parent = parent @@ -988,9 +1066,10 @@ def finish(self, scope): for m in self.members: if m.isAttr() or m.isMethod(): if m.isStatic(): - raise WebIDLError("Don't mark things explicitly static " - "in namespaces", - [self.location, m.location]) + raise WebIDLError( + "Don't mark things explicitly static " "in namespaces", + [self.location, m.location], + ) # Just mark all our methods/attributes as static. The other # option is to duplicate the relevant InterfaceMembers # production bits but modified to produce static stuff to @@ -1008,55 +1087,63 @@ def finish(self, scope): # Note: This is not a self.parent.isOnGlobalProtoChain() check # because ancestors of a [Global] interface can have other # descendants. - raise WebIDLError("[Global] interface has another interface " - "inheriting from it", - [self.location, self.parent.location]) + raise WebIDLError( + "[Global] interface has another interface " "inheriting from it", + [self.location, self.parent.location], + ) # Make sure that we're not exposed in places where our parent is not if not self.exposureSet.issubset(self.parent.exposureSet): - raise WebIDLError("Interface %s is exposed in globals where its " - "parent interface %s is not exposed." % - (self.identifier.name, - self.parent.identifier.name), - [self.location, self.parent.location]) + raise WebIDLError( + "Interface %s is exposed in globals where its " + "parent interface %s is not exposed." 
+ % (self.identifier.name, self.parent.identifier.name), + [self.location, self.parent.location], + ) # Callbacks must not inherit from non-callbacks. # XXXbz Can non-callbacks inherit from callbacks? Spec issue pending. if self.isCallback(): if not self.parent.isCallback(): - raise WebIDLError("Callback interface %s inheriting from " - "non-callback interface %s" % - (self.identifier.name, - self.parent.identifier.name), - [self.location, self.parent.location]) + raise WebIDLError( + "Callback interface %s inheriting from " + "non-callback interface %s" + % (self.identifier.name, self.parent.identifier.name), + [self.location, self.parent.location], + ) elif self.parent.isCallback(): - raise WebIDLError("Non-callback interface %s inheriting from " - "callback interface %s" % - (self.identifier.name, - self.parent.identifier.name), - [self.location, self.parent.location]) + raise WebIDLError( + "Non-callback interface %s inheriting from " + "callback interface %s" + % (self.identifier.name, self.parent.identifier.name), + [self.location, self.parent.location], + ) # Interfaces which have interface objects can't inherit # from [NoInterfaceObject] interfaces. - if (self.parent.getExtendedAttribute("NoInterfaceObject") and - not self.getExtendedAttribute("NoInterfaceObject")): - raise WebIDLError("Interface %s does not have " - "[NoInterfaceObject] but inherits from " - "interface %s which does" % - (self.identifier.name, - self.parent.identifier.name), - [self.location, self.parent.location]) + if self.parent.getExtendedAttribute( + "NoInterfaceObject" + ) and not self.getExtendedAttribute("NoInterfaceObject"): + raise WebIDLError( + "Interface %s does not have " + "[NoInterfaceObject] but inherits from " + "interface %s which does" + % (self.identifier.name, self.parent.identifier.name), + [self.location, self.parent.location], + ) # Interfaces that are not [SecureContext] can't inherit # from [SecureContext] interfaces. 
- if (self.parent.getExtendedAttribute("SecureContext") and - not self.getExtendedAttribute("SecureContext")): - raise WebIDLError("Interface %s does not have " - "[SecureContext] but inherits from " - "interface %s which does" % - (self.identifier.name, - self.parent.identifier.name), - [self.location, self.parent.location]) + if self.parent.getExtendedAttribute( + "SecureContext" + ) and not self.getExtendedAttribute("SecureContext"): + raise WebIDLError( + "Interface %s does not have " + "[SecureContext] but inherits from " + "interface %s which does" + % (self.identifier.name, self.parent.identifier.name), + [self.location, self.parent.location], + ) for mixin in self.includedMixins: mixin.finish(scope) @@ -1065,7 +1152,8 @@ def finish(self, scope): if cycleInGraph: raise WebIDLError( "Interface %s has itself as ancestor" % self.identifier.name, - [self.location, cycleInGraph.location]) + [self.location, cycleInGraph.location], + ) self.finishMembers(scope) @@ -1074,14 +1162,16 @@ def finish(self, scope): if not self.hasInterfaceObject(): raise WebIDLError( "Can't have both a constructor and [NoInterfaceObject]", - [self.location, ctor.location]) + [self.location, ctor.location], + ) if self.globalNames: raise WebIDLError( "Can't have both a constructor and [Global]", - [self.location, ctor.location]) + [self.location, ctor.location], + ) - assert(ctor._exposureGlobalNames == self._exposureGlobalNames) + assert ctor._exposureGlobalNames == self._exposureGlobalNames ctor._exposureGlobalNames.update(self._exposureGlobalNames) # Remove the constructor operation from our member list so # it doesn't get in the way later. @@ -1091,7 +1181,8 @@ def finish(self, scope): if self.globalNames: raise WebIDLError( "Can't have both a named constructor and [Global]", - [self.location, ctor.location]) + [self.location, ctor.location], + ) assert len(ctor._exposureGlobalNames) == 0 ctor._exposureGlobalNames.update(self._exposureGlobalNames) ctor.finish(scope) @@ -1101,28 +1192,33 @@ def finish(self, scope): # admixed. 
self.originalMembers = list(self.members) - for mixin in sorted(self.includedMixins, - key=lambda x: x.identifier.name): + for mixin in sorted(self.includedMixins, key=lambda x: x.identifier.name): for mixinMember in mixin.members: for member in self.members: if mixinMember.identifier.name == member.identifier.name: raise WebIDLError( - "Multiple definitions of %s on %s coming from 'includes' statements" % - (member.identifier.name, self), - [mixinMember.location, member.location]) + "Multiple definitions of %s on %s coming from 'includes' statements" + % (member.identifier.name, self), + [mixinMember.location, member.location], + ) self.members.extend(mixin.members) for ancestor in self.getInheritedInterfaces(): ancestor.interfacesBasedOnSelf.add(self) - if (ancestor.maplikeOrSetlikeOrIterable is not None and - self.maplikeOrSetlikeOrIterable is not None): - raise WebIDLError("Cannot have maplike/setlike on %s that " - "inherits %s, which is already " - "maplike/setlike" % - (self.identifier.name, - ancestor.identifier.name), - [self.maplikeOrSetlikeOrIterable.location, - ancestor.maplikeOrSetlikeOrIterable.location]) + if ( + ancestor.maplikeOrSetlikeOrIterable is not None + and self.maplikeOrSetlikeOrIterable is not None + ): + raise WebIDLError( + "Cannot have maplike/setlike on %s that " + "inherits %s, which is already " + "maplike/setlike" + % (self.identifier.name, ancestor.identifier.name), + [ + self.maplikeOrSetlikeOrIterable.location, + ancestor.maplikeOrSetlikeOrIterable.location, + ], + ) # Deal with interfaces marked [Unforgeable], now that we have our full # member list, except unforgeables pulled in from parents. We want to @@ -1133,35 +1229,47 @@ def finish(self, scope): # spec would otherwise require us to synthesize and is # missing the ones we plan to synthesize. 
if not any(m.isMethod() and m.isStringifier() for m in self.members): - raise WebIDLError("Unforgeable interface %s does not have a " - "stringifier" % self.identifier.name, - [self.location]) + raise WebIDLError( + "Unforgeable interface %s does not have a " + "stringifier" % self.identifier.name, + [self.location], + ) for m in self.members: if m.identifier.name == "toJSON": - raise WebIDLError("Unforgeable interface %s has a " - "toJSON so we won't be able to add " - "one ourselves" % self.identifier.name, - [self.location, m.location]) + raise WebIDLError( + "Unforgeable interface %s has a " + "toJSON so we won't be able to add " + "one ourselves" % self.identifier.name, + [self.location, m.location], + ) if m.identifier.name == "valueOf" and not m.isStatic(): - raise WebIDLError("Unforgeable interface %s has a valueOf " - "member so we won't be able to add one " - "ourselves" % self.identifier.name, - [self.location, m.location]) + raise WebIDLError( + "Unforgeable interface %s has a valueOf " + "member so we won't be able to add one " + "ourselves" % self.identifier.name, + [self.location, m.location], + ) for member in self.members: - if ((member.isAttr() or member.isMethod()) and - member.isUnforgeable() and - not hasattr(member, "originatingInterface")): + if ( + (member.isAttr() or member.isMethod()) + and member.isUnforgeable() + and not hasattr(member, "originatingInterface") + ): member.originatingInterface = self for member in self.members: - if ((member.isMethod() and - member.getExtendedAttribute("CrossOriginCallable")) or - (member.isAttr() and - (member.getExtendedAttribute("CrossOriginReadable") or - member.getExtendedAttribute("CrossOriginWritable")))): + if ( + member.isMethod() and member.getExtendedAttribute("CrossOriginCallable") + ) or ( + member.isAttr() + and ( + member.getExtendedAttribute("CrossOriginReadable") + or member.getExtendedAttribute("CrossOriginWritable") + ) + ): self.hasCrossOriginMembers = True break @@ -1175,16 +1283,20 @@ def finish(self, scope): # members from our parent. Also, maplike/setlike declarations get a # slot to hold their backing object. for member in self.members: - if ((member.isAttr() and - (member.getExtendedAttribute("StoreInSlot") or - member.getExtendedAttribute("Cached"))) or - member.isMaplikeOrSetlike()): + if ( + member.isAttr() + and ( + member.getExtendedAttribute("StoreInSlot") + or member.getExtendedAttribute("Cached") + ) + ) or member.isMaplikeOrSetlike(): if self.isJSImplemented() and not member.isMaplikeOrSetlike(): - raise WebIDLError("Interface %s is JS-implemented and we " - "don't support [Cached] or [StoreInSlot] " - "on JS-implemented interfaces" % - self.identifier.name, - [self.location, member.location]) + raise WebIDLError( + "Interface %s is JS-implemented and we " + "don't support [Cached] or [StoreInSlot] " + "on JS-implemented interfaces" % self.identifier.name, + [self.location, member.location], + ) if member.slotIndices is None: member.slotIndices = dict() member.slotIndices[self.identifier.name] = self.totalMembersInSlots @@ -1199,21 +1311,26 @@ def finish(self, scope): # we don't have to worry about anything other than our parent, because it # has already imported its ancestors' unforgeable attributes into its # member list. 
- for unforgeableMember in (member for member in self.parent.members if - (member.isAttr() or member.isMethod()) and - member.isUnforgeable()): - shadows = [m for m in self.members if - (m.isAttr() or m.isMethod()) and - not m.isStatic() and - m.identifier.name == unforgeableMember.identifier.name] + for unforgeableMember in ( + member + for member in self.parent.members + if (member.isAttr() or member.isMethod()) and member.isUnforgeable() + ): + shadows = [ + m + for m in self.members + if (m.isAttr() or m.isMethod()) + and not m.isStatic() + and m.identifier.name == unforgeableMember.identifier.name + ] if len(shadows) != 0: - locs = [unforgeableMember.location] + [s.location for s - in shadows] - raise WebIDLError("Interface %s shadows [Unforgeable] " - "members of %s" % - (self.identifier.name, - ancestor.identifier.name), - locs) + locs = [unforgeableMember.location] + [s.location for s in shadows] + raise WebIDLError( + "Interface %s shadows [Unforgeable] " + "members of %s" + % (self.identifier.name, ancestor.identifier.name), + locs, + ) # And now just stick it in our members, since we won't be # inheriting this down the proto chain. If we really cared we # could try to do something where we set up the unforgeable @@ -1229,8 +1346,9 @@ def finish(self, scope): testInterface = self isAncestor = False while testInterface: - self.maplikeOrSetlikeOrIterable.checkCollisions(testInterface.members, - isAncestor) + self.maplikeOrSetlikeOrIterable.checkCollisions( + testInterface.members, isAncestor + ) isAncestor = True testInterface = testInterface.parent @@ -1260,7 +1378,7 @@ def finish(self, scope): else: continue - if (memberType != "stringifiers" and memberType != "legacycallers"): + if memberType != "stringifiers" and memberType != "legacycallers": if member.isNamed(): memberType = "named " + memberType else: @@ -1268,10 +1386,14 @@ def finish(self, scope): memberType = "indexed " + memberType if memberType in specialMembersSeen: - raise WebIDLError("Multiple " + memberType + " on %s" % (self), - [self.location, - specialMembersSeen[memberType].location, - member.location]) + raise WebIDLError( + "Multiple " + memberType + " on %s" % (self), + [ + self.location, + specialMembersSeen[memberType].location, + member.location, + ], + ) specialMembersSeen[memberType] = member @@ -1281,7 +1403,8 @@ def finish(self, scope): raise WebIDLError( "Interface with [LegacyUnenumerableNamedProperties] does " "not have a named getter", - [self.location]) + [self.location], + ) ancestor = self.parent while ancestor: if ancestor.getExtendedAttribute("LegacyUnenumerableNamedProperties"): @@ -1289,7 +1412,8 @@ def finish(self, scope): "Interface with [LegacyUnenumerableNamedProperties] " "inherits from another interface with " "[LegacyUnenumerableNamedProperties]", - [self.location, ancestor.location]) + [self.location, ancestor.location], + ) ancestor = ancestor.parent if self._isOnGlobalProtoChain: @@ -1297,51 +1421,58 @@ def finish(self, scope): for memberType in ["setter", "deleter"]: memberId = "named " + memberType + "s" if memberId in specialMembersSeen: - raise WebIDLError("Interface with [Global] has a named %s" % - memberType, - [self.location, - specialMembersSeen[memberId].location]) + raise WebIDLError( + "Interface with [Global] has a named %s" % memberType, + [self.location, specialMembersSeen[memberId].location], + ) # Make sure we're not [OverrideBuiltins] if self.getExtendedAttribute("OverrideBuiltins"): - raise WebIDLError("Interface with [Global] also has " - "[OverrideBuiltins]", - 
[self.location]) + raise WebIDLError( + "Interface with [Global] also has " "[OverrideBuiltins]", + [self.location], + ) # Mark all of our ancestors as being on the global's proto chain too parent = self.parent while parent: # Must not inherit from an interface with [OverrideBuiltins] if parent.getExtendedAttribute("OverrideBuiltins"): - raise WebIDLError("Interface with [Global] inherits from " - "interface with [OverrideBuiltins]", - [self.location, parent.location]) + raise WebIDLError( + "Interface with [Global] inherits from " + "interface with [OverrideBuiltins]", + [self.location, parent.location], + ) parent._isOnGlobalProtoChain = True parent = parent.parent def validate(self): - def checkDuplicateNames(member, name, attributeName): for m in self.members: if m.identifier.name == name: - raise WebIDLError("[%s=%s] has same name as interface member" % - (attributeName, name), - [member.location, m.location]) + raise WebIDLError( + "[%s=%s] has same name as interface member" + % (attributeName, name), + [member.location, m.location], + ) if m.isMethod() and m != member and name in m.aliases: - raise WebIDLError("conflicting [%s=%s] definitions" % - (attributeName, name), - [member.location, m.location]) + raise WebIDLError( + "conflicting [%s=%s] definitions" % (attributeName, name), + [member.location, m.location], + ) if m.isAttr() and m != member and name in m.bindingAliases: - raise WebIDLError("conflicting [%s=%s] definitions" % - (attributeName, name), - [member.location, m.location]) + raise WebIDLError( + "conflicting [%s=%s] definitions" % (attributeName, name), + [member.location, m.location], + ) # We also don't support inheriting from unforgeable interfaces. if self.getExtendedAttribute("Unforgeable") and self.hasChildInterfaces(): - locations = ([self.location] + - list(i.location for i in - self.interfacesBasedOnSelf if i.parent == self)) - raise WebIDLError("%s is an unforgeable ancestor interface" % - self.identifier.name, - locations) + locations = [self.location] + list( + i.location for i in self.interfacesBasedOnSelf if i.parent == self + ) + raise WebIDLError( + "%s is an unforgeable ancestor interface" % self.identifier.name, + locations, + ) ctor = self.ctor() if ctor is not None: @@ -1355,50 +1486,57 @@ def checkDuplicateNames(member, name, attributeName): member.validate() if self.isCallback() and member.getExtendedAttribute("Replaceable"): - raise WebIDLError("[Replaceable] used on an attribute on " - "interface %s which is a callback interface" % - self.identifier.name, - [self.location, member.location]) + raise WebIDLError( + "[Replaceable] used on an attribute on " + "interface %s which is a callback interface" % self.identifier.name, + [self.location, member.location], + ) # Check that PutForwards refers to another attribute and that no # cycles exist in forwarded assignments. Also check for a # integer-typed "length" attribute. 
if member.isAttr(): - if (member.identifier.name == "length" and - member.type.isInteger()): + if member.identifier.name == "length" and member.type.isInteger(): hasLengthAttribute = True iface = self attr = member putForwards = attr.getExtendedAttribute("PutForwards") if putForwards and self.isCallback(): - raise WebIDLError("[PutForwards] used on an attribute " - "on interface %s which is a callback " - "interface" % self.identifier.name, - [self.location, member.location]) + raise WebIDLError( + "[PutForwards] used on an attribute " + "on interface %s which is a callback " + "interface" % self.identifier.name, + [self.location, member.location], + ) while putForwards is not None: forwardIface = attr.type.unroll().inner fowardAttr = None for forwardedMember in forwardIface.members: - if (not forwardedMember.isAttr() or - forwardedMember.identifier.name != putForwards[0]): + if ( + not forwardedMember.isAttr() + or forwardedMember.identifier.name != putForwards[0] + ): continue if forwardedMember == member: - raise WebIDLError("Cycle detected in forwarded " - "assignments for attribute %s on " - "%s" % - (member.identifier.name, self), - [member.location]) + raise WebIDLError( + "Cycle detected in forwarded " + "assignments for attribute %s on " + "%s" % (member.identifier.name, self), + [member.location], + ) fowardAttr = forwardedMember break if fowardAttr is None: - raise WebIDLError("Attribute %s on %s forwards to " - "missing attribute %s" % - (attr.identifier.name, iface, putForwards), - [attr.location]) + raise WebIDLError( + "Attribute %s on %s forwards to " + "missing attribute %s" + % (attr.identifier.name, iface, putForwards), + [attr.location], + ) iface = forwardIface attr = fowardAttr @@ -1412,29 +1550,39 @@ def checkDuplicateNames(member, name, attributeName): for alias in member.aliases: if self.isOnGlobalProtoChain(): - raise WebIDLError("[Alias] must not be used on a " - "[Global] interface operation", - [member.location]) - if (member.getExtendedAttribute("Exposed") or - member.getExtendedAttribute("ChromeOnly") or - member.getExtendedAttribute("Pref") or - member.getExtendedAttribute("Func") or - member.getExtendedAttribute("SecureContext")): - raise WebIDLError("[Alias] must not be used on a " - "conditionally exposed operation", - [member.location]) + raise WebIDLError( + "[Alias] must not be used on a " + "[Global] interface operation", + [member.location], + ) + if ( + member.getExtendedAttribute("Exposed") + or member.getExtendedAttribute("ChromeOnly") + or member.getExtendedAttribute("Pref") + or member.getExtendedAttribute("Func") + or member.getExtendedAttribute("SecureContext") + ): + raise WebIDLError( + "[Alias] must not be used on a " + "conditionally exposed operation", + [member.location], + ) if member.isStatic(): - raise WebIDLError("[Alias] must not be used on a " - "static operation", - [member.location]) + raise WebIDLError( + "[Alias] must not be used on a " "static operation", + [member.location], + ) if member.isIdentifierLess(): - raise WebIDLError("[Alias] must not be used on an " - "identifierless operation", - [member.location]) + raise WebIDLError( + "[Alias] must not be used on an " + "identifierless operation", + [member.location], + ) if member.isUnforgeable(): - raise WebIDLError("[Alias] must not be used on an " - "[Unforgeable] operation", - [member.location]) + raise WebIDLError( + "[Alias] must not be used on an " "[Unforgeable] operation", + [member.location], + ) checkDuplicateNames(member, alias, "Alias") @@ -1444,16 +1592,18 @@ 
def checkDuplicateNames(member, name, attributeName): for bindingAlias in member.bindingAliases: checkDuplicateNames(member, bindingAlias, "BindingAlias") - # Conditional exposure makes no sense for interfaces with no # interface object. # And SecureContext makes sense for interfaces with no interface object, # since it is also propagated to interface members. - if (self.isExposedConditionally(exclusions=["SecureContext"]) and - not self.hasInterfaceObject()): - raise WebIDLError("Interface with no interface object is " - "exposed conditionally", - [self.location]) + if ( + self.isExposedConditionally(exclusions=["SecureContext"]) + and not self.hasInterfaceObject() + ): + raise WebIDLError( + "Interface with no interface object is " "exposed conditionally", + [self.location], + ) # Value iterators are only allowed on interfaces with indexed getters, # and pair iterators are only allowed on interfaces without indexed @@ -1462,32 +1612,38 @@ def checkDuplicateNames(member, name, attributeName): iterableDecl = self.maplikeOrSetlikeOrIterable if iterableDecl.isValueIterator(): if not indexedGetter: - raise WebIDLError("Interface with value iterator does not " - "support indexed properties", - [self.location, iterableDecl.location]) + raise WebIDLError( + "Interface with value iterator does not " + "support indexed properties", + [self.location, iterableDecl.location], + ) if iterableDecl.valueType != indexedGetter.signatures()[0][0]: - raise WebIDLError("Iterable type does not match indexed " - "getter type", - [iterableDecl.location, - indexedGetter.location]) + raise WebIDLError( + "Iterable type does not match indexed " "getter type", + [iterableDecl.location, indexedGetter.location], + ) if not hasLengthAttribute: - raise WebIDLError('Interface with value iterator does not ' - 'have an integer-typed "length" attribute', - [self.location, iterableDecl.location]) + raise WebIDLError( + "Interface with value iterator does not " + 'have an integer-typed "length" attribute', + [self.location, iterableDecl.location], + ) else: assert iterableDecl.isPairIterator() if indexedGetter: - raise WebIDLError("Interface with pair iterator supports " - "indexed properties", - [self.location, iterableDecl.location, - indexedGetter.location]) + raise WebIDLError( + "Interface with pair iterator supports " "indexed properties", + [self.location, iterableDecl.location, indexedGetter.location], + ) if indexedGetter and not hasLengthAttribute: - raise WebIDLError('Interface with an indexed getter does not have ' - 'an integer-typed "length" attribute', - [self.location, indexedGetter.location]) + raise WebIDLError( + "Interface with an indexed getter does not have " + 'an integer-typed "length" attribute', + [self.location, indexedGetter.location], + ) def setCallback(self, value): self._callback = value @@ -1500,15 +1656,25 @@ def isSingleOperationInterface(self): return ( # JS-implemented things should never need the # this-handling weirdness of single-operation interfaces. 
- not self.isJSImplemented() and + not self.isJSImplemented() + and # Not inheriting from another interface - not self.parent and + not self.parent + and # No attributes of any kinds - not any(m.isAttr() for m in self.members) and + not any(m.isAttr() for m in self.members) + and # There is at least one regular operation, and all regular # operations have the same identifier - len(set(m.identifier.name for m in self.members if - m.isMethod() and not m.isStatic())) == 1) + len( + set( + m.identifier.name + for m in self.members + if m.isMethod() and not m.isStatic() + ) + ) + == 1 + ) def inheritanceDepth(self): depth = 0 @@ -1527,11 +1693,14 @@ def hasInterfaceObject(self): return not hasattr(self, "_noInterfaceObject") def hasInterfacePrototypeObject(self): - return (not self.isCallback() and not self.isNamespace() - and self.getUserData('hasConcreteDescendant', False)) + return ( + not self.isCallback() + and not self.isNamespace() + and self.getUserData("hasConcreteDescendant", False) + ) def addIncludedMixin(self, includedMixin): - assert(isinstance(includedMixin, IDLInterfaceMixin)) + assert isinstance(includedMixin, IDLInterfaceMixin) self.includedMixins.add(includedMixin) def getInheritedInterfaces(self): @@ -1540,7 +1709,7 @@ def getInheritedInterfaces(self): (not including this interface itself). The list is in order from most derived to least derived. """ - assert(self._finished) + assert self._finished if not self.parent: return [] parentInterfaces = self.parent.getInheritedInterfaces() @@ -1601,16 +1770,29 @@ def _getDependentObjects(self): def hasMembersInSlots(self): return self._ownMembersInSlots != 0 - conditionExtendedAttributes = [ "Pref", "ChromeOnly", "Func", - "SecureContext" ] + conditionExtendedAttributes = ["Pref", "ChromeOnly", "Func", "SecureContext"] + def isExposedConditionally(self, exclusions=[]): - return any(((not a in exclusions) and self.getExtendedAttribute(a)) for a in self.conditionExtendedAttributes) + return any( + ((not a in exclusions) and self.getExtendedAttribute(a)) + for a in self.conditionExtendedAttributes + ) + class IDLInterface(IDLInterfaceOrNamespace): - def __init__(self, location, parentScope, name, parent, members, - isKnownNonPartial, classNameOverride=None): - IDLInterfaceOrNamespace.__init__(self, location, parentScope, name, - parent, members, isKnownNonPartial) + def __init__( + self, + location, + parentScope, + name, + parent, + members, + isKnownNonPartial, + classNameOverride=None, + ): + IDLInterfaceOrNamespace.__init__( + self, location, parentScope, name, parent, members, isKnownNonPartial + ) self.classNameOverride = classNameOverride def __str__(self): @@ -1630,22 +1812,28 @@ def addExtendedAttributes(self, attrs): # Special cased attrs if identifier == "TreatNonCallableAsNull": - raise WebIDLError("TreatNonCallableAsNull cannot be specified on interfaces", - [attr.location, self.location]) + raise WebIDLError( + "TreatNonCallableAsNull cannot be specified on interfaces", + [attr.location, self.location], + ) if identifier == "TreatNonObjectAsNull": - raise WebIDLError("TreatNonObjectAsNull cannot be specified on interfaces", - [attr.location, self.location]) + raise WebIDLError( + "TreatNonObjectAsNull cannot be specified on interfaces", + [attr.location, self.location], + ) elif identifier == "NoInterfaceObject": if not attr.noArguments(): - raise WebIDLError("[NoInterfaceObject] must take no arguments", - [attr.location]) + raise WebIDLError( + "[NoInterfaceObject] must take no arguments", [attr.location] + ) 
self._noInterfaceObject = True elif identifier == "NamedConstructor": if not attr.hasValue(): - raise WebIDLError("NamedConstructor must either take an identifier or take a named argument list", - [attr.location]) - + raise WebIDLError( + "NamedConstructor must either take an identifier or take a named argument list", + [attr.location], + ) args = attr.args() if attr.hasArgs() else [] @@ -1657,7 +1845,8 @@ def addExtendedAttributes(self, attrs): # Named constructors are always assumed to be able to # throw (since there's no way to indicate otherwise). method.addExtendedAttributes( - [IDLExtendedAttribute(self.location, ("Throws",))]) + [IDLExtendedAttribute(self.location, ("Throws",))] + ) # We need to detect conflicts for NamedConstructors across # interfaces. We first call resolve on the parentScope, @@ -1677,17 +1866,22 @@ def addExtendedAttributes(self, attrs): if newMethod == method: self.namedConstructors.append(method) elif newMethod not in self.namedConstructors: - raise WebIDLError("NamedConstructor conflicts with a " - "NamedConstructor of a different interface", - [method.location, newMethod.location]) - elif (identifier == "ExceptionClass"): + raise WebIDLError( + "NamedConstructor conflicts with a " + "NamedConstructor of a different interface", + [method.location, newMethod.location], + ) + elif identifier == "ExceptionClass": if not attr.noArguments(): - raise WebIDLError("[ExceptionClass] must take no arguments", - [attr.location]) + raise WebIDLError( + "[ExceptionClass] must take no arguments", [attr.location] + ) if self.parent: - raise WebIDLError("[ExceptionClass] must not be specified on " - "an interface with inherited interfaces", - [attr.location, self.location]) + raise WebIDLError( + "[ExceptionClass] must not be specified on " + "an interface with inherited interfaces", + [attr.location, self.location], + ) elif identifier == "Global": if attr.hasValue(): self.globalNames = [attr.value()] @@ -1695,8 +1889,9 @@ def addExtendedAttributes(self, attrs): self.globalNames = attr.args() else: self.globalNames = [self.identifier.name] - self.parentScope.addIfaceGlobalNames(self.identifier.name, - self.globalNames) + self.parentScope.addIfaceGlobalNames( + self.identifier.name, self.globalNames + ) self._isOnGlobalProtoChain = True elif identifier == "LegacyWindowAlias": if attr.hasValue(): @@ -1704,58 +1899,71 @@ def addExtendedAttributes(self, attrs): elif attr.hasArgs(): self.legacyWindowAliases = attr.args() else: - raise WebIDLError("[%s] must either take an identifier " - "or take an identifier list" % identifier, - [attr.location]) + raise WebIDLError( + "[%s] must either take an identifier " + "or take an identifier list" % identifier, + [attr.location], + ) for alias in self.legacyWindowAliases: unresolved = IDLUnresolvedIdentifier(attr.location, alias) IDLObjectWithIdentifier(attr.location, self.parentScope, unresolved) elif identifier == "SecureContext": if not attr.noArguments(): - raise WebIDLError("[%s] must take no arguments" % identifier, - [attr.location]) + raise WebIDLError( + "[%s] must take no arguments" % identifier, [attr.location] + ) # This gets propagated to all our members. 
for member in self.members: if member.getExtendedAttribute("SecureContext"): - raise WebIDLError("[SecureContext] specified on both " - "an interface member and on the " - "interface itself", - [member.location, attr.location]) + raise WebIDLError( + "[SecureContext] specified on both " + "an interface member and on the " + "interface itself", + [member.location, attr.location], + ) member.addExtendedAttributes([attr]) - elif (identifier == "NeedResolve" or - identifier == "OverrideBuiltins" or - identifier == "ChromeOnly" or - identifier == "Unforgeable" or - identifier == "LegacyEventInit" or - identifier == "ProbablyShortLivingWrapper" or - identifier == "LegacyUnenumerableNamedProperties" or - identifier == "RunConstructorInCallerCompartment" or - identifier == "WantsEventListenerHooks" or - identifier == "Serializable"): + elif ( + identifier == "NeedResolve" + or identifier == "OverrideBuiltins" + or identifier == "ChromeOnly" + or identifier == "Unforgeable" + or identifier == "LegacyEventInit" + or identifier == "ProbablyShortLivingWrapper" + or identifier == "LegacyUnenumerableNamedProperties" + or identifier == "RunConstructorInCallerCompartment" + or identifier == "WantsEventListenerHooks" + or identifier == "Serializable" + ): # Known extended attributes that do not take values if not attr.noArguments(): - raise WebIDLError("[%s] must take no arguments" % identifier, - [attr.location]) + raise WebIDLError( + "[%s] must take no arguments" % identifier, [attr.location] + ) elif identifier == "Exposed": - convertExposedAttrToGlobalNameSet(attr, - self._exposureGlobalNames) - elif (identifier == "Pref" or - identifier == "JSImplementation" or - identifier == "HeaderFile" or - identifier == "Func" or - identifier == "Deprecated"): + convertExposedAttrToGlobalNameSet(attr, self._exposureGlobalNames) + elif ( + identifier == "Pref" + or identifier == "JSImplementation" + or identifier == "HeaderFile" + or identifier == "Func" + or identifier == "Deprecated" + ): # Known extended attributes that take a string value if not attr.hasValue(): - raise WebIDLError("[%s] must have a value" % identifier, - [attr.location]) + raise WebIDLError( + "[%s] must have a value" % identifier, [attr.location] + ) elif identifier == "InstrumentedProps": # Known extended attributes that take a list if not attr.hasArgs(): - raise WebIDLError("[%s] must have arguments" % identifier, - [attr.location]) + raise WebIDLError( + "[%s] must have arguments" % identifier, [attr.location] + ) else: - raise WebIDLError("Unknown extended attribute %s on interface" % identifier, - [attr.location]) + raise WebIDLError( + "Unknown extended attribute %s on interface" % identifier, + [attr.location], + ) attrlist = attr.listValue() self._extendedAttrDict[identifier] = attrlist if len(attrlist) else True @@ -1768,7 +1976,8 @@ def validate(self): "interface. 
Per spec, that means the object should not be " "serializable, so chances are someone made a mistake here " "somewhere.", - [self.location, self.parent.location]) + [self.location, self.parent.location], + ) def isSerializable(self): return self.getExtendedAttribute("Serializable") @@ -1787,8 +1996,9 @@ def setNonPartial(self, location, parent, members): class IDLNamespace(IDLInterfaceOrNamespace): def __init__(self, location, parentScope, name, members, isKnownNonPartial): - IDLInterfaceOrNamespace.__init__(self, location, parentScope, name, - None, members, isKnownNonPartial) + IDLInterfaceOrNamespace.__init__( + self, location, parentScope, name, None, members, isKnownNonPartial + ) def __str__(self): return "Namespace '%s'" % self.identifier.name @@ -1805,30 +2015,34 @@ def addExtendedAttributes(self, attrs): identifier = attr.identifier() if identifier == "Exposed": - convertExposedAttrToGlobalNameSet(attr, - self._exposureGlobalNames) + convertExposedAttrToGlobalNameSet(attr, self._exposureGlobalNames) elif identifier == "ClassString": # Takes a string value to override the default "Object" if # desired. if not attr.hasValue(): - raise WebIDLError("[%s] must have a value" % identifier, - [attr.location]) - elif (identifier == "ProtoObjectHack" or - identifier == "ChromeOnly"): + raise WebIDLError( + "[%s] must have a value" % identifier, [attr.location] + ) + elif identifier == "ProtoObjectHack" or identifier == "ChromeOnly": if not attr.noArguments(): - raise WebIDLError("[%s] must not have arguments" % identifier, - [attr.location]) - elif (identifier == "Pref" or - identifier == "HeaderFile" or - identifier == "Func"): + raise WebIDLError( + "[%s] must not have arguments" % identifier, [attr.location] + ) + elif ( + identifier == "Pref" + or identifier == "HeaderFile" + or identifier == "Func" + ): # Known extended attributes that take a string value if not attr.hasValue(): - raise WebIDLError("[%s] must have a value" % identifier, - [attr.location]) + raise WebIDLError( + "[%s] must have a value" % identifier, [attr.location] + ) else: - raise WebIDLError("Unknown extended attribute %s on namespace" % - identifier, - [attr.location]) + raise WebIDLError( + "Unknown extended attribute %s on namespace" % identifier, + [attr.location], + ) attrlist = attr.listValue() self._extendedAttrDict[identifier] = attrlist if len(attrlist) else True @@ -1864,8 +2078,9 @@ def canBeEmpty(self): Returns true if this dictionary can be empty (that is, it has no required members and neither do any of its ancestors). """ - return (all(member.optional for member in self.members) and - (not self.parent or self.parent.canBeEmpty())) + return all(member.optional for member in self.members) and ( + not self.parent or self.parent.canBeEmpty() + ) def finish(self, scope): if self._finished: @@ -1878,9 +2093,11 @@ def finish(self, scope): oldParent = self.parent self.parent = self.parent.finish(scope) if not isinstance(self.parent, IDLDictionary): - raise WebIDLError("Dictionary %s has parent that is not a dictionary" % - self.identifier.name, - [oldParent.location, self.parent.location]) + raise WebIDLError( + "Dictionary %s has parent that is not a dictionary" + % self.identifier.name, + [oldParent.location, self.parent.location], + ) # Make sure the parent resolves all its members before we start # looking at them. 
@@ -1904,9 +2121,10 @@ def finish(self, scope): ancestor = self.parent while ancestor: if ancestor == self: - raise WebIDLError("Dictionary %s has itself as an ancestor" % - self.identifier.name, - [self.identifier.location]) + raise WebIDLError( + "Dictionary %s has itself as an ancestor" % self.identifier.name, + [self.identifier.location], + ) inheritedMembers.extend(ancestor.members) ancestor = ancestor.parent @@ -1914,9 +2132,11 @@ def finish(self, scope): for inheritedMember in inheritedMembers: for member in self.members: if member.identifier.name == inheritedMember.identifier.name: - raise WebIDLError("Dictionary %s has two members with name %s" % - (self.identifier.name, member.identifier.name), - [member.location, inheritedMember.location]) + raise WebIDLError( + "Dictionary %s has two members with name %s" + % (self.identifier.name, member.identifier.name), + [member.location, inheritedMember.location], + ) def validate(self): def typeContainsDictionary(memberType, dictionary): @@ -1934,17 +2154,20 @@ def typeContainsDictionary(memberType, dictionary): None, if the boolean value in the first element is False. """ - if (memberType.nullable() or - memberType.isSequence() or - memberType.isRecord()): + if ( + memberType.nullable() + or memberType.isSequence() + or memberType.isRecord() + ): return typeContainsDictionary(memberType.inner, dictionary) if memberType.isDictionary(): if memberType.inner == dictionary: return (True, [memberType.location]) - (contains, locations) = dictionaryContainsDictionary(memberType.inner, - dictionary) + (contains, locations) = dictionaryContainsDictionary( + memberType.inner, dictionary + ) if contains: return (True, [memberType.location] + locations) @@ -1966,7 +2189,9 @@ def dictionaryContainsDictionary(dictMember, dictionary): if dictMember.parent == dictionary: return (True, [dictMember.location]) else: - (contains, locations) = dictionaryContainsDictionary(dictMember.parent, dictionary) + (contains, locations) = dictionaryContainsDictionary( + dictMember.parent, dictionary + ) if contains: return (True, [dictMember.location] + locations) @@ -1974,14 +2199,18 @@ def dictionaryContainsDictionary(dictMember, dictionary): for member in self.members: if member.type.isDictionary() and member.type.nullable(): - raise WebIDLError("Dictionary %s has member with nullable " - "dictionary type" % self.identifier.name, - [member.location]) + raise WebIDLError( + "Dictionary %s has member with nullable " + "dictionary type" % self.identifier.name, + [member.location], + ) (contains, locations) = typeContainsDictionary(member.type, self) if contains: - raise WebIDLError("Dictionary %s has member with itself as type." % - self.identifier.name, - [member.location] + locations) + raise WebIDLError( + "Dictionary %s has member with itself as type." 
+ % self.identifier.name, + [member.location] + locations, + ) def getExtendedAttribute(self, name): return self._extendedAttrDict.get(name, None) @@ -1990,31 +2219,35 @@ def addExtendedAttributes(self, attrs): for attr in attrs: identifier = attr.identifier() - if (identifier == "GenerateInitFromJSON" or - identifier == "GenerateInit"): + if identifier == "GenerateInitFromJSON" or identifier == "GenerateInit": if not attr.noArguments(): - raise WebIDLError("[%s] must not have arguments" % identifier, - [attr.location]) + raise WebIDLError( + "[%s] must not have arguments" % identifier, [attr.location] + ) self.needsConversionFromJS = True - elif (identifier == "GenerateConversionToJS" or - identifier == "GenerateToJSON"): + elif ( + identifier == "GenerateConversionToJS" or identifier == "GenerateToJSON" + ): if not attr.noArguments(): - raise WebIDLError("[%s] must not have arguments" % identifier, - [attr.location]) + raise WebIDLError( + "[%s] must not have arguments" % identifier, [attr.location] + ) # ToJSON methods require to-JS conversion, because we # implement ToJSON by converting to a JS object and # then using JSON.stringify. self.needsConversionToJS = True else: - raise WebIDLError("[%s] extended attribute not allowed on " - "dictionaries" % identifier, - [attr.location]) + raise WebIDLError( + "[%s] extended attribute not allowed on " + "dictionaries" % identifier, + [attr.location], + ) self._extendedAttrDict[identifier] = True def _getDependentObjects(self): deps = set(self.members) - if (self.parent): + if self.parent: deps.add(self.parent) return deps @@ -2029,8 +2262,9 @@ def __init__(self, location, parentScope, name, values): assert isinstance(name, IDLUnresolvedIdentifier) if len(values) != len(set(values)): - raise WebIDLError("Enum %s has multiple identical strings" % name.name, - [location]) + raise WebIDLError( + "Enum %s has multiple identical strings" % name.name, [location] + ) IDLObjectWithIdentifier.__init__(self, location, parentScope, name) self._values = values @@ -2049,9 +2283,10 @@ def isEnum(self): def addExtendedAttributes(self, attrs): if len(attrs) != 0: - raise WebIDLError("There are no extended attributes that are " - "allowed on enums", - [attrs[0].location, self.location]) + raise WebIDLError( + "There are no extended attributes that are " "allowed on enums", + [attrs[0].location, self.location], + ) def _getDependentObjects(self): return set() @@ -2060,40 +2295,40 @@ def _getDependentObjects(self): class IDLType(IDLObject): Tags = enum( # The integer types - 'int8', - 'uint8', - 'int16', - 'uint16', - 'int32', - 'uint32', - 'int64', - 'uint64', + "int8", + "uint8", + "int16", + "uint16", + "int32", + "uint32", + "int64", + "uint64", # Additional primitive types - 'bool', - 'unrestricted_float', - 'float', - 'unrestricted_double', + "bool", + "unrestricted_float", + "float", + "unrestricted_double", # "double" last primitive type to match IDLBuiltinType - 'double', + "double", # Other types - 'any', - 'domstring', - 'bytestring', - 'usvstring', - 'utf8string', - 'jsstring', - 'object', - 'void', + "any", + "domstring", + "bytestring", + "usvstring", + "utf8string", + "jsstring", + "object", + "void", # Funny stuff - 'interface', - 'dictionary', - 'enum', - 'callback', - 'union', - 'sequence', - 'record', - 'promise', - ) + "interface", + "dictionary", + "enum", + "callback", + "union", + "sequence", + "record", + "promise", + ) def __init__(self, location, name): IDLObject.__init__(self, location) @@ -2106,15 +2341,25 @@ def __init__(self, 
location, name): self._extendedAttrDict = {} def __hash__(self): - return (hash(self.builtin) + hash(self.name) + hash(self._clamp) + - hash(self._enforceRange) + hash(self.treatNullAsEmpty) + - hash(self._allowShared)) + return ( + hash(self.builtin) + + hash(self.name) + + hash(self._clamp) + + hash(self._enforceRange) + + hash(self.treatNullAsEmpty) + + hash(self._allowShared) + ) def __eq__(self, other): - return (other and self.builtin == other.builtin and self.name == other.name and - self._clamp == other.hasClamp() and self._enforceRange == other.hasEnforceRange() and - self.treatNullAsEmpty == other.treatNullAsEmpty and - self._allowShared == other.hasAllowShared()) + return ( + other + and self.builtin == other.builtin + and self.name == other.name + and self._clamp == other.hasClamp() + and self._enforceRange == other.hasEnforceRange() + and self.treatNullAsEmpty == other.treatNullAsEmpty + and self._allowShared == other.hasAllowShared() + ) def __ne__(self, other): return not self == other @@ -2194,17 +2439,16 @@ def isNonCallbackInterface(self): return False def isGeckoInterface(self): - """ Returns a boolean indicating whether this type is an 'interface' - type that is implemented in Gecko. At the moment, this returns - true for all interface types that are not types from the TypedArray - spec.""" + """Returns a boolean indicating whether this type is an 'interface' + type that is implemented in Gecko. At the moment, this returns + true for all interface types that are not types from the TypedArray + spec.""" return self.isInterface() and not self.isSpiderMonkeyInterface() def isSpiderMonkeyInterface(self): - """ Returns a boolean indicating whether this type is an 'interface' - type that is implemented in SpiderMonkey. """ - return self.isInterface() and (self.isBufferSource() or - self.isReadableStream()) + """Returns a boolean indicating whether this type is an 'interface' + type that is implemented in SpiderMonkey.""" + return self.isInterface() and (self.isBufferSource() or self.isReadableStream()) def isDictionary(self): return False @@ -2259,8 +2503,10 @@ def treatNonObjectAsNull(self): def withExtendedAttributes(self, attrs): if len(attrs) > 0: - raise WebIDLError("Extended attributes on types only supported for builtins", - [attrs[0].location, self.location]) + raise WebIDLError( + "Extended attributes on types only supported for builtins", + [attrs[0].location, self.location], + ) return self def getExtendedAttribute(self, name): @@ -2273,8 +2519,10 @@ def unroll(self): return self def isDistinguishableFrom(self, other): - raise TypeError("Can't tell whether a generic type is or is not " - "distinguishable from other things") + raise TypeError( + "Can't tell whether a generic type is or is not " + "distinguishable from other things" + ) def isExposedInAllOf(self, exposureSet): return True @@ -2282,7 +2530,7 @@ def isExposedInAllOf(self, exposureSet): class IDLUnresolvedType(IDLType): """ - Unresolved types are interface types + Unresolved types are interface types """ def __init__(self, location, name, attrs=[]): @@ -2297,8 +2545,7 @@ def complete(self, scope): try: obj = scope._lookupIdentifier(self.name) except: - raise WebIDLError("Unresolved type '%s'." % self.name, - [self.location]) + raise WebIDLError("Unresolved type '%s'." 
% self.name, [self.location]) assert obj if obj.isType(): @@ -2306,10 +2553,11 @@ def complete(self, scope): assert not obj.isType() if obj.isTypedef(): assert self.name.name == obj.identifier.name - typedefType = IDLTypedefType(self.location, obj.innerType, - obj.identifier) + typedefType = IDLTypedefType(self.location, obj.innerType, obj.identifier) assert not typedefType.isComplete() - return typedefType.complete(scope).withExtendedAttributes(self.extraTypeAttributes) + return typedefType.complete(scope).withExtendedAttributes( + self.extraTypeAttributes + ) elif obj.isCallback() and not obj.isInterface(): assert self.name.name == obj.identifier.name return IDLCallbackType(self.location, obj) @@ -2321,8 +2569,10 @@ def withExtendedAttributes(self, attrs): return IDLUnresolvedType(self.location, self.name, attrs) def isDistinguishableFrom(self, other): - raise TypeError("Can't tell whether an unresolved type is or is not " - "distinguishable from other things") + raise TypeError( + "Can't tell whether an unresolved type is or is not " + "distinguishable from other things" + ) class IDLParametrizedType(IDLType): @@ -2477,27 +2727,36 @@ def complete(self, scope): assert self.inner.isComplete() if self.inner.nullable(): - raise WebIDLError("The inner type of a nullable type must not be " - "a nullable type", - [self.location, self.inner.location]) + raise WebIDLError( + "The inner type of a nullable type must not be " "a nullable type", + [self.location, self.inner.location], + ) if self.inner.isUnion(): if self.inner.hasNullableType: - raise WebIDLError("The inner type of a nullable type must not " - "be a union type that itself has a nullable " - "type as a member type", [self.location]) + raise WebIDLError( + "The inner type of a nullable type must not " + "be a union type that itself has a nullable " + "type as a member type", + [self.location], + ) if self.inner.isDOMString(): if self.inner.treatNullAsEmpty: - raise WebIDLError("[TreatNullAs] not allowed on a nullable DOMString", - [self.location, self.inner.location]) + raise WebIDLError( + "[TreatNullAs] not allowed on a nullable DOMString", + [self.location, self.inner.location], + ) self.name = self.inner.name + "OrNull" return self def isDistinguishableFrom(self, other): - if (other.nullable() or - other.isDictionary() or - (other.isUnion() and - (other.hasNullableType or other.hasDictionaryType()))): + if ( + other.nullable() + or other.isDictionary() + or ( + other.isUnion() and (other.hasNullableType or other.hasDictionaryType()) + ) + ): # Can't tell which type null should become return False return self.inner.isDistinguishableFrom(other) @@ -2588,9 +2847,15 @@ def isDistinguishableFrom(self, other): if other.isUnion(): # Just forward to the union; it'll deal return other.isDistinguishableFrom(self) - return (other.isPrimitive() or other.isString() or other.isEnum() or - other.isInterface() or other.isDictionary() or - other.isCallback() or other.isRecord()) + return ( + other.isPrimitive() + or other.isString() + or other.isEnum() + or other.isInterface() + or other.isDictionary() + or other.isCallback() + or other.isRecord() + ) class IDLRecordType(IDLParametrizedType): @@ -2645,8 +2910,13 @@ def isDistinguishableFrom(self, other): if other.isUnion(): # Just forward to the union; it'll deal return other.isDistinguishableFrom(self) - return (other.isPrimitive() or other.isString() or other.isEnum() or - other.isNonCallbackInterface() or other.isSequence()) + return ( + other.isPrimitive() + or other.isString() + or 
other.isEnum() + or other.isNonCallbackInterface() + or other.isSequence() + ) def isExposedInAllOf(self, exposureSet): return self.inner.unroll().isExposedInAllOf(exposureSet) @@ -2717,36 +2987,45 @@ def typeName(type): while i < len(self.flatMemberTypes): if self.flatMemberTypes[i].nullable(): if self.hasNullableType: - raise WebIDLError("Can't have more than one nullable types in a union", - [nullableType.location, self.flatMemberTypes[i].location]) + raise WebIDLError( + "Can't have more than one nullable types in a union", + [nullableType.location, self.flatMemberTypes[i].location], + ) if self.hasDictionaryType(): - raise WebIDLError("Can't have a nullable type and a " - "dictionary type in a union", - [self._dictionaryType.location, - self.flatMemberTypes[i].location]) + raise WebIDLError( + "Can't have a nullable type and a " + "dictionary type in a union", + [ + self._dictionaryType.location, + self.flatMemberTypes[i].location, + ], + ) self.hasNullableType = True nullableType = self.flatMemberTypes[i] self.flatMemberTypes[i] = self.flatMemberTypes[i].inner continue if self.flatMemberTypes[i].isDictionary(): if self.hasNullableType: - raise WebIDLError("Can't have a nullable type and a " - "dictionary type in a union", - [nullableType.location, - self.flatMemberTypes[i].location]) + raise WebIDLError( + "Can't have a nullable type and a " + "dictionary type in a union", + [nullableType.location, self.flatMemberTypes[i].location], + ) self._dictionaryType = self.flatMemberTypes[i] elif self.flatMemberTypes[i].isUnion(): - self.flatMemberTypes[i:i + 1] = self.flatMemberTypes[i].memberTypes + self.flatMemberTypes[i : i + 1] = self.flatMemberTypes[i].memberTypes continue i += 1 for (i, t) in enumerate(self.flatMemberTypes[:-1]): - for u in self.flatMemberTypes[i + 1:]: + for u in self.flatMemberTypes[i + 1 :]: if not t.isDistinguishableFrom(u): - raise WebIDLError("Flat member types of a union should be " - "distinguishable, " + str(t) + " is not " - "distinguishable from " + str(u), - [self.location, t.location, u.location]) + raise WebIDLError( + "Flat member types of a union should be " + "distinguishable, " + str(t) + " is not " + "distinguishable from " + str(u), + [self.location, t.location, u.location], + ) return self @@ -2768,8 +3047,10 @@ def isDistinguishableFrom(self, other): def isExposedInAllOf(self, exposureSet): # We could have different member types in different globals. Just make sure that each thing in exposureSet has one of our member types exposed in it. 
for globalName in exposureSet: - if not any(t.unroll().isExposedInAllOf(set([globalName])) for t - in self.flatMemberTypes): + if not any( + t.unroll().isExposedInAllOf(set([globalName])) + for t in self.flatMemberTypes + ): return False return True @@ -2777,8 +3058,9 @@ def hasDictionaryType(self): return self._dictionaryType is not None def hasPossiblyEmptyDictionaryType(self): - return (self._dictionaryType is not None and - self._dictionaryType.inner.canBeEmpty()) + return ( + self._dictionaryType is not None and self._dictionaryType.inner.canBeEmpty() + ) def _getDependentObjects(self): return set(self.memberTypes) @@ -2889,7 +3171,9 @@ def _getDependentObjects(self): return self.inner._getDependentObjects() def withExtendedAttributes(self, attrs): - return IDLTypedefType(self.location, self.inner.withExtendedAttributes(attrs), self.name) + return IDLTypedefType( + self.location, self.inner.withExtendedAttributes(attrs), self.name + ) class IDLTypedef(IDLObjectWithIdentifier): @@ -2915,9 +3199,10 @@ def isTypedef(self): def addExtendedAttributes(self, attrs): if len(attrs) != 0: - raise WebIDLError("There are no extended attributes that are " - "allowed on typedefs", - [attrs[0].location, self.location]) + raise WebIDLError( + "There are no extended attributes that are " "allowed on typedefs", + [attrs[0].location, self.location], + ) def _getDependentObjects(self): return self.innerType._getDependentObjects() @@ -2934,9 +3219,11 @@ def __hash__(self): return hash(self._identifier) + hash(self.builtin) def __eq__(self, other): - return (isinstance(other, IDLWrapperType) and - self._identifier == other._identifier and - self.builtin == other.builtin) + return ( + isinstance(other, IDLWrapperType) + and self._identifier == other._identifier + and self.builtin == other.builtin + ) def __str__(self): return str(self.name) + " (Wrapper)" @@ -2975,8 +3262,9 @@ def isDictionary(self): return isinstance(self.inner, IDLDictionary) def isInterface(self): - return (isinstance(self.inner, IDLInterface) or - isinstance(self.inner, IDLExternalInterface)) + return isinstance(self.inner, IDLInterface) or isinstance( + self.inner, IDLExternalInterface + ) def isCallbackInterface(self): return self.isInterface() and self.inner.isCallback() @@ -3007,8 +3295,11 @@ def isJSONType(self): dictionary = dictionary.parent return True else: - raise WebIDLError("IDLWrapperType wraps type %s that we don't know if " - "is serializable" % type(self.inner), [self.location]) + raise WebIDLError( + "IDLWrapperType wraps type %s that we don't know if " + "is serializable" % type(self.inner), + [self.location], + ) def resolveType(self, parentScope): assert isinstance(parentScope, IDLScope) @@ -3035,13 +3326,23 @@ def isDistinguishableFrom(self, other): return other.isDistinguishableFrom(self) assert self.isInterface() or self.isEnum() or self.isDictionary() if self.isEnum(): - return (other.isPrimitive() or other.isInterface() or other.isObject() or - other.isCallback() or other.isDictionary() or - other.isSequence() or other.isRecord()) + return ( + other.isPrimitive() + or other.isInterface() + or other.isObject() + or other.isCallback() + or other.isDictionary() + or other.isSequence() + or other.isRecord() + ) if self.isDictionary() and other.nullable(): return False - if (other.isPrimitive() or other.isString() or other.isEnum() or - other.isSequence()): + if ( + other.isPrimitive() + or other.isString() + or other.isEnum() + or other.isSequence() + ): return True if self.isDictionary(): return 
other.isNonCallbackInterface() @@ -3054,12 +3355,11 @@ def isDistinguishableFrom(self, other): assert self.isGeckoInterface() and other.isGeckoInterface() if self.inner.isExternal() or other.unroll().inner.isExternal(): return self != other - return (len(self.inner.interfacesBasedOnSelf & - other.unroll().inner.interfacesBasedOnSelf) == 0 and - (self.isNonCallbackInterface() or - other.isNonCallbackInterface())) - if (other.isDictionary() or other.isCallback() or - other.isRecord()): + return len( + self.inner.interfacesBasedOnSelf + & other.unroll().inner.interfacesBasedOnSelf + ) == 0 and (self.isNonCallbackInterface() or other.isNonCallbackInterface()) + if other.isDictionary() or other.isCallback() or other.isRecord(): return self.isNonCallbackInterface() # Not much else |other| can be @@ -3110,8 +3410,10 @@ def __hash__(self): return hash(self.promiseInnerType()) def __eq__(self, other): - return (isinstance(other, IDLPromiseType) and - self.promiseInnerType() == other.promiseInnerType()) + return ( + isinstance(other, IDLPromiseType) + and self.promiseInnerType() == other.promiseInnerType() + ) def __str__(self): return self.inner.__str__() + "Promise" @@ -3151,44 +3453,44 @@ class IDLBuiltinType(IDLType): Types = enum( # The integer types - 'byte', - 'octet', - 'short', - 'unsigned_short', - 'long', - 'unsigned_long', - 'long_long', - 'unsigned_long_long', + "byte", + "octet", + "short", + "unsigned_short", + "long", + "unsigned_long", + "long_long", + "unsigned_long_long", # Additional primitive types - 'boolean', - 'unrestricted_float', - 'float', - 'unrestricted_double', + "boolean", + "unrestricted_float", + "float", + "unrestricted_double", # IMPORTANT: "double" must be the last primitive type listed - 'double', + "double", # Other types - 'any', - 'domstring', - 'bytestring', - 'usvstring', - 'utf8string', - 'jsstring', - 'object', - 'void', + "any", + "domstring", + "bytestring", + "usvstring", + "utf8string", + "jsstring", + "object", + "void", # Funny stuff - 'ArrayBuffer', - 'ArrayBufferView', - 'Int8Array', - 'Uint8Array', - 'Uint8ClampedArray', - 'Int16Array', - 'Uint16Array', - 'Int32Array', - 'Uint32Array', - 'Float32Array', - 'Float64Array', - 'ReadableStream', - ) + "ArrayBuffer", + "ArrayBufferView", + "Int8Array", + "Uint8Array", + "Uint8ClampedArray", + "Int16Array", + "Uint16Array", + "Int32Array", + "Uint32Array", + "Float32Array", + "Float64Array", + "ReadableStream", + ) TagLookup = { Types.byte: IDLType.Tags.int8, @@ -3244,8 +3546,8 @@ class IDLBuiltinType(IDLType): Types.domstring: "DOMString", Types.bytestring: "ByteString", Types.usvstring: "USVString", - Types.utf8string: "USVString", # That's what it is in spec terms - Types.jsstring: "USVString", # Again, that's what it is in spec terms + Types.utf8string: "USVString", # That's what it is in spec terms + Types.jsstring: "USVString", # Again, that's what it is in spec terms Types.object: "object", Types.void: "void", Types.ArrayBuffer: "ArrayBuffer", @@ -3262,8 +3564,17 @@ class IDLBuiltinType(IDLType): Types.ReadableStream: "ReadableStream", } - def __init__(self, location, name, type, clamp=False, enforceRange=False, treatNullAsEmpty=False, - allowShared=False, attrLocation=[]): + def __init__( + self, + location, + name, + type, + clamp=False, + enforceRange=False, + treatNullAsEmpty=False, + allowShared=False, + attrLocation=[], + ): """ The mutually exclusive clamp/enforceRange/treatNullAsEmpty/allowShared arguments are used to create instances of this type with the appropriate attributes attached. 
Use .clamped(), @@ -3277,7 +3588,7 @@ def __init__(self, location, name, type, clamp=False, enforceRange=False, treatN self._clamped = None self._rangeEnforced = None self._withTreatNullAs = None - self._withAllowShared = None; + self._withAllowShared = None if self.isInteger(): if clamp: self._clamp = True @@ -3288,7 +3599,9 @@ def __init__(self, location, name, type, clamp=False, enforceRange=False, treatN self.name = "RangeEnforced" + self.name self._extendedAttrDict["EnforceRange"] = True elif clamp or enforceRange: - raise WebIDLError("Non-integer types cannot be [Clamp] or [EnforceRange]", attrLocation) + raise WebIDLError( + "Non-integer types cannot be [Clamp] or [EnforceRange]", attrLocation + ) if self.isDOMString() or self.isUTF8String(): if treatNullAsEmpty: self.treatNullAsEmpty = True @@ -3301,7 +3614,10 @@ def __init__(self, location, name, type, clamp=False, enforceRange=False, treatN self._allowShared = True self._extendedAttrDict["AllowShared"] = True elif allowShared: - raise WebIDLError("Types that are not buffer source types cannot be [AllowShared]", attrLocation) + raise WebIDLError( + "Types that are not buffer source types cannot be [AllowShared]", + attrLocation, + ) def __str__(self): if self._allowShared: @@ -3314,30 +3630,46 @@ def prettyName(self): def clamped(self, attrLocation): if not self._clamped: - self._clamped = IDLBuiltinType(self.location, self.name, - self._typeTag, clamp=True, - attrLocation=attrLocation) + self._clamped = IDLBuiltinType( + self.location, + self.name, + self._typeTag, + clamp=True, + attrLocation=attrLocation, + ) return self._clamped def rangeEnforced(self, attrLocation): if not self._rangeEnforced: - self._rangeEnforced = IDLBuiltinType(self.location, self.name, - self._typeTag, enforceRange=True, - attrLocation=attrLocation) + self._rangeEnforced = IDLBuiltinType( + self.location, + self.name, + self._typeTag, + enforceRange=True, + attrLocation=attrLocation, + ) return self._rangeEnforced def withTreatNullAs(self, attrLocation): if not self._withTreatNullAs: - self._withTreatNullAs = IDLBuiltinType(self.location, self.name, - self._typeTag, treatNullAsEmpty=True, - attrLocation=attrLocation) + self._withTreatNullAs = IDLBuiltinType( + self.location, + self.name, + self._typeTag, + treatNullAsEmpty=True, + attrLocation=attrLocation, + ) return self._withTreatNullAs def withAllowShared(self, attrLocation): if not self._withAllowShared: - self._withAllowShared = IDLBuiltinType(self.location, self.name, - self._typeTag, allowShared=True, - attrLocation=attrLocation) + self._withAllowShared = IDLBuiltinType( + self.location, + self.name, + self._typeTag, + allowShared=True, + attrLocation=attrLocation, + ) return self._withAllowShared def isPrimitive(self): @@ -3350,11 +3682,13 @@ def isNumeric(self): return self.isPrimitive() and not self.isBoolean() def isString(self): - return (self._typeTag == IDLBuiltinType.Types.domstring or - self._typeTag == IDLBuiltinType.Types.bytestring or - self._typeTag == IDLBuiltinType.Types.usvstring or - self._typeTag == IDLBuiltinType.Types.utf8string or - self._typeTag == IDLBuiltinType.Types.jsstring) + return ( + self._typeTag == IDLBuiltinType.Types.domstring + or self._typeTag == IDLBuiltinType.Types.bytestring + or self._typeTag == IDLBuiltinType.Types.usvstring + or self._typeTag == IDLBuiltinType.Types.utf8string + or self._typeTag == IDLBuiltinType.Types.jsstring + ) def isByteString(self): return self._typeTag == IDLBuiltinType.Types.bytestring @@ -3381,8 +3715,10 @@ def 
isArrayBufferView(self): return self._typeTag == IDLBuiltinType.Types.ArrayBufferView def isTypedArray(self): - return (self._typeTag >= IDLBuiltinType.Types.Int8Array and - self._typeTag <= IDLBuiltinType.Types.Float64Array) + return ( + self._typeTag >= IDLBuiltinType.Types.Int8Array + and self._typeTag <= IDLBuiltinType.Types.Float64Array + ) def isReadableStream(self): return self._typeTag == IDLBuiltinType.Types.ReadableStream @@ -3391,25 +3727,31 @@ def isInterface(self): # TypedArray things are interface types per the TypedArray spec, # but we handle them as builtins because SpiderMonkey implements # all of it internally. - return (self.isArrayBuffer() or - self.isArrayBufferView() or - self.isTypedArray() or - self.isReadableStream()) + return ( + self.isArrayBuffer() + or self.isArrayBufferView() + or self.isTypedArray() + or self.isReadableStream() + ) def isNonCallbackInterface(self): # All the interfaces we can be are non-callback return self.isInterface() def isFloat(self): - return (self._typeTag == IDLBuiltinType.Types.float or - self._typeTag == IDLBuiltinType.Types.double or - self._typeTag == IDLBuiltinType.Types.unrestricted_float or - self._typeTag == IDLBuiltinType.Types.unrestricted_double) + return ( + self._typeTag == IDLBuiltinType.Types.float + or self._typeTag == IDLBuiltinType.Types.double + or self._typeTag == IDLBuiltinType.Types.unrestricted_float + or self._typeTag == IDLBuiltinType.Types.unrestricted_double + ) def isUnrestricted(self): assert self.isFloat() - return (self._typeTag == IDLBuiltinType.Types.unrestricted_float or - self._typeTag == IDLBuiltinType.Types.unrestricted_double) + return ( + self._typeTag == IDLBuiltinType.Types.unrestricted_float + or self._typeTag == IDLBuiltinType.Types.unrestricted_double + ) def isJSONType(self): return self.isPrimitive() or self.isString() or self.isObject() @@ -3427,20 +3769,39 @@ def isDistinguishableFrom(self, other): # Just forward to the union; it'll deal return other.isDistinguishableFrom(self) if self.isBoolean(): - return (other.isNumeric() or other.isString() or other.isEnum() or - other.isInterface() or other.isObject() or - other.isCallback() or other.isDictionary() or - other.isSequence() or other.isRecord()) + return ( + other.isNumeric() + or other.isString() + or other.isEnum() + or other.isInterface() + or other.isObject() + or other.isCallback() + or other.isDictionary() + or other.isSequence() + or other.isRecord() + ) if self.isNumeric(): - return (other.isBoolean() or other.isString() or other.isEnum() or - other.isInterface() or other.isObject() or - other.isCallback() or other.isDictionary() or - other.isSequence() or other.isRecord()) + return ( + other.isBoolean() + or other.isString() + or other.isEnum() + or other.isInterface() + or other.isObject() + or other.isCallback() + or other.isDictionary() + or other.isSequence() + or other.isRecord() + ) if self.isString(): - return (other.isPrimitive() or other.isInterface() or - other.isObject() or - other.isCallback() or other.isDictionary() or - other.isSequence() or other.isRecord()) + return ( + other.isPrimitive() + or other.isInterface() + or other.isObject() + or other.isCallback() + or other.isDictionary() + or other.isSequence() + or other.isRecord() + ) if self.isAny(): # Can't tell "any" apart from anything return False @@ -3451,23 +3812,41 @@ def isDistinguishableFrom(self, other): # Not much else we could be! 
assert self.isSpiderMonkeyInterface() # Like interfaces, but we know we're not a callback - return (other.isPrimitive() or other.isString() or other.isEnum() or - other.isCallback() or other.isDictionary() or - other.isSequence() or other.isRecord() or - (other.isInterface() and ( - # ArrayBuffer is distinguishable from everything - # that's not an ArrayBuffer or a callback interface - (self.isArrayBuffer() and not other.isArrayBuffer()) or - (self.isReadableStream() and not other.isReadableStream()) or - # ArrayBufferView is distinguishable from everything - # that's not an ArrayBufferView or typed array. - (self.isArrayBufferView() and not other.isArrayBufferView() and - not other.isTypedArray()) or - # Typed arrays are distinguishable from everything - # except ArrayBufferView and the same type of typed - # array - (self.isTypedArray() and not other.isArrayBufferView() and not - (other.isTypedArray() and other.name == self.name))))) + return ( + other.isPrimitive() + or other.isString() + or other.isEnum() + or other.isCallback() + or other.isDictionary() + or other.isSequence() + or other.isRecord() + or ( + other.isInterface() + and ( + # ArrayBuffer is distinguishable from everything + # that's not an ArrayBuffer or a callback interface + (self.isArrayBuffer() and not other.isArrayBuffer()) + or (self.isReadableStream() and not other.isReadableStream()) + or + # ArrayBufferView is distinguishable from everything + # that's not an ArrayBufferView or typed array. + ( + self.isArrayBufferView() + and not other.isArrayBufferView() + and not other.isTypedArray() + ) + or + # Typed arrays are distinguishable from everything + # except ArrayBufferView and the same type of typed + # array + ( + self.isTypedArray() + and not other.isArrayBufferView() + and not (other.isTypedArray() and other.name == self.name) + ) + ) + ) + ) def _getDependentObjects(self): return set() @@ -3478,160 +3857,204 @@ def withExtendedAttributes(self, attrs): identifier = attribute.identifier() if identifier == "Clamp": if not attribute.noArguments(): - raise WebIDLError("[Clamp] must take no arguments", - [attribute.location]) + raise WebIDLError( + "[Clamp] must take no arguments", [attribute.location] + ) if ret.hasEnforceRange() or self._enforceRange: - raise WebIDLError("[EnforceRange] and [Clamp] are mutually exclusive", - [self.location, attribute.location]) + raise WebIDLError( + "[EnforceRange] and [Clamp] are mutually exclusive", + [self.location, attribute.location], + ) ret = self.clamped([self.location, attribute.location]) elif identifier == "EnforceRange": if not attribute.noArguments(): - raise WebIDLError("[EnforceRange] must take no arguments", - [attribute.location]) + raise WebIDLError( + "[EnforceRange] must take no arguments", [attribute.location] + ) if ret.hasClamp() or self._clamp: - raise WebIDLError("[EnforceRange] and [Clamp] are mutually exclusive", - [self.location, attribute.location]) + raise WebIDLError( + "[EnforceRange] and [Clamp] are mutually exclusive", + [self.location, attribute.location], + ) ret = self.rangeEnforced([self.location, attribute.location]) elif identifier == "TreatNullAs": if not (self.isDOMString() or self.isUTF8String()): - raise WebIDLError("[TreatNullAs] only allowed on DOMStrings and UTF8Strings", - [self.location, attribute.location]) + raise WebIDLError( + "[TreatNullAs] only allowed on DOMStrings and UTF8Strings", + [self.location, attribute.location], + ) assert not self.nullable() if not attribute.hasValue(): - raise WebIDLError("[TreatNullAs] must 
take an identifier argument", - [attribute.location]) + raise WebIDLError( + "[TreatNullAs] must take an identifier argument", + [attribute.location], + ) value = attribute.value() - if value != 'EmptyString': - raise WebIDLError("[TreatNullAs] must take the identifier " - "'EmptyString', not '%s'" % value, - [attribute.location]) + if value != "EmptyString": + raise WebIDLError( + "[TreatNullAs] must take the identifier " + "'EmptyString', not '%s'" % value, + [attribute.location], + ) ret = self.withTreatNullAs([self.location, attribute.location]) elif identifier == "AllowShared": if not attribute.noArguments(): - raise WebIDLError("[AllowShared] must take no arguments", - [attribute.location]) + raise WebIDLError( + "[AllowShared] must take no arguments", [attribute.location] + ) if not self.isBufferSource(): - raise WebIDLError("[AllowShared] only allowed on buffer source types", - [self.location, attribute.location]) + raise WebIDLError( + "[AllowShared] only allowed on buffer source types", + [self.location, attribute.location], + ) ret = self.withAllowShared([self.location, attribute.location]) else: - raise WebIDLError("Unhandled extended attribute on type", - [self.location, attribute.location]) + raise WebIDLError( + "Unhandled extended attribute on type", + [self.location, attribute.location], + ) return ret + BuiltinTypes = { - IDLBuiltinType.Types.byte: - IDLBuiltinType(BuiltinLocation(""), "Byte", - IDLBuiltinType.Types.byte), - IDLBuiltinType.Types.octet: - IDLBuiltinType(BuiltinLocation(""), "Octet", - IDLBuiltinType.Types.octet), - IDLBuiltinType.Types.short: - IDLBuiltinType(BuiltinLocation(""), "Short", - IDLBuiltinType.Types.short), - IDLBuiltinType.Types.unsigned_short: - IDLBuiltinType(BuiltinLocation(""), "UnsignedShort", - IDLBuiltinType.Types.unsigned_short), - IDLBuiltinType.Types.long: - IDLBuiltinType(BuiltinLocation(""), "Long", - IDLBuiltinType.Types.long), - IDLBuiltinType.Types.unsigned_long: - IDLBuiltinType(BuiltinLocation(""), "UnsignedLong", - IDLBuiltinType.Types.unsigned_long), - IDLBuiltinType.Types.long_long: - IDLBuiltinType(BuiltinLocation(""), "LongLong", - IDLBuiltinType.Types.long_long), - IDLBuiltinType.Types.unsigned_long_long: - IDLBuiltinType(BuiltinLocation(""), "UnsignedLongLong", - IDLBuiltinType.Types.unsigned_long_long), - IDLBuiltinType.Types.boolean: - IDLBuiltinType(BuiltinLocation(""), "Boolean", - IDLBuiltinType.Types.boolean), - IDLBuiltinType.Types.float: - IDLBuiltinType(BuiltinLocation(""), "Float", - IDLBuiltinType.Types.float), - IDLBuiltinType.Types.unrestricted_float: - IDLBuiltinType(BuiltinLocation(""), "UnrestrictedFloat", - IDLBuiltinType.Types.unrestricted_float), - IDLBuiltinType.Types.double: - IDLBuiltinType(BuiltinLocation(""), "Double", - IDLBuiltinType.Types.double), - IDLBuiltinType.Types.unrestricted_double: - IDLBuiltinType(BuiltinLocation(""), "UnrestrictedDouble", - IDLBuiltinType.Types.unrestricted_double), - IDLBuiltinType.Types.any: - IDLBuiltinType(BuiltinLocation(""), "Any", - IDLBuiltinType.Types.any), - IDLBuiltinType.Types.domstring: - IDLBuiltinType(BuiltinLocation(""), "String", - IDLBuiltinType.Types.domstring), - IDLBuiltinType.Types.bytestring: - IDLBuiltinType(BuiltinLocation(""), "ByteString", - IDLBuiltinType.Types.bytestring), - IDLBuiltinType.Types.usvstring: - IDLBuiltinType(BuiltinLocation(""), "USVString", - IDLBuiltinType.Types.usvstring), - IDLBuiltinType.Types.utf8string: - IDLBuiltinType(BuiltinLocation(""), "UTF8String", - IDLBuiltinType.Types.utf8string), - 
IDLBuiltinType.Types.jsstring: - IDLBuiltinType(BuiltinLocation(""), "JSString", - IDLBuiltinType.Types.jsstring), - IDLBuiltinType.Types.object: - IDLBuiltinType(BuiltinLocation(""), "Object", - IDLBuiltinType.Types.object), - IDLBuiltinType.Types.void: - IDLBuiltinType(BuiltinLocation(""), "Void", - IDLBuiltinType.Types.void), - IDLBuiltinType.Types.ArrayBuffer: - IDLBuiltinType(BuiltinLocation(""), "ArrayBuffer", - IDLBuiltinType.Types.ArrayBuffer), - IDLBuiltinType.Types.ArrayBufferView: - IDLBuiltinType(BuiltinLocation(""), "ArrayBufferView", - IDLBuiltinType.Types.ArrayBufferView), - IDLBuiltinType.Types.Int8Array: - IDLBuiltinType(BuiltinLocation(""), "Int8Array", - IDLBuiltinType.Types.Int8Array), - IDLBuiltinType.Types.Uint8Array: - IDLBuiltinType(BuiltinLocation(""), "Uint8Array", - IDLBuiltinType.Types.Uint8Array), - IDLBuiltinType.Types.Uint8ClampedArray: - IDLBuiltinType(BuiltinLocation(""), "Uint8ClampedArray", - IDLBuiltinType.Types.Uint8ClampedArray), - IDLBuiltinType.Types.Int16Array: - IDLBuiltinType(BuiltinLocation(""), "Int16Array", - IDLBuiltinType.Types.Int16Array), - IDLBuiltinType.Types.Uint16Array: - IDLBuiltinType(BuiltinLocation(""), "Uint16Array", - IDLBuiltinType.Types.Uint16Array), - IDLBuiltinType.Types.Int32Array: - IDLBuiltinType(BuiltinLocation(""), "Int32Array", - IDLBuiltinType.Types.Int32Array), - IDLBuiltinType.Types.Uint32Array: - IDLBuiltinType(BuiltinLocation(""), "Uint32Array", - IDLBuiltinType.Types.Uint32Array), - IDLBuiltinType.Types.Float32Array: - IDLBuiltinType(BuiltinLocation(""), "Float32Array", - IDLBuiltinType.Types.Float32Array), - IDLBuiltinType.Types.Float64Array: - IDLBuiltinType(BuiltinLocation(""), "Float64Array", - IDLBuiltinType.Types.Float64Array), - IDLBuiltinType.Types.ReadableStream: - IDLBuiltinType(BuiltinLocation(""), "ReadableStream", - IDLBuiltinType.Types.ReadableStream), + IDLBuiltinType.Types.byte: IDLBuiltinType( + BuiltinLocation(""), "Byte", IDLBuiltinType.Types.byte + ), + IDLBuiltinType.Types.octet: IDLBuiltinType( + BuiltinLocation(""), "Octet", IDLBuiltinType.Types.octet + ), + IDLBuiltinType.Types.short: IDLBuiltinType( + BuiltinLocation(""), "Short", IDLBuiltinType.Types.short + ), + IDLBuiltinType.Types.unsigned_short: IDLBuiltinType( + BuiltinLocation(""), + "UnsignedShort", + IDLBuiltinType.Types.unsigned_short, + ), + IDLBuiltinType.Types.long: IDLBuiltinType( + BuiltinLocation(""), "Long", IDLBuiltinType.Types.long + ), + IDLBuiltinType.Types.unsigned_long: IDLBuiltinType( + BuiltinLocation(""), + "UnsignedLong", + IDLBuiltinType.Types.unsigned_long, + ), + IDLBuiltinType.Types.long_long: IDLBuiltinType( + BuiltinLocation(""), "LongLong", IDLBuiltinType.Types.long_long + ), + IDLBuiltinType.Types.unsigned_long_long: IDLBuiltinType( + BuiltinLocation(""), + "UnsignedLongLong", + IDLBuiltinType.Types.unsigned_long_long, + ), + IDLBuiltinType.Types.boolean: IDLBuiltinType( + BuiltinLocation(""), "Boolean", IDLBuiltinType.Types.boolean + ), + IDLBuiltinType.Types.float: IDLBuiltinType( + BuiltinLocation(""), "Float", IDLBuiltinType.Types.float + ), + IDLBuiltinType.Types.unrestricted_float: IDLBuiltinType( + BuiltinLocation(""), + "UnrestrictedFloat", + IDLBuiltinType.Types.unrestricted_float, + ), + IDLBuiltinType.Types.double: IDLBuiltinType( + BuiltinLocation(""), "Double", IDLBuiltinType.Types.double + ), + IDLBuiltinType.Types.unrestricted_double: IDLBuiltinType( + BuiltinLocation(""), + "UnrestrictedDouble", + IDLBuiltinType.Types.unrestricted_double, + ), + IDLBuiltinType.Types.any: IDLBuiltinType( + 
BuiltinLocation(""), "Any", IDLBuiltinType.Types.any + ), + IDLBuiltinType.Types.domstring: IDLBuiltinType( + BuiltinLocation(""), "String", IDLBuiltinType.Types.domstring + ), + IDLBuiltinType.Types.bytestring: IDLBuiltinType( + BuiltinLocation(""), "ByteString", IDLBuiltinType.Types.bytestring + ), + IDLBuiltinType.Types.usvstring: IDLBuiltinType( + BuiltinLocation(""), "USVString", IDLBuiltinType.Types.usvstring + ), + IDLBuiltinType.Types.utf8string: IDLBuiltinType( + BuiltinLocation(""), "UTF8String", IDLBuiltinType.Types.utf8string + ), + IDLBuiltinType.Types.jsstring: IDLBuiltinType( + BuiltinLocation(""), "JSString", IDLBuiltinType.Types.jsstring + ), + IDLBuiltinType.Types.object: IDLBuiltinType( + BuiltinLocation(""), "Object", IDLBuiltinType.Types.object + ), + IDLBuiltinType.Types.void: IDLBuiltinType( + BuiltinLocation(""), "Void", IDLBuiltinType.Types.void + ), + IDLBuiltinType.Types.ArrayBuffer: IDLBuiltinType( + BuiltinLocation(""), + "ArrayBuffer", + IDLBuiltinType.Types.ArrayBuffer, + ), + IDLBuiltinType.Types.ArrayBufferView: IDLBuiltinType( + BuiltinLocation(""), + "ArrayBufferView", + IDLBuiltinType.Types.ArrayBufferView, + ), + IDLBuiltinType.Types.Int8Array: IDLBuiltinType( + BuiltinLocation(""), "Int8Array", IDLBuiltinType.Types.Int8Array + ), + IDLBuiltinType.Types.Uint8Array: IDLBuiltinType( + BuiltinLocation(""), "Uint8Array", IDLBuiltinType.Types.Uint8Array + ), + IDLBuiltinType.Types.Uint8ClampedArray: IDLBuiltinType( + BuiltinLocation(""), + "Uint8ClampedArray", + IDLBuiltinType.Types.Uint8ClampedArray, + ), + IDLBuiltinType.Types.Int16Array: IDLBuiltinType( + BuiltinLocation(""), "Int16Array", IDLBuiltinType.Types.Int16Array + ), + IDLBuiltinType.Types.Uint16Array: IDLBuiltinType( + BuiltinLocation(""), + "Uint16Array", + IDLBuiltinType.Types.Uint16Array, + ), + IDLBuiltinType.Types.Int32Array: IDLBuiltinType( + BuiltinLocation(""), "Int32Array", IDLBuiltinType.Types.Int32Array + ), + IDLBuiltinType.Types.Uint32Array: IDLBuiltinType( + BuiltinLocation(""), + "Uint32Array", + IDLBuiltinType.Types.Uint32Array, + ), + IDLBuiltinType.Types.Float32Array: IDLBuiltinType( + BuiltinLocation(""), + "Float32Array", + IDLBuiltinType.Types.Float32Array, + ), + IDLBuiltinType.Types.Float64Array: IDLBuiltinType( + BuiltinLocation(""), + "Float64Array", + IDLBuiltinType.Types.Float64Array, + ), + IDLBuiltinType.Types.ReadableStream: IDLBuiltinType( + BuiltinLocation(""), + "ReadableStream", + IDLBuiltinType.Types.ReadableStream, + ), } integerTypeSizes = { IDLBuiltinType.Types.byte: (-128, 127), - IDLBuiltinType.Types.octet: (0, 255), + IDLBuiltinType.Types.octet: (0, 255), IDLBuiltinType.Types.short: (-32768, 32767), IDLBuiltinType.Types.unsigned_short: (0, 65535), IDLBuiltinType.Types.long: (-2147483648, 2147483647), IDLBuiltinType.Types.unsigned_long: (0, 4294967295), IDLBuiltinType.Types.long_long: (-9223372036854775808, 9223372036854775807), - IDLBuiltinType.Types.unsigned_long_long: (0, 18446744073709551615) + IDLBuiltinType.Types.unsigned_long_long: (0, 18446744073709551615), } @@ -3643,12 +4066,14 @@ def matchIntegerValueToType(value): return None + class NoCoercionFoundError(WebIDLError): """ A class we use to indicate generic coercion failures because none of the types worked out in IDLValue.coerceToType. 
""" + class IDLValue(IDLObject): def __init__(self, location, type, value): IDLObject.__init__(self, location) @@ -3683,8 +4108,9 @@ def coerceToType(self, type, location): # non-WebIDLErrors here, because those can just happen if # "type" is not something that can have a default value at # all. - if (isinstance(e, WebIDLError) and - not isinstance(e, NoCoercionFoundError)): + if isinstance(e, WebIDLError) and not isinstance( + e, NoCoercionFoundError + ): raise e # If the type allows null, rerun this matching on the inner type, except @@ -3703,29 +4129,41 @@ def coerceToType(self, type, location): # Promote return IDLValue(self.location, type, self.value) else: - raise WebIDLError("Value %s is out of range for type %s." % - (self.value, type), [location]) + raise WebIDLError( + "Value %s is out of range for type %s." % (self.value, type), + [location], + ) elif self.type.isInteger() and type.isFloat(): # Convert an integer literal into float - if -2**24 <= self.value <= 2**24: + if -(2 ** 24) <= self.value <= 2 ** 24: return IDLValue(self.location, type, float(self.value)) else: - raise WebIDLError("Converting value %s to %s will lose precision." % - (self.value, type), [location]) + raise WebIDLError( + "Converting value %s to %s will lose precision." + % (self.value, type), + [location], + ) elif self.type.isString() and type.isEnum(): # Just keep our string, but make sure it's a valid value for this enum enum = type.unroll().inner if self.value not in enum.values(): - raise WebIDLError("'%s' is not a valid default value for enum %s" - % (self.value, enum.identifier.name), - [location, enum.location]) + raise WebIDLError( + "'%s' is not a valid default value for enum %s" + % (self.value, enum.identifier.name), + [location, enum.location], + ) return self elif self.type.isFloat() and type.isFloat(): - if (not type.isUnrestricted() and - (self.value == float("inf") or self.value == float("-inf") or - math.isnan(self.value))): - raise WebIDLError("Trying to convert unrestricted value %s to non-unrestricted" - % self.value, [location]) + if not type.isUnrestricted() and ( + self.value == float("inf") + or self.value == float("-inf") + or math.isnan(self.value) + ): + raise WebIDLError( + "Trying to convert unrestricted value %s to non-unrestricted" + % self.value, + [location], + ) return IDLValue(self.location, type, self.value) elif self.type.isString() and type.isUSVString(): # Allow USVStrings to use default value just like @@ -3738,23 +4176,31 @@ def coerceToType(self, type, location): # TreatNullAsEmpty is a different type for resolution reasons, # however once you have a value it doesn't matter return self - elif self.type.isString() and (type.isByteString() or type.isJSString() or type.isUTF8String()): + elif self.type.isString() and ( + type.isByteString() or type.isJSString() or type.isUTF8String() + ): # Allow ByteStrings, UTF8String, and JSStrings to use a default # value like DOMString. # No coercion is required as Codegen.py will handle the # extra steps. We want to make sure that our string contains # only valid characters, so we check that here. - valid_ascii_lit = " " + string.ascii_letters + string.digits + string.punctuation + valid_ascii_lit = ( + " " + string.ascii_letters + string.digits + string.punctuation + ) for idx, c in enumerate(self.value): if c not in valid_ascii_lit: - raise WebIDLError("Coercing this string literal %s to a ByteString is not supported yet. " - "Coercion failed due to an unsupported byte %d at index %d." 
- % (self.value.__repr__(), ord(c), idx), [location]) + raise WebIDLError( + "Coercing this string literal %s to a ByteString is not supported yet. " + "Coercion failed due to an unsupported byte %d at index %d." + % (self.value.__repr__(), ord(c), idx), + [location], + ) return IDLValue(self.location, type, self.value) - raise NoCoercionFoundError("Cannot coerce type %s to type %s." % - (self.type, type), [location]) + raise NoCoercionFoundError( + "Cannot coerce type %s to type %s." % (self.type, type), [location] + ) def _getDependentObjects(self): return set() @@ -3767,11 +4213,12 @@ def __init__(self, location): self.value = None def coerceToType(self, type, location): - if (not isinstance(type, IDLNullableType) and - not (type.isUnion() and type.hasNullableType) and - not type.isAny()): - raise WebIDLError("Cannot coerce null value to type %s." % type, - [location]) + if ( + not isinstance(type, IDLNullableType) + and not (type.isUnion() and type.hasNullableType) + and not type.isAny() + ): + raise WebIDLError("Cannot coerce null value to type %s." % type, [location]) nullValue = IDLNullValue(self.location) if type.isUnion() and not type.nullable() and type.hasDictionaryType(): @@ -3806,8 +4253,9 @@ def coerceToType(self, type, location): pass if not type.isSequence(): - raise WebIDLError("Cannot coerce empty sequence value to type %s." % type, - [location]) + raise WebIDLError( + "Cannot coerce empty sequence value to type %s." % type, [location] + ) emptySequenceValue = IDLEmptySequenceValue(self.location) emptySequenceValue.type = type @@ -3835,8 +4283,9 @@ def coerceToType(self, type, location): pass if not type.isDictionary(): - raise WebIDLError("Cannot coerce default dictionary value to type %s." % type, - [location]) + raise WebIDLError( + "Cannot coerce default dictionary value to type %s." % type, [location] + ) defaultDictionaryValue = IDLDefaultDictionaryValue(self.location) defaultDictionaryValue.type = type @@ -3854,8 +4303,9 @@ def __init__(self, location): def coerceToType(self, type, location): if not type.isAny(): - raise WebIDLError("Cannot coerce undefined value to type %s." % type, - [location]) + raise WebIDLError( + "Cannot coerce undefined value to type %s." 
% type, [location] + ) undefinedValue = IDLUndefinedValue(self.location) undefinedValue.type = type @@ -3867,18 +4317,9 @@ def _getDependentObjects(self): class IDLInterfaceMember(IDLObjectWithIdentifier, IDLExposureMixins): - Tags = enum( - 'Const', - 'Attr', - 'Method', - 'MaplikeOrSetlike', - 'Iterable' - ) + Tags = enum("Const", "Attr", "Method", "MaplikeOrSetlike", "Iterable") - Special = enum( - 'Static', - 'Stringifier' - ) + Special = enum("Static", "Stringifier") AffectsValues = ("Nothing", "Everything") DependsOnValues = ("Nothing", "DOMState", "DeviceState", "Everything") @@ -3902,8 +4343,10 @@ def isConst(self): return self.tag == IDLInterfaceMember.Tags.Const def isMaplikeOrSetlikeOrIterable(self): - return (self.tag == IDLInterfaceMember.Tags.MaplikeOrSetlike or - self.tag == IDLInterfaceMember.Tags.Iterable) + return ( + self.tag == IDLInterfaceMember.Tags.MaplikeOrSetlike + or self.tag == IDLInterfaceMember.Tags.Iterable + ) def isMaplikeOrSetlike(self): return self.tag == IDLInterfaceMember.Tags.MaplikeOrSetlike @@ -3912,7 +4355,9 @@ def addExtendedAttributes(self, attrs): for attr in attrs: self.handleExtendedAttribute(attr) attrlist = attr.listValue() - self._extendedAttrDict[attr.identifier()] = attrlist if len(attrlist) else True + self._extendedAttrDict[attr.identifier()] = ( + attrlist if len(attrlist) else True + ) def handleExtendedAttribute(self, attr): pass @@ -3926,66 +4371,84 @@ def finish(self, scope): def validate(self): if self.isAttr() or self.isMethod(): if self.affects == "Everything" and self.dependsOn != "Everything": - raise WebIDLError("Interface member is flagged as affecting " - "everything but not depending on everything. " - "That seems rather unlikely.", - [self.location]) + raise WebIDLError( + "Interface member is flagged as affecting " + "everything but not depending on everything. 
" + "That seems rather unlikely.", + [self.location], + ) if self.getExtendedAttribute("NewObject"): if self.dependsOn == "Nothing" or self.dependsOn == "DOMState": - raise WebIDLError("A [NewObject] method is not idempotent, " - "so it has to depend on something other than DOM state.", - [self.location]) - if (self.getExtendedAttribute("Cached") or - self.getExtendedAttribute("StoreInSlot")): - raise WebIDLError("A [NewObject] attribute shouldnt be " - "[Cached] or [StoreInSlot], since the point " - "of those is to keep returning the same " - "thing across multiple calls, which is not " - "what [NewObject] does.", - [self.location]) + raise WebIDLError( + "A [NewObject] method is not idempotent, " + "so it has to depend on something other than DOM state.", + [self.location], + ) + if self.getExtendedAttribute("Cached") or self.getExtendedAttribute( + "StoreInSlot" + ): + raise WebIDLError( + "A [NewObject] attribute shouldnt be " + "[Cached] or [StoreInSlot], since the point " + "of those is to keep returning the same " + "thing across multiple calls, which is not " + "what [NewObject] does.", + [self.location], + ) def _setDependsOn(self, dependsOn): if self.dependsOn != "Everything": - raise WebIDLError("Trying to specify multiple different DependsOn, " - "Pure, or Constant extended attributes for " - "attribute", [self.location]) + raise WebIDLError( + "Trying to specify multiple different DependsOn, " + "Pure, or Constant extended attributes for " + "attribute", + [self.location], + ) if dependsOn not in IDLInterfaceMember.DependsOnValues: - raise WebIDLError("Invalid [DependsOn=%s] on attribute" % dependsOn, - [self.location]) + raise WebIDLError( + "Invalid [DependsOn=%s] on attribute" % dependsOn, [self.location] + ) self.dependsOn = dependsOn def _setAffects(self, affects): if self.affects != "Everything": - raise WebIDLError("Trying to specify multiple different Affects, " - "Pure, or Constant extended attributes for " - "attribute", [self.location]) + raise WebIDLError( + "Trying to specify multiple different Affects, " + "Pure, or Constant extended attributes for " + "attribute", + [self.location], + ) if affects not in IDLInterfaceMember.AffectsValues: - raise WebIDLError("Invalid [Affects=%s] on attribute" % dependsOn, - [self.location]) + raise WebIDLError( + "Invalid [Affects=%s] on attribute" % dependsOn, [self.location] + ) self.affects = affects def _addAlias(self, alias): if alias in self.aliases: - raise WebIDLError("Duplicate [Alias=%s] on attribute" % alias, - [self.location]) + raise WebIDLError( + "Duplicate [Alias=%s] on attribute" % alias, [self.location] + ) self.aliases.append(alias) def _addBindingAlias(self, bindingAlias): if bindingAlias in self.bindingAliases: - raise WebIDLError("Duplicate [BindingAlias=%s] on attribute" % bindingAlias, - [self.location]) + raise WebIDLError( + "Duplicate [BindingAlias=%s] on attribute" % bindingAlias, + [self.location], + ) self.bindingAliases.append(bindingAlias) -class IDLMaplikeOrSetlikeOrIterableBase(IDLInterfaceMember): +class IDLMaplikeOrSetlikeOrIterableBase(IDLInterfaceMember): def __init__(self, location, identifier, ifaceType, keyType, valueType, ifaceKind): IDLInterfaceMember.__init__(self, location, identifier, ifaceKind) if keyType is not None: assert isinstance(keyType, IDLType) else: assert valueType is not None - assert ifaceType in ['maplike', 'setlike', 'iterable'] + assert ifaceType in ["maplike", "setlike", "iterable"] if valueType is not None: assert isinstance(valueType, IDLType) self.keyType = 
keyType @@ -4012,28 +4475,42 @@ def hasValueType(self): def checkCollisions(self, members, isAncestor): for member in members: # Check that there are no disallowed members - if (member.identifier.name in self.disallowedMemberNames and - not ((member.isMethod() and member.isMaplikeOrSetlikeOrIterableMethod()) or - (member.isAttr() and member.isMaplikeOrSetlikeAttr()))): - raise WebIDLError("Member '%s' conflicts " - "with reserved %s name." % - (member.identifier.name, - self.maplikeOrSetlikeOrIterableType), - [self.location, member.location]) + if member.identifier.name in self.disallowedMemberNames and not ( + (member.isMethod() and member.isMaplikeOrSetlikeOrIterableMethod()) + or (member.isAttr() and member.isMaplikeOrSetlikeAttr()) + ): + raise WebIDLError( + "Member '%s' conflicts " + "with reserved %s name." + % (member.identifier.name, self.maplikeOrSetlikeOrIterableType), + [self.location, member.location], + ) # Check that there are no disallowed non-method members. # Ancestor members are always disallowed here; own members # are disallowed only if they're non-methods. - if ((isAncestor or member.isAttr() or member.isConst()) and - member.identifier.name in self.disallowedNonMethodNames): - raise WebIDLError("Member '%s' conflicts " - "with reserved %s method." % - (member.identifier.name, - self.maplikeOrSetlikeOrIterableType), - [self.location, member.location]) - - def addMethod(self, name, members, allowExistingOperations, returnType, args=[], - chromeOnly=False, isPure=False, affectsNothing=False, newObject=False, - isIteratorAlias=False): + if ( + isAncestor or member.isAttr() or member.isConst() + ) and member.identifier.name in self.disallowedNonMethodNames: + raise WebIDLError( + "Member '%s' conflicts " + "with reserved %s method." + % (member.identifier.name, self.maplikeOrSetlikeOrIterableType), + [self.location, member.location], + ) + + def addMethod( + self, + name, + members, + allowExistingOperations, + returnType, + args=[], + chromeOnly=False, + isPure=False, + affectsNothing=False, + newObject=False, + isIteratorAlias=False, + ): """ Create an IDLMethod based on the parameters passed in. @@ -4072,35 +4549,48 @@ def addMethod(self, name, members, allowExistingOperations, returnType, args=[], for m in members: if m.identifier.name == name and m.isMethod() and not m.isStatic(): return - method = IDLMethod(self.location, - IDLUnresolvedIdentifier(self.location, name, allowDoubleUnderscore=chromeOnly), - returnType, args, maplikeOrSetlikeOrIterable=self) + method = IDLMethod( + self.location, + IDLUnresolvedIdentifier( + self.location, name, allowDoubleUnderscore=chromeOnly + ), + returnType, + args, + maplikeOrSetlikeOrIterable=self, + ) # We need to be able to throw from declaration methods - method.addExtendedAttributes( - [IDLExtendedAttribute(self.location, ("Throws",))]) + method.addExtendedAttributes([IDLExtendedAttribute(self.location, ("Throws",))]) if chromeOnly: method.addExtendedAttributes( - [IDLExtendedAttribute(self.location, ("ChromeOnly",))]) + [IDLExtendedAttribute(self.location, ("ChromeOnly",))] + ) if isPure: method.addExtendedAttributes( - [IDLExtendedAttribute(self.location, ("Pure",))]) + [IDLExtendedAttribute(self.location, ("Pure",))] + ) # Following attributes are used for keys/values/entries. Can't mark # them pure, since they return a new object each time they are run. 
if affectsNothing: method.addExtendedAttributes( - [IDLExtendedAttribute(self.location, ("DependsOn", "Everything")), - IDLExtendedAttribute(self.location, ("Affects", "Nothing"))]) + [ + IDLExtendedAttribute(self.location, ("DependsOn", "Everything")), + IDLExtendedAttribute(self.location, ("Affects", "Nothing")), + ] + ) if newObject: method.addExtendedAttributes( - [IDLExtendedAttribute(self.location, ("NewObject",))]) + [IDLExtendedAttribute(self.location, ("NewObject",))] + ) if isIteratorAlias: method.addExtendedAttributes( - [IDLExtendedAttribute(self.location, ("Alias", "@@iterator"))]) + [IDLExtendedAttribute(self.location, ("Alias", "@@iterator"))] + ) # Methods generated for iterables should be enumerable, but the ones for # maplike/setlike should not be. if not self.isIterable(): method.addExtendedAttributes( - [IDLExtendedAttribute(self.location, ("NonEnumerable",))]) + [IDLExtendedAttribute(self.location, ("NonEnumerable",))] + ) members.append(method) def resolve(self, parentScope): @@ -4141,28 +4631,45 @@ def _getDependentObjects(self): return deps def getForEachArguments(self): - return [IDLArgument(self.location, - IDLUnresolvedIdentifier(BuiltinLocation(""), - "callback"), - BuiltinTypes[IDLBuiltinType.Types.object]), - IDLArgument(self.location, - IDLUnresolvedIdentifier(BuiltinLocation(""), - "thisArg"), - BuiltinTypes[IDLBuiltinType.Types.any], - optional=True)] + return [ + IDLArgument( + self.location, + IDLUnresolvedIdentifier( + BuiltinLocation(""), "callback" + ), + BuiltinTypes[IDLBuiltinType.Types.object], + ), + IDLArgument( + self.location, + IDLUnresolvedIdentifier( + BuiltinLocation(""), "thisArg" + ), + BuiltinTypes[IDLBuiltinType.Types.any], + optional=True, + ), + ] + # Iterable adds ES6 iterator style functions and traits # (keys/values/entries/@@iterator) to an interface. 
class IDLIterable(IDLMaplikeOrSetlikeOrIterableBase): - def __init__(self, location, identifier, keyType, valueType=None, scope=None): - IDLMaplikeOrSetlikeOrIterableBase.__init__(self, location, identifier, - "iterable", keyType, valueType, - IDLInterfaceMember.Tags.Iterable) + IDLMaplikeOrSetlikeOrIterableBase.__init__( + self, + location, + identifier, + "iterable", + keyType, + valueType, + IDLInterfaceMember.Tags.Iterable, + ) self.iteratorType = None def __str__(self): - return "declared iterable with key '%s' and value '%s'" % (self.keyType, self.valueType) + return "declared iterable with key '%s' and value '%s'" % ( + self.keyType, + self.valueType, + ) def expand(self, members, isJSImplemented): """ @@ -4176,20 +4683,42 @@ def expand(self, members, isJSImplemented): return # object entries() - self.addMethod("entries", members, False, self.iteratorType, - affectsNothing=True, newObject=True, - isIteratorAlias=True) + self.addMethod( + "entries", + members, + False, + self.iteratorType, + affectsNothing=True, + newObject=True, + isIteratorAlias=True, + ) # object keys() - self.addMethod("keys", members, False, self.iteratorType, - affectsNothing=True, newObject=True) + self.addMethod( + "keys", + members, + False, + self.iteratorType, + affectsNothing=True, + newObject=True, + ) # object values() - self.addMethod("values", members, False, self.iteratorType, - affectsNothing=True, newObject=True) + self.addMethod( + "values", + members, + False, + self.iteratorType, + affectsNothing=True, + newObject=True, + ) # void forEach(callback(valueType, keyType), optional any thisArg) - self.addMethod("forEach", members, False, - BuiltinTypes[IDLBuiltinType.Types.void], - self.getForEachArguments()) + self.addMethod( + "forEach", + members, + False, + BuiltinTypes[IDLBuiltinType.Types.void], + self.getForEachArguments(), + ) def isValueIterator(self): return not self.isPairIterator() @@ -4197,25 +4726,36 @@ def isValueIterator(self): def isPairIterator(self): return self.hasKeyType() + # MaplikeOrSetlike adds ES6 map-or-set-like traits to an interface. class IDLMaplikeOrSetlike(IDLMaplikeOrSetlikeOrIterableBase): - - def __init__(self, location, identifier, maplikeOrSetlikeType, - readonly, keyType, valueType): - IDLMaplikeOrSetlikeOrIterableBase.__init__(self, location, identifier, maplikeOrSetlikeType, - keyType, valueType, IDLInterfaceMember.Tags.MaplikeOrSetlike) + def __init__( + self, location, identifier, maplikeOrSetlikeType, readonly, keyType, valueType + ): + IDLMaplikeOrSetlikeOrIterableBase.__init__( + self, + location, + identifier, + maplikeOrSetlikeType, + keyType, + valueType, + IDLInterfaceMember.Tags.MaplikeOrSetlike, + ) self.readonly = readonly self.slotIndices = None # When generating JSAPI access code, we need to know the backing object # type prefix to create the correct function. Generate here for reuse. if self.isMaplike(): - self.prefix = 'Map' + self.prefix = "Map" elif self.isSetlike(): - self.prefix = 'Set' + self.prefix = "Set" def __str__(self): - return "declared '%s' with key '%s'" % (self.maplikeOrSetlikeOrIterableType, self.keyType) + return "declared '%s' with key '%s'" % ( + self.maplikeOrSetlikeOrIterableType, + self.keyType, + ) def expand(self, members, isJSImplemented): """ @@ -4224,71 +4764,133 @@ def expand(self, members, isJSImplemented): specification during parsing. 
""" # Both maplike and setlike have a size attribute - sizeAttr = IDLAttribute(self.location, - IDLUnresolvedIdentifier(BuiltinLocation(""), "size"), - BuiltinTypes[IDLBuiltinType.Types.unsigned_long], - True, - maplikeOrSetlike=self) + sizeAttr = IDLAttribute( + self.location, + IDLUnresolvedIdentifier( + BuiltinLocation(""), "size" + ), + BuiltinTypes[IDLBuiltinType.Types.unsigned_long], + True, + maplikeOrSetlike=self, + ) # This should be non-enumerable. sizeAttr.addExtendedAttributes( - [IDLExtendedAttribute(self.location, ("NonEnumerable",))]) + [IDLExtendedAttribute(self.location, ("NonEnumerable",))] + ) members.append(sizeAttr) self.reserved_ro_names = ["size"] self.disallowedMemberNames.append("size") # object entries() - self.addMethod("entries", members, False, BuiltinTypes[IDLBuiltinType.Types.object], - affectsNothing=True, isIteratorAlias=self.isMaplike()) + self.addMethod( + "entries", + members, + False, + BuiltinTypes[IDLBuiltinType.Types.object], + affectsNothing=True, + isIteratorAlias=self.isMaplike(), + ) # object keys() - self.addMethod("keys", members, False, BuiltinTypes[IDLBuiltinType.Types.object], - affectsNothing=True) + self.addMethod( + "keys", + members, + False, + BuiltinTypes[IDLBuiltinType.Types.object], + affectsNothing=True, + ) # object values() - self.addMethod("values", members, False, BuiltinTypes[IDLBuiltinType.Types.object], - affectsNothing=True, isIteratorAlias=self.isSetlike()) + self.addMethod( + "values", + members, + False, + BuiltinTypes[IDLBuiltinType.Types.object], + affectsNothing=True, + isIteratorAlias=self.isSetlike(), + ) # void forEach(callback(valueType, keyType), thisVal) - self.addMethod("forEach", members, False, BuiltinTypes[IDLBuiltinType.Types.void], - self.getForEachArguments()) + self.addMethod( + "forEach", + members, + False, + BuiltinTypes[IDLBuiltinType.Types.void], + self.getForEachArguments(), + ) def getKeyArg(): - return IDLArgument(self.location, - IDLUnresolvedIdentifier(self.location, "key"), - self.keyType) + return IDLArgument( + self.location, + IDLUnresolvedIdentifier(self.location, "key"), + self.keyType, + ) # boolean has(keyType key) - self.addMethod("has", members, False, BuiltinTypes[IDLBuiltinType.Types.boolean], - [getKeyArg()], isPure=True) + self.addMethod( + "has", + members, + False, + BuiltinTypes[IDLBuiltinType.Types.boolean], + [getKeyArg()], + isPure=True, + ) if not self.readonly: # void clear() - self.addMethod("clear", members, True, BuiltinTypes[IDLBuiltinType.Types.void], - []) + self.addMethod( + "clear", members, True, BuiltinTypes[IDLBuiltinType.Types.void], [] + ) # boolean delete(keyType key) - self.addMethod("delete", members, True, - BuiltinTypes[IDLBuiltinType.Types.boolean], [getKeyArg()]) + self.addMethod( + "delete", + members, + True, + BuiltinTypes[IDLBuiltinType.Types.boolean], + [getKeyArg()], + ) # Always generate underscored functions (e.g. __add, __clear) for js # implemented interfaces as convenience functions. 
if isJSImplemented: # void clear() - self.addMethod("clear", members, True, BuiltinTypes[IDLBuiltinType.Types.void], - [], chromeOnly=True) + self.addMethod( + "clear", + members, + True, + BuiltinTypes[IDLBuiltinType.Types.void], + [], + chromeOnly=True, + ) # boolean delete(keyType key) - self.addMethod("delete", members, True, - BuiltinTypes[IDLBuiltinType.Types.boolean], [getKeyArg()], - chromeOnly=True) + self.addMethod( + "delete", + members, + True, + BuiltinTypes[IDLBuiltinType.Types.boolean], + [getKeyArg()], + chromeOnly=True, + ) if self.isSetlike(): if not self.readonly: # Add returns the set object it just added to. # object add(keyType key) - self.addMethod("add", members, True, - BuiltinTypes[IDLBuiltinType.Types.object], [getKeyArg()]) + self.addMethod( + "add", + members, + True, + BuiltinTypes[IDLBuiltinType.Types.object], + [getKeyArg()], + ) if isJSImplemented: - self.addMethod("add", members, True, - BuiltinTypes[IDLBuiltinType.Types.object], [getKeyArg()], - chromeOnly=True) + self.addMethod( + "add", + members, + True, + BuiltinTypes[IDLBuiltinType.Types.object], + [getKeyArg()], + chromeOnly=True, + ) return # If we get this far, we're a maplike declaration. @@ -4301,39 +4903,61 @@ def getKeyArg(): # # TODO: Bug 1155340 may change this to use specific type to provide # more info to JIT. - self.addMethod("get", members, False, BuiltinTypes[IDLBuiltinType.Types.any], - [getKeyArg()], isPure=True) + self.addMethod( + "get", + members, + False, + BuiltinTypes[IDLBuiltinType.Types.any], + [getKeyArg()], + isPure=True, + ) def getValueArg(): - return IDLArgument(self.location, - IDLUnresolvedIdentifier(self.location, "value"), - self.valueType) + return IDLArgument( + self.location, + IDLUnresolvedIdentifier(self.location, "value"), + self.valueType, + ) if not self.readonly: - self.addMethod("set", members, True, BuiltinTypes[IDLBuiltinType.Types.object], - [getKeyArg(), getValueArg()]) + self.addMethod( + "set", + members, + True, + BuiltinTypes[IDLBuiltinType.Types.object], + [getKeyArg(), getValueArg()], + ) if isJSImplemented: - self.addMethod("set", members, True, BuiltinTypes[IDLBuiltinType.Types.object], - [getKeyArg(), getValueArg()], chromeOnly=True) + self.addMethod( + "set", + members, + True, + BuiltinTypes[IDLBuiltinType.Types.object], + [getKeyArg(), getValueArg()], + chromeOnly=True, + ) + class IDLConst(IDLInterfaceMember): def __init__(self, location, identifier, type, value): - IDLInterfaceMember.__init__(self, location, identifier, - IDLInterfaceMember.Tags.Const) + IDLInterfaceMember.__init__( + self, location, identifier, IDLInterfaceMember.Tags.Const + ) assert isinstance(type, IDLType) if type.isDictionary(): - raise WebIDLError("A constant cannot be of a dictionary type", - [self.location]) + raise WebIDLError( + "A constant cannot be of a dictionary type", [self.location] + ) if type.isRecord(): - raise WebIDLError("A constant cannot be of a record type", - [self.location]) + raise WebIDLError("A constant cannot be of a record type", [self.location]) self.type = type self.value = value if identifier.name == "prototype": - raise WebIDLError("The identifier of a constant must not be 'prototype'", - [location]) + raise WebIDLError( + "The identifier of a constant must not be 'prototype'", [location] + ) def __str__(self): return "'%s' const '%s'" % (self.type, self.identifier) @@ -4365,16 +4989,20 @@ def handleExtendedAttribute(self, attr): identifier = attr.identifier() if identifier == "Exposed": convertExposedAttrToGlobalNameSet(attr, 
self._exposureGlobalNames) - elif (identifier == "Pref" or - identifier == "ChromeOnly" or - identifier == "Func" or - identifier == "SecureContext" or - identifier == "NonEnumerable"): + elif ( + identifier == "Pref" + or identifier == "ChromeOnly" + or identifier == "Func" + or identifier == "SecureContext" + or identifier == "NonEnumerable" + ): # Known attributes that we don't need to do anything with here pass else: - raise WebIDLError("Unknown extended attribute %s on constant" % identifier, - [attr.location]) + raise WebIDLError( + "Unknown extended attribute %s on constant" % identifier, + [attr.location], + ) IDLInterfaceMember.handleExtendedAttribute(self, attr) def _getDependentObjects(self): @@ -4382,12 +5010,25 @@ def _getDependentObjects(self): class IDLAttribute(IDLInterfaceMember): - def __init__(self, location, identifier, type, readonly, inherit=False, - static=False, stringifier=False, maplikeOrSetlike=None, - extendedAttrDict=None): - IDLInterfaceMember.__init__(self, location, identifier, - IDLInterfaceMember.Tags.Attr, - extendedAttrDict=extendedAttrDict) + def __init__( + self, + location, + identifier, + type, + readonly, + inherit=False, + static=False, + stringifier=False, + maplikeOrSetlike=None, + extendedAttrDict=None, + ): + IDLInterfaceMember.__init__( + self, + location, + identifier, + IDLInterfaceMember.Tags.Attr, + extendedAttrDict=extendedAttrDict, + ) assert isinstance(type, IDLType) self.type = type @@ -4398,19 +5039,24 @@ def __init__(self, location, identifier, type, readonly, inherit=False, self._unforgeable = False self.stringifier = stringifier self.slotIndices = None - assert maplikeOrSetlike is None or isinstance(maplikeOrSetlike, IDLMaplikeOrSetlike) + assert maplikeOrSetlike is None or isinstance( + maplikeOrSetlike, IDLMaplikeOrSetlike + ) self.maplikeOrSetlike = maplikeOrSetlike self.dependsOn = "Everything" self.affects = "Everything" self.bindingAliases = [] if static and identifier.name == "prototype": - raise WebIDLError("The identifier of a static attribute must not be 'prototype'", - [location]) + raise WebIDLError( + "The identifier of a static attribute must not be 'prototype'", + [location], + ) if readonly and inherit: - raise WebIDLError("An attribute cannot be both 'readonly' and 'inherit'", - [self.location]) + raise WebIDLError( + "An attribute cannot be both 'readonly' and 'inherit'", [self.location] + ) def isStatic(self): return self._static @@ -4432,69 +5078,96 @@ def finish(self, scope): assert not isinstance(t.name, IDLUnresolvedIdentifier) self.type = t - if self.readonly and (self.type.hasClamp() or self.type.hasEnforceRange() or - self.type.hasAllowShared() or self.type.treatNullAsEmpty): - raise WebIDLError("A readonly attribute cannot be [Clamp] or [EnforceRange] or [AllowShared]", - [self.location]) + if self.readonly and ( + self.type.hasClamp() + or self.type.hasEnforceRange() + or self.type.hasAllowShared() + or self.type.treatNullAsEmpty + ): + raise WebIDLError( + "A readonly attribute cannot be [Clamp] or [EnforceRange] or [AllowShared]", + [self.location], + ) if self.type.isDictionary() and not self.getExtendedAttribute("Cached"): - raise WebIDLError("An attribute cannot be of a dictionary type", - [self.location]) + raise WebIDLError( + "An attribute cannot be of a dictionary type", [self.location] + ) if self.type.isSequence() and not self.getExtendedAttribute("Cached"): - raise WebIDLError("A non-cached attribute cannot be of a sequence " - "type", [self.location]) + raise WebIDLError( + "A non-cached 
attribute cannot be of a sequence " "type", + [self.location], + ) if self.type.isRecord() and not self.getExtendedAttribute("Cached"): - raise WebIDLError("A non-cached attribute cannot be of a record " - "type", [self.location]) + raise WebIDLError( + "A non-cached attribute cannot be of a record " "type", [self.location] + ) if self.type.isUnion(): for f in self.type.unroll().flatMemberTypes: if f.isDictionary(): - raise WebIDLError("An attribute cannot be of a union " - "type if one of its member types (or " - "one of its member types's member " - "types, and so on) is a dictionary " - "type", [self.location, f.location]) + raise WebIDLError( + "An attribute cannot be of a union " + "type if one of its member types (or " + "one of its member types's member " + "types, and so on) is a dictionary " + "type", + [self.location, f.location], + ) if f.isSequence(): - raise WebIDLError("An attribute cannot be of a union " - "type if one of its member types (or " - "one of its member types's member " - "types, and so on) is a sequence " - "type", [self.location, f.location]) + raise WebIDLError( + "An attribute cannot be of a union " + "type if one of its member types (or " + "one of its member types's member " + "types, and so on) is a sequence " + "type", + [self.location, f.location], + ) if f.isRecord(): - raise WebIDLError("An attribute cannot be of a union " - "type if one of its member types (or " - "one of its member types's member " - "types, and so on) is a record " - "type", [self.location, f.location]) + raise WebIDLError( + "An attribute cannot be of a union " + "type if one of its member types (or " + "one of its member types's member " + "types, and so on) is a record " + "type", + [self.location, f.location], + ) if not self.type.isInterface() and self.getExtendedAttribute("PutForwards"): - raise WebIDLError("An attribute with [PutForwards] must have an " - "interface type as its type", [self.location]) + raise WebIDLError( + "An attribute with [PutForwards] must have an " + "interface type as its type", + [self.location], + ) - if (not self.type.isInterface() and - self.getExtendedAttribute("SameObject")): - raise WebIDLError("An attribute with [SameObject] must have an " - "interface type as its type", [self.location]) + if not self.type.isInterface() and self.getExtendedAttribute("SameObject"): + raise WebIDLError( + "An attribute with [SameObject] must have an " + "interface type as its type", + [self.location], + ) if self.type.isPromise() and not self.readonly: - raise WebIDLError("Promise-returning attributes must be readonly", - [self.location]) + raise WebIDLError( + "Promise-returning attributes must be readonly", [self.location] + ) def validate(self): def typeContainsChromeOnlyDictionaryMember(type): - if (type.nullable() or - type.isSequence() or - type.isRecord()): + if type.nullable() or type.isSequence() or type.isRecord(): return typeContainsChromeOnlyDictionaryMember(type.inner) if type.isUnion(): for memberType in type.flatMemberTypes: - (contains, location) = typeContainsChromeOnlyDictionaryMember(memberType) + (contains, location) = typeContainsChromeOnlyDictionaryMember( + memberType + ) if contains: return (True, location) if type.isDictionary(): dictionary = type.inner while dictionary: - (contains, location) = dictionaryContainsChromeOnlyMember(dictionary) + (contains, location) = dictionaryContainsChromeOnlyMember( + dictionary + ) if contains: return (True, location) dictionary = dictionary.parent @@ -4505,254 +5178,343 @@ def 
dictionaryContainsChromeOnlyMember(dictionary): for member in dictionary.members: if member.getExtendedAttribute("ChromeOnly"): return (True, member.location) - (contains, location) = typeContainsChromeOnlyDictionaryMember(member.type) + (contains, location) = typeContainsChromeOnlyDictionaryMember( + member.type + ) if contains: return (True, location) return (False, None) IDLInterfaceMember.validate(self) - if (self.getExtendedAttribute("Cached") or - self.getExtendedAttribute("StoreInSlot")): + if self.getExtendedAttribute("Cached") or self.getExtendedAttribute( + "StoreInSlot" + ): if not self.affects == "Nothing": - raise WebIDLError("Cached attributes and attributes stored in " - "slots must be Constant or Pure or " - "Affects=Nothing, since the getter won't always " - "be called.", - [self.location]) + raise WebIDLError( + "Cached attributes and attributes stored in " + "slots must be Constant or Pure or " + "Affects=Nothing, since the getter won't always " + "be called.", + [self.location], + ) (contains, location) = typeContainsChromeOnlyDictionaryMember(self.type) if contains: - raise WebIDLError("[Cached] and [StoreInSlot] must not be used " - "on an attribute whose type contains a " - "[ChromeOnly] dictionary member", - [self.location, location]) + raise WebIDLError( + "[Cached] and [StoreInSlot] must not be used " + "on an attribute whose type contains a " + "[ChromeOnly] dictionary member", + [self.location, location], + ) if self.getExtendedAttribute("Frozen"): - if (not self.type.isSequence() and not self.type.isDictionary() and - not self.type.isRecord()): - raise WebIDLError("[Frozen] is only allowed on " - "sequence-valued, dictionary-valued, and " - "record-valued attributes", - [self.location]) + if ( + not self.type.isSequence() + and not self.type.isDictionary() + and not self.type.isRecord() + ): + raise WebIDLError( + "[Frozen] is only allowed on " + "sequence-valued, dictionary-valued, and " + "record-valued attributes", + [self.location], + ) if not self.type.unroll().isExposedInAllOf(self.exposureSet): - raise WebIDLError("Attribute returns a type that is not exposed " - "everywhere where the attribute is exposed", - [self.location]) + raise WebIDLError( + "Attribute returns a type that is not exposed " + "everywhere where the attribute is exposed", + [self.location], + ) if self.getExtendedAttribute("CEReactions"): if self.readonly: - raise WebIDLError("[CEReactions] is not allowed on " - "readonly attributes", - [self.location]) + raise WebIDLError( + "[CEReactions] is not allowed on " "readonly attributes", + [self.location], + ) def handleExtendedAttribute(self, attr): identifier = attr.identifier() - if ((identifier == "SetterThrows" or identifier == "SetterCanOOM" or - identifier == "SetterNeedsSubjectPrincipal") - and self.readonly): - raise WebIDLError("Readonly attributes must not be flagged as " - "[%s]" % identifier, - [self.location]) + if ( + identifier == "SetterThrows" + or identifier == "SetterCanOOM" + or identifier == "SetterNeedsSubjectPrincipal" + ) and self.readonly: + raise WebIDLError( + "Readonly attributes must not be flagged as " "[%s]" % identifier, + [self.location], + ) elif identifier == "BindingAlias": if not attr.hasValue(): - raise WebIDLError("[BindingAlias] takes an identifier or string", - [attr.location]) + raise WebIDLError( + "[BindingAlias] takes an identifier or string", [attr.location] + ) self._addBindingAlias(attr.value()) - elif (((identifier == "Throws" or identifier == "GetterThrows" or - identifier == "CanOOM" or 
identifier == "GetterCanOOM") and - self.getExtendedAttribute("StoreInSlot")) or - (identifier == "StoreInSlot" and - (self.getExtendedAttribute("Throws") or - self.getExtendedAttribute("GetterThrows") or - self.getExtendedAttribute("CanOOM") or - self.getExtendedAttribute("GetterCanOOM")))): - raise WebIDLError("Throwing things can't be [StoreInSlot]", - [attr.location]) + elif ( + ( + identifier == "Throws" + or identifier == "GetterThrows" + or identifier == "CanOOM" + or identifier == "GetterCanOOM" + ) + and self.getExtendedAttribute("StoreInSlot") + ) or ( + identifier == "StoreInSlot" + and ( + self.getExtendedAttribute("Throws") + or self.getExtendedAttribute("GetterThrows") + or self.getExtendedAttribute("CanOOM") + or self.getExtendedAttribute("GetterCanOOM") + ) + ): + raise WebIDLError("Throwing things can't be [StoreInSlot]", [attr.location]) elif identifier == "LenientThis": if not attr.noArguments(): - raise WebIDLError("[LenientThis] must take no arguments", - [attr.location]) + raise WebIDLError( + "[LenientThis] must take no arguments", [attr.location] + ) if self.isStatic(): - raise WebIDLError("[LenientThis] is only allowed on non-static " - "attributes", [attr.location, self.location]) + raise WebIDLError( + "[LenientThis] is only allowed on non-static " "attributes", + [attr.location, self.location], + ) if self.getExtendedAttribute("CrossOriginReadable"): - raise WebIDLError("[LenientThis] is not allowed in combination " - "with [CrossOriginReadable]", - [attr.location, self.location]) + raise WebIDLError( + "[LenientThis] is not allowed in combination " + "with [CrossOriginReadable]", + [attr.location, self.location], + ) if self.getExtendedAttribute("CrossOriginWritable"): - raise WebIDLError("[LenientThis] is not allowed in combination " - "with [CrossOriginWritable]", - [attr.location, self.location]) + raise WebIDLError( + "[LenientThis] is not allowed in combination " + "with [CrossOriginWritable]", + [attr.location, self.location], + ) self.lenientThis = True elif identifier == "Unforgeable": if self.isStatic(): - raise WebIDLError("[Unforgeable] is only allowed on non-static " - "attributes", [attr.location, self.location]) + raise WebIDLError( + "[Unforgeable] is only allowed on non-static " "attributes", + [attr.location, self.location], + ) self._unforgeable = True elif identifier == "SameObject" and not self.readonly: - raise WebIDLError("[SameObject] only allowed on readonly attributes", - [attr.location, self.location]) + raise WebIDLError( + "[SameObject] only allowed on readonly attributes", + [attr.location, self.location], + ) elif identifier == "Constant" and not self.readonly: - raise WebIDLError("[Constant] only allowed on readonly attributes", - [attr.location, self.location]) + raise WebIDLError( + "[Constant] only allowed on readonly attributes", + [attr.location, self.location], + ) elif identifier == "PutForwards": if not self.readonly: - raise WebIDLError("[PutForwards] is only allowed on readonly " - "attributes", [attr.location, self.location]) + raise WebIDLError( + "[PutForwards] is only allowed on readonly " "attributes", + [attr.location, self.location], + ) if self.type.isPromise(): - raise WebIDLError("[PutForwards] is not allowed on " - "Promise-typed attributes", - [attr.location, self.location]) + raise WebIDLError( + "[PutForwards] is not allowed on " "Promise-typed attributes", + [attr.location, self.location], + ) if self.isStatic(): - raise WebIDLError("[PutForwards] is only allowed on non-static " - "attributes", 
[attr.location, self.location]) + raise WebIDLError( + "[PutForwards] is only allowed on non-static " "attributes", + [attr.location, self.location], + ) if self.getExtendedAttribute("Replaceable") is not None: - raise WebIDLError("[PutForwards] and [Replaceable] can't both " - "appear on the same attribute", - [attr.location, self.location]) + raise WebIDLError( + "[PutForwards] and [Replaceable] can't both " + "appear on the same attribute", + [attr.location, self.location], + ) if not attr.hasValue(): - raise WebIDLError("[PutForwards] takes an identifier", - [attr.location, self.location]) + raise WebIDLError( + "[PutForwards] takes an identifier", [attr.location, self.location] + ) elif identifier == "Replaceable": if not attr.noArguments(): - raise WebIDLError("[Replaceable] must take no arguments", - [attr.location]) + raise WebIDLError( + "[Replaceable] must take no arguments", [attr.location] + ) if not self.readonly: - raise WebIDLError("[Replaceable] is only allowed on readonly " - "attributes", [attr.location, self.location]) + raise WebIDLError( + "[Replaceable] is only allowed on readonly " "attributes", + [attr.location, self.location], + ) if self.type.isPromise(): - raise WebIDLError("[Replaceable] is not allowed on " - "Promise-typed attributes", - [attr.location, self.location]) + raise WebIDLError( + "[Replaceable] is not allowed on " "Promise-typed attributes", + [attr.location, self.location], + ) if self.isStatic(): - raise WebIDLError("[Replaceable] is only allowed on non-static " - "attributes", [attr.location, self.location]) + raise WebIDLError( + "[Replaceable] is only allowed on non-static " "attributes", + [attr.location, self.location], + ) if self.getExtendedAttribute("PutForwards") is not None: - raise WebIDLError("[PutForwards] and [Replaceable] can't both " - "appear on the same attribute", - [attr.location, self.location]) + raise WebIDLError( + "[PutForwards] and [Replaceable] can't both " + "appear on the same attribute", + [attr.location, self.location], + ) elif identifier == "LenientSetter": if not attr.noArguments(): - raise WebIDLError("[LenientSetter] must take no arguments", - [attr.location]) + raise WebIDLError( + "[LenientSetter] must take no arguments", [attr.location] + ) if not self.readonly: - raise WebIDLError("[LenientSetter] is only allowed on readonly " - "attributes", [attr.location, self.location]) + raise WebIDLError( + "[LenientSetter] is only allowed on readonly " "attributes", + [attr.location, self.location], + ) if self.type.isPromise(): - raise WebIDLError("[LenientSetter] is not allowed on " - "Promise-typed attributes", - [attr.location, self.location]) + raise WebIDLError( + "[LenientSetter] is not allowed on " "Promise-typed attributes", + [attr.location, self.location], + ) if self.isStatic(): - raise WebIDLError("[LenientSetter] is only allowed on non-static " - "attributes", [attr.location, self.location]) + raise WebIDLError( + "[LenientSetter] is only allowed on non-static " "attributes", + [attr.location, self.location], + ) if self.getExtendedAttribute("PutForwards") is not None: - raise WebIDLError("[LenientSetter] and [PutForwards] can't both " - "appear on the same attribute", - [attr.location, self.location]) + raise WebIDLError( + "[LenientSetter] and [PutForwards] can't both " + "appear on the same attribute", + [attr.location, self.location], + ) if self.getExtendedAttribute("Replaceable") is not None: - raise WebIDLError("[LenientSetter] and [Replaceable] can't both " - "appear on the same attribute", - 
[attr.location, self.location]) + raise WebIDLError( + "[LenientSetter] and [Replaceable] can't both " + "appear on the same attribute", + [attr.location, self.location], + ) elif identifier == "LenientFloat": if self.readonly: - raise WebIDLError("[LenientFloat] used on a readonly attribute", - [attr.location, self.location]) + raise WebIDLError( + "[LenientFloat] used on a readonly attribute", + [attr.location, self.location], + ) if not self.type.includesRestrictedFloat(): - raise WebIDLError("[LenientFloat] used on an attribute with a " - "non-restricted-float type", - [attr.location, self.location]) + raise WebIDLError( + "[LenientFloat] used on an attribute with a " + "non-restricted-float type", + [attr.location, self.location], + ) elif identifier == "StoreInSlot": if self.getExtendedAttribute("Cached"): - raise WebIDLError("[StoreInSlot] and [Cached] must not be " - "specified on the same attribute", - [attr.location, self.location]) + raise WebIDLError( + "[StoreInSlot] and [Cached] must not be " + "specified on the same attribute", + [attr.location, self.location], + ) elif identifier == "Cached": if self.getExtendedAttribute("StoreInSlot"): - raise WebIDLError("[Cached] and [StoreInSlot] must not be " - "specified on the same attribute", - [attr.location, self.location]) - elif (identifier == "CrossOriginReadable" or - identifier == "CrossOriginWritable"): + raise WebIDLError( + "[Cached] and [StoreInSlot] must not be " + "specified on the same attribute", + [attr.location, self.location], + ) + elif identifier == "CrossOriginReadable" or identifier == "CrossOriginWritable": if not attr.noArguments(): - raise WebIDLError("[%s] must take no arguments" % identifier, - [attr.location]) + raise WebIDLError( + "[%s] must take no arguments" % identifier, [attr.location] + ) if self.isStatic(): - raise WebIDLError("[%s] is only allowed on non-static " - "attributes" % identifier, - [attr.location, self.location]) + raise WebIDLError( + "[%s] is only allowed on non-static " "attributes" % identifier, + [attr.location, self.location], + ) if self.getExtendedAttribute("LenientThis"): - raise WebIDLError("[LenientThis] is not allowed in combination " - "with [%s]" % identifier, - [attr.location, self.location]) + raise WebIDLError( + "[LenientThis] is not allowed in combination " + "with [%s]" % identifier, + [attr.location, self.location], + ) elif identifier == "Exposed": convertExposedAttrToGlobalNameSet(attr, self._exposureGlobalNames) elif identifier == "Pure": if not attr.noArguments(): - raise WebIDLError("[Pure] must take no arguments", - [attr.location]) + raise WebIDLError("[Pure] must take no arguments", [attr.location]) self._setDependsOn("DOMState") self._setAffects("Nothing") elif identifier == "Constant" or identifier == "SameObject": if not attr.noArguments(): - raise WebIDLError("[%s] must take no arguments" % identifier, - [attr.location]) + raise WebIDLError( + "[%s] must take no arguments" % identifier, [attr.location] + ) self._setDependsOn("Nothing") self._setAffects("Nothing") elif identifier == "Affects": if not attr.hasValue(): - raise WebIDLError("[Affects] takes an identifier", - [attr.location]) + raise WebIDLError("[Affects] takes an identifier", [attr.location]) self._setAffects(attr.value()) elif identifier == "DependsOn": if not attr.hasValue(): - raise WebIDLError("[DependsOn] takes an identifier", - [attr.location]) - if (attr.value() != "Everything" and attr.value() != "DOMState" and - not self.readonly): - raise WebIDLError("[DependsOn=%s] only allowed 
on " - "readonly attributes" % attr.value(), - [attr.location, self.location]) + raise WebIDLError("[DependsOn] takes an identifier", [attr.location]) + if ( + attr.value() != "Everything" + and attr.value() != "DOMState" + and not self.readonly + ): + raise WebIDLError( + "[DependsOn=%s] only allowed on " + "readonly attributes" % attr.value(), + [attr.location, self.location], + ) self._setDependsOn(attr.value()) elif identifier == "UseCounter": if self.stringifier: - raise WebIDLError("[UseCounter] must not be used on a " - "stringifier attribute", - [attr.location, self.location]) + raise WebIDLError( + "[UseCounter] must not be used on a " "stringifier attribute", + [attr.location, self.location], + ) elif identifier == "Unscopable": if not attr.noArguments(): - raise WebIDLError("[Unscopable] must take no arguments", - [attr.location]) + raise WebIDLError( + "[Unscopable] must take no arguments", [attr.location] + ) if self.isStatic(): - raise WebIDLError("[Unscopable] is only allowed on non-static " - "attributes and operations", - [attr.location, self.location]) + raise WebIDLError( + "[Unscopable] is only allowed on non-static " + "attributes and operations", + [attr.location, self.location], + ) elif identifier == "CEReactions": if not attr.noArguments(): - raise WebIDLError("[CEReactions] must take no arguments", - [attr.location]) - elif (identifier == "Pref" or - identifier == "Deprecated" or - identifier == "SetterThrows" or - identifier == "Throws" or - identifier == "GetterThrows" or - identifier == "SetterCanOOM" or - identifier == "CanOOM" or - identifier == "GetterCanOOM" or - identifier == "ChromeOnly" or - identifier == "Func" or - identifier == "SecureContext" or - identifier == "Frozen" or - identifier == "NewObject" or - identifier == "NeedsSubjectPrincipal" or - identifier == "SetterNeedsSubjectPrincipal" or - identifier == "GetterNeedsSubjectPrincipal" or - identifier == "NeedsCallerType" or - identifier == "ReturnValueNeedsContainsHack" or - identifier == "BinaryName" or - identifier == "NonEnumerable"): + raise WebIDLError( + "[CEReactions] must take no arguments", [attr.location] + ) + elif ( + identifier == "Pref" + or identifier == "Deprecated" + or identifier == "SetterThrows" + or identifier == "Throws" + or identifier == "GetterThrows" + or identifier == "SetterCanOOM" + or identifier == "CanOOM" + or identifier == "GetterCanOOM" + or identifier == "ChromeOnly" + or identifier == "Func" + or identifier == "SecureContext" + or identifier == "Frozen" + or identifier == "NewObject" + or identifier == "NeedsSubjectPrincipal" + or identifier == "SetterNeedsSubjectPrincipal" + or identifier == "GetterNeedsSubjectPrincipal" + or identifier == "NeedsCallerType" + or identifier == "ReturnValueNeedsContainsHack" + or identifier == "BinaryName" + or identifier == "NonEnumerable" + ): # Known attributes that we don't need to do anything with here pass else: - raise WebIDLError("Unknown extended attribute %s on attribute" % identifier, - [attr.location]) + raise WebIDLError( + "Unknown extended attribute %s on attribute" % identifier, + [attr.location], + ) IDLInterfaceMember.handleExtendedAttribute(self, attr) def resolve(self, parentScope): @@ -4780,15 +5542,22 @@ def _getDependentObjects(self): def expand(self, members): assert self.stringifier if not self.type.isDOMString() and not self.type.isUSVString(): - raise WebIDLError("The type of a stringifer attribute must be " - "either DOMString or USVString", - [self.location]) - identifier = 
IDLUnresolvedIdentifier(self.location, "__stringifier", - allowDoubleUnderscore=True) - method = IDLMethod(self.location, - identifier, - returnType=self.type, arguments=[], - stringifier=True, underlyingAttr=self) + raise WebIDLError( + "The type of a stringifer attribute must be " + "either DOMString or USVString", + [self.location], + ) + identifier = IDLUnresolvedIdentifier( + self.location, "__stringifier", allowDoubleUnderscore=True + ) + method = IDLMethod( + self.location, + identifier, + returnType=self.type, + arguments=[], + stringifier=True, + underlyingAttr=self, + ) allowedExtAttrs = ["Throws", "NeedsSubjectPrincipal", "Pure"] # Safe to ignore these as they are only meaningful for attributes attributeOnlyExtAttrs = [ @@ -4799,21 +5568,37 @@ def expand(self, members): for (key, value) in self._extendedAttrDict.items(): if key in allowedExtAttrs: if value is not True: - raise WebIDLError("[%s] with a value is currently " - "unsupported in stringifier attributes, " - "please file a bug to add support" % key, - [self.location]) - method.addExtendedAttributes([IDLExtendedAttribute(self.location, (key,))]) + raise WebIDLError( + "[%s] with a value is currently " + "unsupported in stringifier attributes, " + "please file a bug to add support" % key, + [self.location], + ) + method.addExtendedAttributes( + [IDLExtendedAttribute(self.location, (key,))] + ) elif not key in attributeOnlyExtAttrs: - raise WebIDLError("[%s] is currently unsupported in " - "stringifier attributes, please file a bug " - "to add support" % key, - [self.location]) + raise WebIDLError( + "[%s] is currently unsupported in " + "stringifier attributes, please file a bug " + "to add support" % key, + [self.location], + ) members.append(method) class IDLArgument(IDLObjectWithIdentifier): - def __init__(self, location, identifier, type, optional=False, defaultValue=None, variadic=False, dictionaryMember=False, allowTypeAttributes=False): + def __init__( + self, + location, + identifier, + type, + optional=False, + defaultValue=None, + variadic=False, + dictionaryMember=False, + allowTypeAttributes=False, + ): IDLObjectWithIdentifier.__init__(self, location, None, identifier) assert isinstance(type, IDLType) @@ -4834,24 +5619,36 @@ def __init__(self, location, identifier, type, optional=False, defaultValue=None def addExtendedAttributes(self, attrs): for attribute in attrs: identifier = attribute.identifier() - if self.allowTypeAttributes and (identifier == "EnforceRange" or identifier == "Clamp" or - identifier == "TreatNullAs" or identifier == "AllowShared"): + if self.allowTypeAttributes and ( + identifier == "EnforceRange" + or identifier == "Clamp" + or identifier == "TreatNullAs" + or identifier == "AllowShared" + ): self.type = self.type.withExtendedAttributes([attribute]) elif identifier == "TreatNonCallableAsNull": self._allowTreatNonCallableAsNull = True - elif (self.dictionaryMember and - (identifier == "ChromeOnly" or - identifier == "Func" or - identifier == "Pref")): + elif self.dictionaryMember and ( + identifier == "ChromeOnly" + or identifier == "Func" + or identifier == "Pref" + ): if not self.optional: - raise WebIDLError("[%s] must not be used on a required " - "dictionary member" % identifier, - [attribute.location]) + raise WebIDLError( + "[%s] must not be used on a required " + "dictionary member" % identifier, + [attribute.location], + ) else: - raise WebIDLError("Unhandled extended attribute on %s" % - ("a dictionary member" if self.dictionaryMember else - "an argument"), - 
[attribute.location]) + raise WebIDLError( + "Unhandled extended attribute on %s" + % ( + "a dictionary member" + if self.dictionaryMember + else "an argument" + ), + [attribute.location], + ) attrlist = attribute.listValue() self._extendedAttrDict[identifier] = attrlist if len(attrlist) else True @@ -4875,8 +5672,9 @@ def complete(self, scope): self.type = type if self.type.isAny(): - assert (self.defaultValue is None or - isinstance(self.defaultValue, IDLNullValue)) + assert self.defaultValue is None or isinstance( + self.defaultValue, IDLNullValue + ) # optional 'any' values always have a default value if self.optional and not self.defaultValue and not self.variadic: # Set the default value to undefined, for simplicity, so the @@ -4884,12 +5682,13 @@ def complete(self, scope): self.defaultValue = IDLUndefinedValue(self.location) if self.dictionaryMember and self.type.treatNullAsEmpty: - raise WebIDLError("Dictionary members cannot be [TreatNullAs]", [self.location]) + raise WebIDLError( + "Dictionary members cannot be [TreatNullAs]", [self.location] + ) # Now do the coercing thing; this needs to happen after the # above creation of a default value. if self.defaultValue: - self.defaultValue = self.defaultValue.coerceToType(self.type, - self.location) + self.defaultValue = self.defaultValue.coerceToType(self.type, self.location) assert self.defaultValue def allowTreatNonCallableAsNull(self): @@ -4906,7 +5705,9 @@ def canHaveMissingValue(self): class IDLCallback(IDLObjectWithScope): - def __init__(self, location, parentScope, identifier, returnType, arguments, isConstructor): + def __init__( + self, location, parentScope, identifier, returnType, arguments, isConstructor + ): assert isinstance(returnType, IDLType) self._returnType = returnType @@ -4963,20 +5764,27 @@ def addExtendedAttributes(self, attrs): self._treatNonCallableAsNull = True elif attr.identifier() == "TreatNonObjectAsNull": if self._isConstructor: - raise WebIDLError("[TreatNonObjectAsNull] is not supported " - "on constructors", [self.location]) + raise WebIDLError( + "[TreatNonObjectAsNull] is not supported " "on constructors", + [self.location], + ) self._treatNonObjectAsNull = True elif attr.identifier() == "MOZ_CAN_RUN_SCRIPT_BOUNDARY": if self._isConstructor: - raise WebIDLError("[MOZ_CAN_RUN_SCRIPT_BOUNDARY] is not " - "permitted on constructors", - [self.location]) + raise WebIDLError( + "[MOZ_CAN_RUN_SCRIPT_BOUNDARY] is not " + "permitted on constructors", + [self.location], + ) self._isRunScriptBoundary = True else: unhandledAttrs.append(attr) if self._treatNonCallableAsNull and self._treatNonObjectAsNull: - raise WebIDLError("Cannot specify both [TreatNonCallableAsNull] " - "and [TreatNonObjectAsNull]", [self.location]) + raise WebIDLError( + "Cannot specify both [TreatNonCallableAsNull] " + "and [TreatNonObjectAsNull]", + [self.location], + ) if len(unhandledAttrs) != 0: IDLType.addExtendedAttributes(self, unhandledAttrs) @@ -4984,7 +5792,7 @@ def _getDependentObjects(self): return set([self._returnType] + self._arguments) def isRunScriptBoundary(self): - return self._isRunScriptBoundary; + return self._isRunScriptBoundary class IDLCallbackType(IDLType): @@ -5004,8 +5812,13 @@ def isDistinguishableFrom(self, other): if other.isUnion(): # Just forward to the union; it'll deal return other.isDistinguishableFrom(self) - return (other.isPrimitive() or other.isString() or other.isEnum() or - other.isNonCallbackInterface() or other.isSequence()) + return ( + other.isPrimitive() + or other.isString() + or 
other.isEnum() + or other.isNonCallbackInterface() + or other.isSequence() + ) def _getDependentObjects(self): return self.callback._getDependentObjects() @@ -5020,6 +5833,7 @@ class IDLMethodOverload: IDLMethodOverload for each one, all hanging off an IDLMethod representing the full set of overloads. """ + def __init__(self, returnType, arguments, location): self.returnType = returnType # Clone the list of arguments, just in case @@ -5038,28 +5852,31 @@ def includesRestrictedFloatArgument(self): class IDLMethod(IDLInterfaceMember, IDLScope): Special = enum( - 'Getter', - 'Setter', - 'Deleter', - 'LegacyCaller', - base=IDLInterfaceMember.Special + "Getter", "Setter", "Deleter", "LegacyCaller", base=IDLInterfaceMember.Special ) - NamedOrIndexed = enum( - 'Neither', - 'Named', - 'Indexed' - ) - - def __init__(self, location, identifier, returnType, arguments, - static=False, getter=False, setter=False, - deleter=False, specialType=NamedOrIndexed.Neither, - legacycaller=False, stringifier=False, - maplikeOrSetlikeOrIterable=None, - underlyingAttr=None): + NamedOrIndexed = enum("Neither", "Named", "Indexed") + + def __init__( + self, + location, + identifier, + returnType, + arguments, + static=False, + getter=False, + setter=False, + deleter=False, + specialType=NamedOrIndexed.Neither, + legacycaller=False, + stringifier=False, + maplikeOrSetlikeOrIterable=None, + underlyingAttr=None, + ): # REVIEW: specialType is NamedOrIndexed -- wow, this is messed up. - IDLInterfaceMember.__init__(self, location, identifier, - IDLInterfaceMember.Tags.Method) + IDLInterfaceMember.__init__( + self, location, identifier, IDLInterfaceMember.Tags.Method + ) self._hasOverloads = False @@ -5080,7 +5897,9 @@ def __init__(self, location, identifier, returnType, arguments, self._legacycaller = legacycaller assert isinstance(stringifier, bool) self._stringifier = stringifier - assert maplikeOrSetlikeOrIterable is None or isinstance(maplikeOrSetlikeOrIterable, IDLMaplikeOrSetlikeOrIterableBase) + assert maplikeOrSetlikeOrIterable is None or isinstance( + maplikeOrSetlikeOrIterable, IDLMaplikeOrSetlikeOrIterableBase + ) self.maplikeOrSetlikeOrIterable = maplikeOrSetlikeOrIterable self._htmlConstructor = False self.underlyingAttr = underlyingAttr @@ -5091,8 +5910,10 @@ def __init__(self, location, identifier, returnType, arguments, self.aliases = [] if static and identifier.name == "prototype": - raise WebIDLError("The identifier of a static operation must not be 'prototype'", - [location]) + raise WebIDLError( + "The identifier of a static operation must not be 'prototype'", + [location], + ) self.assertSignatureConstraints() @@ -5105,8 +5926,10 @@ def assertSignatureConstraints(self): overload = self._overloads[0] arguments = overload.arguments assert len(arguments) == 1 - assert (arguments[0].type == BuiltinTypes[IDLBuiltinType.Types.domstring] or - arguments[0].type == BuiltinTypes[IDLBuiltinType.Types.unsigned_long]) + assert ( + arguments[0].type == BuiltinTypes[IDLBuiltinType.Types.domstring] + or arguments[0].type == BuiltinTypes[IDLBuiltinType.Types.unsigned_long] + ) assert not arguments[0].optional and not arguments[0].variadic assert not self._getter or not overload.returnType.isVoid() @@ -5114,8 +5937,10 @@ def assertSignatureConstraints(self): assert len(self._overloads) == 1 arguments = self._overloads[0].arguments assert len(arguments) == 2 - assert (arguments[0].type == BuiltinTypes[IDLBuiltinType.Types.domstring] or - arguments[0].type == BuiltinTypes[IDLBuiltinType.Types.unsigned_long]) + assert ( + 
arguments[0].type == BuiltinTypes[IDLBuiltinType.Types.domstring] + or arguments[0].type == BuiltinTypes[IDLBuiltinType.Types.unsigned_long] + ) assert not arguments[0].optional and not arguments[0].variadic assert not arguments[1].optional and not arguments[1].variadic @@ -5124,7 +5949,9 @@ def assertSignatureConstraints(self): overload = self._overloads[0] assert len(overload.arguments) == 0 if not self.underlyingAttr: - assert overload.returnType == BuiltinTypes[IDLBuiltinType.Types.domstring] + assert ( + overload.returnType == BuiltinTypes[IDLBuiltinType.Types.domstring] + ) def isStatic(self): return self._static @@ -5142,13 +5969,17 @@ def isDeleter(self): return self._deleter def isNamed(self): - assert (self._specialType == IDLMethod.NamedOrIndexed.Named or - self._specialType == IDLMethod.NamedOrIndexed.Indexed) + assert ( + self._specialType == IDLMethod.NamedOrIndexed.Named + or self._specialType == IDLMethod.NamedOrIndexed.Indexed + ) return self._specialType == IDLMethod.NamedOrIndexed.Named def isIndexed(self): - assert (self._specialType == IDLMethod.NamedOrIndexed.Named or - self._specialType == IDLMethod.NamedOrIndexed.Indexed) + assert ( + self._specialType == IDLMethod.NamedOrIndexed.Named + or self._specialType == IDLMethod.NamedOrIndexed.Indexed + ) return self._specialType == IDLMethod.NamedOrIndexed.Indexed def isLegacycaller(self): @@ -5171,11 +6002,13 @@ def isMaplikeOrSetlikeOrIterableMethod(self): return self.maplikeOrSetlikeOrIterable is not None def isSpecial(self): - return (self.isGetter() or - self.isSetter() or - self.isDeleter() or - self.isLegacycaller() or - self.isStringifier()) + return ( + self.isGetter() + or self.isSetter() + or self.isDeleter() + or self.isLegacycaller() + or self.isStringifier() + ) def isHTMLConstructor(self): return self._htmlConstructor @@ -5191,8 +6024,10 @@ def isIdentifierLess(self): implemented interfaces, so while these functions use what is considered an non-identifier name, they actually DO have an identifier. 
""" - return (self.identifier.name[:2] == "__" and - not self.isMaplikeOrSetlikeOrIterableMethod()) + return ( + self.identifier.name[:2] == "__" + and not self.isMaplikeOrSetlikeOrIterableMethod() + ) def resolve(self, parentScope): assert isinstance(parentScope, IDLScope) @@ -5206,36 +6041,52 @@ def addOverload(self, method): assert len(method._overloads) == 1 if self._extendedAttrDict != method._extendedAttrDict: - extendedAttrDiff = set(self._extendedAttrDict.keys()) ^ set(method._extendedAttrDict.keys()) + extendedAttrDiff = set(self._extendedAttrDict.keys()) ^ set( + method._extendedAttrDict.keys() + ) - if extendedAttrDiff == { "LenientFloat" }: + if extendedAttrDiff == {"LenientFloat"}: if "LenientFloat" not in self._extendedAttrDict: for overload in self._overloads: if overload.includesRestrictedFloatArgument(): - raise WebIDLError("Restricted float behavior differs on different " - "overloads of %s" % method.identifier, - [overload.location, method.location]) - self._extendedAttrDict["LenientFloat"] = method._extendedAttrDict["LenientFloat"] + raise WebIDLError( + "Restricted float behavior differs on different " + "overloads of %s" % method.identifier, + [overload.location, method.location], + ) + self._extendedAttrDict["LenientFloat"] = method._extendedAttrDict[ + "LenientFloat" + ] elif method._overloads[0].includesRestrictedFloatArgument(): - raise WebIDLError("Restricted float behavior differs on different " - "overloads of %s" % method.identifier, - [self.location, method.location]) + raise WebIDLError( + "Restricted float behavior differs on different " + "overloads of %s" % method.identifier, + [self.location, method.location], + ) else: - raise WebIDLError("Extended attributes differ on different " - "overloads of %s" % method.identifier, - [self.location, method.location]) + raise WebIDLError( + "Extended attributes differ on different " + "overloads of %s" % method.identifier, + [self.location, method.location], + ) self._overloads.extend(method._overloads) self._hasOverloads = True if self.isStatic() != method.isStatic(): - raise WebIDLError("Overloaded identifier %s appears with different values of the 'static' attribute" % method.identifier, - [method.location]) + raise WebIDLError( + "Overloaded identifier %s appears with different values of the 'static' attribute" + % method.identifier, + [method.location], + ) if self.isLegacycaller() != method.isLegacycaller(): - raise WebIDLError("Overloaded identifier %s appears with different values of the 'legacycaller' attribute" % method.identifier, - [method.location]) + raise WebIDLError( + "Overloaded identifier %s appears with different values of the 'legacycaller' attribute" + % method.identifier, + [method.location], + ) # Can't overload special things! assert not self.isGetter() @@ -5252,8 +6103,9 @@ def addOverload(self, method): return self def signatures(self): - return [(overload.returnType, overload.arguments) for overload in - self._overloads] + return [ + (overload.returnType, overload.arguments) for overload in self._overloads + ] def finish(self, scope): IDLInterfaceMember.finish(self, scope) @@ -5275,8 +6127,11 @@ def finish(self, scope): # Now compute various information that will be used by the # WebIDL overload resolution algorithm. 
self.maxArgCount = max(len(s[1]) for s in self.signatures()) - self.allowedArgCounts = [i for i in range(self.maxArgCount+1) - if len(self.signaturesForArgCount(i)) != 0] + self.allowedArgCounts = [ + i + for i in range(self.maxArgCount + 1) + if len(self.signaturesForArgCount(i)) != 0 + ] def validate(self): IDLInterfaceMember.validate(self) @@ -5295,19 +6150,26 @@ def validate(self): raise WebIDLError( "Signatures for method '%s' with %d arguments have " "different types of arguments at index %d, which " - "is before distinguishing index %d" % - (self.identifier.name, argCount, idx, - distinguishingIndex), - [self.location, overload.location]) + "is before distinguishing index %d" + % ( + self.identifier.name, + argCount, + idx, + distinguishingIndex, + ), + [self.location, overload.location], + ) overloadWithPromiseReturnType = None overloadWithoutPromiseReturnType = None for overload in self._overloads: returnType = overload.returnType if not returnType.unroll().isExposedInAllOf(self.exposureSet): - raise WebIDLError("Overload returns a type that is not exposed " - "everywhere where the method is exposed", - [overload.location]) + raise WebIDLError( + "Overload returns a type that is not exposed " + "everywhere where the method is exposed", + [overload.location], + ) variadicArgument = None @@ -5315,46 +6177,62 @@ def validate(self): for (idx, argument) in enumerate(arguments): assert argument.type.isComplete() - if ((argument.type.isDictionary() and - argument.type.unroll().inner.canBeEmpty()) or - (argument.type.isUnion() and - argument.type.unroll().hasPossiblyEmptyDictionaryType())): + if ( + argument.type.isDictionary() + and argument.type.unroll().inner.canBeEmpty() + ) or ( + argument.type.isUnion() + and argument.type.unroll().hasPossiblyEmptyDictionaryType() + ): # Optional dictionaries and unions containing optional # dictionaries at the end of the list or followed by # optional arguments must be optional. - if (not argument.optional and - all(arg.optional for arg in arguments[idx+1:])): - raise WebIDLError("Dictionary argument without any " - "required fields or union argument " - "containing such dictionary not " - "followed by a required argument " - "must be optional", - [argument.location]) - - if (not argument.defaultValue and - all(arg.optional for arg in arguments[idx+1:])): - raise WebIDLError("Dictionary argument without any " - "required fields or union argument " - "containing such dictionary not " - "followed by a required argument " - "must have a default value", - [argument.location]) + if not argument.optional and all( + arg.optional for arg in arguments[idx + 1 :] + ): + raise WebIDLError( + "Dictionary argument without any " + "required fields or union argument " + "containing such dictionary not " + "followed by a required argument " + "must be optional", + [argument.location], + ) + + if not argument.defaultValue and all( + arg.optional for arg in arguments[idx + 1 :] + ): + raise WebIDLError( + "Dictionary argument without any " + "required fields or union argument " + "containing such dictionary not " + "followed by a required argument " + "must have a default value", + [argument.location], + ) # An argument cannot be a nullable dictionary or a # nullable union containing a dictionary. 
- if (argument.type.nullable() and - (argument.type.isDictionary() or - (argument.type.isUnion() and - argument.type.unroll().hasDictionaryType()))): - raise WebIDLError("An argument cannot be a nullable " - "dictionary or nullable union " - "containing a dictionary", - [argument.location]) + if argument.type.nullable() and ( + argument.type.isDictionary() + or ( + argument.type.isUnion() + and argument.type.unroll().hasDictionaryType() + ) + ): + raise WebIDLError( + "An argument cannot be a nullable " + "dictionary or nullable union " + "containing a dictionary", + [argument.location], + ) # Only the last argument can be variadic if variadicArgument: - raise WebIDLError("Variadic argument is not last argument", - [variadicArgument.location]) + raise WebIDLError( + "Variadic argument is not last argument", + [variadicArgument.location], + ) if argument.variadic: variadicArgument = argument @@ -5365,47 +6243,64 @@ def validate(self): # Make sure either all our overloads return Promises or none do if overloadWithPromiseReturnType and overloadWithoutPromiseReturnType: - raise WebIDLError("We have overloads with both Promise and " - "non-Promise return types", - [overloadWithPromiseReturnType.location, - overloadWithoutPromiseReturnType.location]) + raise WebIDLError( + "We have overloads with both Promise and " "non-Promise return types", + [ + overloadWithPromiseReturnType.location, + overloadWithoutPromiseReturnType.location, + ], + ) if overloadWithPromiseReturnType and self._legacycaller: - raise WebIDLError("May not have a Promise return type for a " - "legacycaller.", - [overloadWithPromiseReturnType.location]) + raise WebIDLError( + "May not have a Promise return type for a " "legacycaller.", + [overloadWithPromiseReturnType.location], + ) - if self.getExtendedAttribute("StaticClassOverride") and not \ - (self.identifier.scope.isJSImplemented() and self.isStatic()): - raise WebIDLError("StaticClassOverride can be applied to static" - " methods on JS-implemented classes only.", - [self.location]) + if self.getExtendedAttribute("StaticClassOverride") and not ( + self.identifier.scope.isJSImplemented() and self.isStatic() + ): + raise WebIDLError( + "StaticClassOverride can be applied to static" + " methods on JS-implemented classes only.", + [self.location], + ) # Ensure that toJSON methods satisfy the spec constraints on them. 
if self.identifier.name == "toJSON": if len(self.signatures()) != 1: - raise WebIDLError("toJSON method has multiple overloads", - [self._overloads[0].location, - self._overloads[1].location]) + raise WebIDLError( + "toJSON method has multiple overloads", + [self._overloads[0].location, self._overloads[1].location], + ) if len(self.signatures()[0][1]) != 0: - raise WebIDLError("toJSON method has arguments", - [self.location]) + raise WebIDLError("toJSON method has arguments", [self.location]) if not self.signatures()[0][0].isJSONType(): - raise WebIDLError("toJSON method has non-JSON return type", - [self.location]) + raise WebIDLError( + "toJSON method has non-JSON return type", [self.location] + ) def overloadsForArgCount(self, argc): - return [overload for overload in self._overloads if - len(overload.arguments) == argc or - (len(overload.arguments) > argc and - all(arg.optional for arg in overload.arguments[argc:])) or - (len(overload.arguments) < argc and - len(overload.arguments) > 0 and - overload.arguments[-1].variadic)] + return [ + overload + for overload in self._overloads + if len(overload.arguments) == argc + or ( + len(overload.arguments) > argc + and all(arg.optional for arg in overload.arguments[argc:]) + ) + or ( + len(overload.arguments) < argc + and len(overload.arguments) > 0 + and overload.arguments[-1].variadic + ) + ] def signaturesForArgCount(self, argc): - return [(overload.returnType, overload.arguments) for overload - in self.overloadsForArgCount(argc)] + return [ + (overload.returnType, overload.arguments) + for overload in self.overloadsForArgCount(argc) + ] def locationsForArgCount(self, argc): return [overload.location for overload in self.overloadsForArgCount(argc)] @@ -5413,156 +6308,190 @@ def locationsForArgCount(self, argc): def distinguishingIndexForArgCount(self, argc): def isValidDistinguishingIndex(idx, signatures): for (firstSigIndex, (firstRetval, firstArgs)) in enumerate(signatures[:-1]): - for (secondRetval, secondArgs) in signatures[firstSigIndex+1:]: + for (secondRetval, secondArgs) in signatures[firstSigIndex + 1 :]: if idx < len(firstArgs): firstType = firstArgs[idx].type else: - assert(firstArgs[-1].variadic) + assert firstArgs[-1].variadic firstType = firstArgs[-1].type if idx < len(secondArgs): secondType = secondArgs[idx].type else: - assert(secondArgs[-1].variadic) + assert secondArgs[-1].variadic secondType = secondArgs[-1].type if not firstType.isDistinguishableFrom(secondType): return False return True + signatures = self.signaturesForArgCount(argc) for idx in range(argc): if isValidDistinguishingIndex(idx, signatures): return idx # No valid distinguishing index. 
Time to throw locations = self.locationsForArgCount(argc) - raise WebIDLError("Signatures with %d arguments for method '%s' are not " - "distinguishable" % (argc, self.identifier.name), - locations) + raise WebIDLError( + "Signatures with %d arguments for method '%s' are not " + "distinguishable" % (argc, self.identifier.name), + locations, + ) def handleExtendedAttribute(self, attr): identifier = attr.identifier() - if (identifier == "GetterThrows" or - identifier == "SetterThrows" or - identifier == "GetterCanOOM" or - identifier == "SetterCanOOM" or - identifier == "SetterNeedsSubjectPrincipal" or - identifier == "GetterNeedsSubjectPrincipal"): - raise WebIDLError("Methods must not be flagged as " - "[%s]" % identifier, - [attr.location, self.location]) + if ( + identifier == "GetterThrows" + or identifier == "SetterThrows" + or identifier == "GetterCanOOM" + or identifier == "SetterCanOOM" + or identifier == "SetterNeedsSubjectPrincipal" + or identifier == "GetterNeedsSubjectPrincipal" + ): + raise WebIDLError( + "Methods must not be flagged as " "[%s]" % identifier, + [attr.location, self.location], + ) elif identifier == "Unforgeable": if self.isStatic(): - raise WebIDLError("[Unforgeable] is only allowed on non-static " - "methods", [attr.location, self.location]) + raise WebIDLError( + "[Unforgeable] is only allowed on non-static " "methods", + [attr.location, self.location], + ) self._unforgeable = True elif identifier == "SameObject": - raise WebIDLError("Methods must not be flagged as [SameObject]", - [attr.location, self.location]) + raise WebIDLError( + "Methods must not be flagged as [SameObject]", + [attr.location, self.location], + ) elif identifier == "Constant": - raise WebIDLError("Methods must not be flagged as [Constant]", - [attr.location, self.location]) + raise WebIDLError( + "Methods must not be flagged as [Constant]", + [attr.location, self.location], + ) elif identifier == "PutForwards": - raise WebIDLError("Only attributes support [PutForwards]", - [attr.location, self.location]) + raise WebIDLError( + "Only attributes support [PutForwards]", [attr.location, self.location] + ) elif identifier == "LenientSetter": - raise WebIDLError("Only attributes support [LenientSetter]", - [attr.location, self.location]) + raise WebIDLError( + "Only attributes support [LenientSetter]", + [attr.location, self.location], + ) elif identifier == "LenientFloat": # This is called before we've done overload resolution overloads = self._overloads assert len(overloads) == 1 if not overloads[0].returnType.isVoid(): - raise WebIDLError("[LenientFloat] used on a non-void method", - [attr.location, self.location]) + raise WebIDLError( + "[LenientFloat] used on a non-void method", + [attr.location, self.location], + ) if not overloads[0].includesRestrictedFloatArgument(): - raise WebIDLError("[LenientFloat] used on an operation with no " - "restricted float type arguments", - [attr.location, self.location]) + raise WebIDLError( + "[LenientFloat] used on an operation with no " + "restricted float type arguments", + [attr.location, self.location], + ) elif identifier == "Exposed": convertExposedAttrToGlobalNameSet(attr, self._exposureGlobalNames) - elif (identifier == "CrossOriginCallable" or - identifier == "WebGLHandlesContextLoss"): + elif ( + identifier == "CrossOriginCallable" + or identifier == "WebGLHandlesContextLoss" + ): # Known no-argument attributes. 
if not attr.noArguments(): - raise WebIDLError("[%s] must take no arguments" % identifier, - [attr.location]) + raise WebIDLError( + "[%s] must take no arguments" % identifier, [attr.location] + ) if identifier == "CrossOriginCallable" and self.isStatic(): - raise WebIDLError("[CrossOriginCallable] is only allowed on non-static " - "attributes", - [attr.location, self.location]) + raise WebIDLError( + "[CrossOriginCallable] is only allowed on non-static " "attributes", + [attr.location, self.location], + ) elif identifier == "Pure": if not attr.noArguments(): - raise WebIDLError("[Pure] must take no arguments", - [attr.location]) + raise WebIDLError("[Pure] must take no arguments", [attr.location]) self._setDependsOn("DOMState") self._setAffects("Nothing") elif identifier == "Affects": if not attr.hasValue(): - raise WebIDLError("[Affects] takes an identifier", - [attr.location]) + raise WebIDLError("[Affects] takes an identifier", [attr.location]) self._setAffects(attr.value()) elif identifier == "DependsOn": if not attr.hasValue(): - raise WebIDLError("[DependsOn] takes an identifier", - [attr.location]) + raise WebIDLError("[DependsOn] takes an identifier", [attr.location]) self._setDependsOn(attr.value()) elif identifier == "Alias": if not attr.hasValue(): - raise WebIDLError("[Alias] takes an identifier or string", - [attr.location]) + raise WebIDLError( + "[Alias] takes an identifier or string", [attr.location] + ) self._addAlias(attr.value()) elif identifier == "UseCounter": if self.isSpecial(): - raise WebIDLError("[UseCounter] must not be used on a special " - "operation", - [attr.location, self.location]) + raise WebIDLError( + "[UseCounter] must not be used on a special " "operation", + [attr.location, self.location], + ) elif identifier == "Unscopable": if not attr.noArguments(): - raise WebIDLError("[Unscopable] must take no arguments", - [attr.location]) + raise WebIDLError( + "[Unscopable] must take no arguments", [attr.location] + ) if self.isStatic(): - raise WebIDLError("[Unscopable] is only allowed on non-static " - "attributes and operations", - [attr.location, self.location]) + raise WebIDLError( + "[Unscopable] is only allowed on non-static " + "attributes and operations", + [attr.location, self.location], + ) elif identifier == "CEReactions": if not attr.noArguments(): - raise WebIDLError("[CEReactions] must take no arguments", - [attr.location]) + raise WebIDLError( + "[CEReactions] must take no arguments", [attr.location] + ) if self.isSpecial() and not self.isSetter() and not self.isDeleter(): - raise WebIDLError("[CEReactions] is only allowed on operation, " - "attribute, setter, and deleter", - [attr.location, self.location]) + raise WebIDLError( + "[CEReactions] is only allowed on operation, " + "attribute, setter, and deleter", + [attr.location, self.location], + ) elif identifier == "Default": if not attr.noArguments(): - raise WebIDLError("[Default] must take no arguments", - [attr.location]) + raise WebIDLError("[Default] must take no arguments", [attr.location]) if not self.isToJSON(): - raise WebIDLError("[Default] is only allowed on toJSON operations", - [attr.location, self.location]) + raise WebIDLError( + "[Default] is only allowed on toJSON operations", + [attr.location, self.location], + ) if self.signatures()[0][0] != BuiltinTypes[IDLBuiltinType.Types.object]: - raise WebIDLError("The return type of the default toJSON " - "operation must be 'object'", - [attr.location, self.location]) - elif (identifier == "Throws" or - identifier == "CanOOM" or - 
identifier == "NewObject" or - identifier == "ChromeOnly" or - identifier == "Pref" or - identifier == "Deprecated" or - identifier == "Func" or - identifier == "SecureContext" or - identifier == "BinaryName" or - identifier == "NeedsSubjectPrincipal" or - identifier == "NeedsCallerType" or - identifier == "StaticClassOverride" or - identifier == "NonEnumerable" or - identifier == "Unexposed"): + raise WebIDLError( + "The return type of the default toJSON " + "operation must be 'object'", + [attr.location, self.location], + ) + elif ( + identifier == "Throws" + or identifier == "CanOOM" + or identifier == "NewObject" + or identifier == "ChromeOnly" + or identifier == "Pref" + or identifier == "Deprecated" + or identifier == "Func" + or identifier == "SecureContext" + or identifier == "BinaryName" + or identifier == "NeedsSubjectPrincipal" + or identifier == "NeedsCallerType" + or identifier == "StaticClassOverride" + or identifier == "NonEnumerable" + or identifier == "Unexposed" + ): # Known attributes that we don't need to do anything with here pass else: - raise WebIDLError("Unknown extended attribute %s on method" % identifier, - [attr.location]) + raise WebIDLError( + "Unknown extended attribute %s on method" % identifier, [attr.location] + ) IDLInterfaceMember.handleExtendedAttribute(self, attr) def returnsPromise(self): @@ -5596,45 +6525,53 @@ def addExtendedAttributes(self, attrs): def handleExtendedAttribute(self, attr): identifier = attr.identifier() - if (identifier == "BinaryName" or - identifier == "ChromeOnly" or - identifier == "NewObject" or - identifier == "SecureContext" or - identifier == "Throws" or - identifier == "Func" or - identifier == "Pref"): + if ( + identifier == "BinaryName" + or identifier == "ChromeOnly" + or identifier == "NewObject" + or identifier == "SecureContext" + or identifier == "Throws" + or identifier == "Func" + or identifier == "Pref" + ): IDLMethod.handleExtendedAttribute(self, attr) elif identifier == "HTMLConstructor": if not attr.noArguments(): - raise WebIDLError("[HTMLConstructor] must take no arguments", - [attr.location]) + raise WebIDLError( + "[HTMLConstructor] must take no arguments", [attr.location] + ) # We shouldn't end up here for named constructors. - assert(self.identifier.name == "constructor") + assert self.identifier.name == "constructor" if any(len(sig[1]) != 0 for sig in self.signatures()): - raise WebIDLError("[HTMLConstructor] must not be applied to a " - "constructor operation that has arguments.", - [attr.location]) + raise WebIDLError( + "[HTMLConstructor] must not be applied to a " + "constructor operation that has arguments.", + [attr.location], + ) self._htmlConstructor = True else: - raise WebIDLError("Unknown extended attribute %s on method" % identifier, - [attr.location]) + raise WebIDLError( + "Unknown extended attribute %s on method" % identifier, [attr.location] + ) def reallyInit(self, parentInterface): name = self._initName location = self._initLocation identifier = IDLUnresolvedIdentifier(location, name, allowForbidden=True) retType = IDLWrapperType(parentInterface.location, parentInterface) - IDLMethod.__init__(self, location, identifier, retType, self._initArgs, - static=True) - self._inited = True; + IDLMethod.__init__( + self, location, identifier, retType, self._initArgs, static=True + ) + self._inited = True # Propagate through whatever extended attributes we already had self.addExtendedAttributes(self._initExtendedAttrs) self._initExtendedAttrs = [] # Constructors are always NewObject. 
Whether they throw or not is # indicated by [Throws] annotations in the usual way. self.addExtendedAttributes( - [IDLExtendedAttribute(self.location, ("NewObject",))]) + [IDLExtendedAttribute(self.location, ("NewObject",))] + ) class IDLIncludesStatement(IDLObject): @@ -5648,25 +6585,28 @@ def finish(self, scope): if self._finished: return self._finished = True - assert(isinstance(self.interface, IDLIdentifierPlaceholder)) - assert(isinstance(self.mixin, IDLIdentifierPlaceholder)) + assert isinstance(self.interface, IDLIdentifierPlaceholder) + assert isinstance(self.mixin, IDLIdentifierPlaceholder) interface = self.interface.finish(scope) mixin = self.mixin.finish(scope) # NOTE: we depend on not setting self.interface and # self.mixin here to keep track of the original # locations. if not isinstance(interface, IDLInterface): - raise WebIDLError("Left-hand side of 'includes' is not an " - "interface", - [self.interface.location, interface.location]) + raise WebIDLError( + "Left-hand side of 'includes' is not an " "interface", + [self.interface.location, interface.location], + ) if interface.isCallback(): - raise WebIDLError("Left-hand side of 'includes' is a callback " - "interface", - [self.interface.location, interface.location]) + raise WebIDLError( + "Left-hand side of 'includes' is a callback " "interface", + [self.interface.location, interface.location], + ) if not isinstance(mixin, IDLInterfaceMixin): - raise WebIDLError("Right-hand side of 'includes' is not an " - "interface mixin", - [self.mixin.location, mixin.location]) + raise WebIDLError( + "Right-hand side of 'includes' is not an " "interface mixin", + [self.mixin.location, mixin.location], + ) mixin.actualExposureGlobalNames.update(interface._exposureGlobalNames) @@ -5679,14 +6619,18 @@ def validate(self): def addExtendedAttributes(self, attrs): if len(attrs) != 0: - raise WebIDLError("There are no extended attributes that are " - "allowed on includes statements", - [attrs[0].location, self.location]) + raise WebIDLError( + "There are no extended attributes that are " + "allowed on includes statements", + [attrs[0].location, self.location], + ) + class IDLExtendedAttribute(IDLObject): """ A class to represent IDL extended attributes so we can give them locations """ + def __init__(self, location, tuple): IDLObject.__init__(self, location) self._tuple = tuple @@ -5701,15 +6645,18 @@ def hasValue(self): return len(self._tuple) >= 2 and isinstance(self._tuple[1], str) def value(self): - assert(self.hasValue()) + assert self.hasValue() return self._tuple[1] def hasArgs(self): - return (len(self._tuple) == 2 and isinstance(self._tuple[1], list) or - len(self._tuple) == 3) + return ( + len(self._tuple) == 2 + and isinstance(self._tuple[1], list) + or len(self._tuple) == 3 + ) def args(self): - assert(self.hasArgs()) + assert self.hasArgs() # Our args are our last element return self._tuple[-1] @@ -5719,40 +6666,40 @@ def listValue(self): """ return list(self._tuple)[1:] + # Parser class Tokenizer(object): - tokens = [ - "INTEGER", - "FLOATLITERAL", - "IDENTIFIER", - "STRING", - "WHITESPACE", - "OTHER" - ] + tokens = ["INTEGER", "FLOATLITERAL", "IDENTIFIER", "STRING", "WHITESPACE", "OTHER"] def t_FLOATLITERAL(self, t): - r'(-?(([0-9]+\.[0-9]*|[0-9]*\.[0-9]+)([Ee][+-]?[0-9]+)?|[0-9]+[Ee][+-]?[0-9]+|Infinity))|NaN' + r"(-?(([0-9]+\.[0-9]*|[0-9]*\.[0-9]+)([Ee][+-]?[0-9]+)?|[0-9]+[Ee][+-]?[0-9]+|Infinity))|NaN" t.value = float(t.value) return t def t_INTEGER(self, t): - r'-?(0([0-7]+|[Xx][0-9A-Fa-f]+)?|[1-9][0-9]*)' + 
r"-?(0([0-7]+|[Xx][0-9A-Fa-f]+)?|[1-9][0-9]*)" try: # Can't use int(), because that doesn't handle octal properly. t.value = parseInt(t.value) except: - raise WebIDLError("Invalid integer literal", - [Location(lexer=self.lexer, - lineno=self.lexer.lineno, - lexpos=self.lexer.lexpos, - filename=self._filename)]) + raise WebIDLError( + "Invalid integer literal", + [ + Location( + lexer=self.lexer, + lineno=self.lexer.lineno, + lexpos=self.lexer.lexpos, + filename=self._filename, + ) + ], + ) return t def t_IDENTIFIER(self, t): - r'[_-]?[A-Za-z][0-9A-Z_a-z-]*' - t.type = self.keywords.get(t.value, 'IDENTIFIER') + r"[_-]?[A-Za-z][0-9A-Z_a-z-]*" + t.type = self.keywords.get(t.value, "IDENTIFIER") return t def t_STRING(self, t): @@ -5761,17 +6708,17 @@ def t_STRING(self, t): return t def t_WHITESPACE(self, t): - r'[\t\n\r ]+|[\t\n\r ]*((//[^\n]*|/\*.*?\*/)[\t\n\r ]*)+' + r"[\t\n\r ]+|[\t\n\r ]*((//[^\n]*|/\*.*?\*/)[\t\n\r ]*)+" pass def t_ELLIPSIS(self, t): - r'\.\.\.' + r"\.\.\." t.type = self.keywords.get(t.value) return t def t_OTHER(self, t): - r'[^\t\n\r 0-9A-Z_a-z]' - t.type = self.keywords.get(t.value, 'OTHER') + r"[^\t\n\r 0-9A-Z_a-z]" + t.type = self.keywords.get(t.value, "OTHER") return t keywords = { @@ -5845,16 +6792,22 @@ def t_OTHER(self, t): "constructor": "CONSTRUCTOR", "symbol": "SYMBOL", "async": "ASYNC", - } + } tokens.extend(keywords.values()) def t_error(self, t): - raise WebIDLError("Unrecognized Input", - [Location(lexer=self.lexer, - lineno=self.lexer.lineno, - lexpos=self.lexer.lexpos, - filename=self.filename)]) + raise WebIDLError( + "Unrecognized Input", + [ + Location( + lexer=self.lexer, + lineno=self.lexer.lineno, + lexpos=self.lexer.lexpos, + filename=self.filename, + ) + ], + ) def __init__(self, outputdir, lexer=None): if lexer: @@ -5879,18 +6832,21 @@ class SqueakyCleanLogger(object): "Symbol 'OtherOrComma' is unreachable", # Which means the Other symbol is unreachable. "Symbol 'Other' is unreachable", - ] + ] def __init__(self): self.errors = [] def debug(self, msg, *args, **kwargs): pass + info = debug def warning(self, msg, *args, **kwargs): - if msg == "%s:%d: Rule %r defined, but not used" or \ - msg == "%s:%d: Rule '%s' defined, but not used": + if ( + msg == "%s:%d: Rule %r defined, but not used" + or msg == "%s:%d: Rule '%s' defined, but not used" + ): # Munge things so we don't have to hardcode filenames and # line numbers in our whitelist. whitelistmsg = "Rule %r defined, but not used" @@ -5900,6 +6856,7 @@ def warning(self, msg, *args, **kwargs): whitelistargs = args if (whitelistmsg % whitelistargs) not in SqueakyCleanLogger.errorWhitelist: self.errors.append(msg % args) + error = warning def reportGrammarErrors(self): @@ -5918,7 +6875,7 @@ def globalScope(self): # It's acceptable to split things at '|' boundaries. def p_Definitions(self, p): """ - Definitions : ExtendedAttributeList Definition Definitions + Definitions : ExtendedAttributeList Definition Definitions """ if p[2]: p[0] = [p[2]] @@ -5931,27 +6888,27 @@ def p_Definitions(self, p): def p_DefinitionsEmpty(self, p): """ - Definitions : + Definitions : """ p[0] = [] def p_Definition(self, p): """ - Definition : CallbackOrInterfaceOrMixin - | Namespace - | Partial - | Dictionary - | Exception - | Enum - | Typedef - | IncludesStatement + Definition : CallbackOrInterfaceOrMixin + | Namespace + | Partial + | Dictionary + | Exception + | Enum + | Typedef + | IncludesStatement """ p[0] = p[1] assert p[1] # We might not have implemented something ... 
def p_CallbackOrInterfaceOrMixinCallback(self, p): """ - CallbackOrInterfaceOrMixin : CALLBACK CallbackRestOrInterface + CallbackOrInterfaceOrMixin : CALLBACK CallbackRestOrInterface """ if p[2].isInterface(): assert isinstance(p[2], IDLInterface) @@ -5961,21 +6918,22 @@ def p_CallbackOrInterfaceOrMixinCallback(self, p): def p_CallbackOrInterfaceOrMixinInterfaceOrMixin(self, p): """ - CallbackOrInterfaceOrMixin : INTERFACE InterfaceOrMixin + CallbackOrInterfaceOrMixin : INTERFACE InterfaceOrMixin """ p[0] = p[2] def p_CallbackRestOrInterface(self, p): """ - CallbackRestOrInterface : CallbackRest - | CallbackConstructorRest - | CallbackInterface + CallbackRestOrInterface : CallbackRest + | CallbackConstructorRest + | CallbackInterface """ assert p[1] p[0] = p[1] - def handleNonPartialObject(self, location, identifier, constructor, - constructorArgs, nonPartialArgs): + def handleNonPartialObject( + self, location, identifier, constructor, constructorArgs, nonPartialArgs + ): """ This handles non-partial objects (interfaces, namespaces and dictionaries) by checking for an existing partial object, and promoting @@ -5996,10 +6954,11 @@ def handleNonPartialObject(self, location, identifier, constructor, existingObj = self.globalScope()._lookupIdentifier(identifier) if existingObj: if not isinstance(existingObj, constructor): - raise WebIDLError("%s has the same name as " - "non-%s object" % - (prettyname.capitalize(), prettyname), - [location, existingObj.location]) + raise WebIDLError( + "%s has the same name as " + "non-%s object" % (prettyname.capitalize(), prettyname), + [location, existingObj.location], + ) existingObj.setNonPartial(*nonPartialArgs) return existingObj except Exception as ex: @@ -6012,20 +6971,20 @@ def handleNonPartialObject(self, location, identifier, constructor, def p_InterfaceOrMixin(self, p): """ - InterfaceOrMixin : InterfaceRest - | MixinRest + InterfaceOrMixin : InterfaceRest + | MixinRest """ p[0] = p[1] def p_CallbackInterface(self, p): """ - CallbackInterface : INTERFACE InterfaceRest + CallbackInterface : INTERFACE InterfaceRest """ p[0] = p[2] def p_InterfaceRest(self, p): """ - InterfaceRest : IDENTIFIER Inheritance LBRACE InterfaceMembers RBRACE SEMICOLON + InterfaceRest : IDENTIFIER Inheritance LBRACE InterfaceMembers RBRACE SEMICOLON """ location = self.getLocation(p, 1) identifier = IDLUnresolvedIdentifier(location, p[1]) @@ -6033,13 +6992,16 @@ def p_InterfaceRest(self, p): parent = p[2] p[0] = self.handleNonPartialObject( - location, identifier, IDLInterface, + location, + identifier, + IDLInterface, [location, self.globalScope(), identifier, parent, members], - [location, parent, members]) + [location, parent, members], + ) def p_InterfaceForwardDecl(self, p): """ - InterfaceRest : IDENTIFIER SEMICOLON + InterfaceRest : IDENTIFIER SEMICOLON """ location = self.getLocation(p, 1) identifier = IDLUnresolvedIdentifier(location, p[1]) @@ -6048,10 +7010,12 @@ def p_InterfaceForwardDecl(self, p): if self.globalScope()._lookupIdentifier(identifier): p[0] = self.globalScope()._lookupIdentifier(identifier) if not isinstance(p[0], IDLExternalInterface): - raise WebIDLError("Name collision between external " - "interface declaration for identifier " - "%s and %s" % (identifier.name, p[0]), - [location, p[0].location]) + raise WebIDLError( + "Name collision between external " + "interface declaration for identifier " + "%s and %s" % (identifier.name, p[0]), + [location, p[0].location], + ) return except Exception as ex: if isinstance(ex, WebIDLError): @@ -6062,52 
+7026,63 @@ def p_InterfaceForwardDecl(self, p): def p_MixinRest(self, p): """ - MixinRest : MIXIN IDENTIFIER LBRACE MixinMembers RBRACE SEMICOLON + MixinRest : MIXIN IDENTIFIER LBRACE MixinMembers RBRACE SEMICOLON """ location = self.getLocation(p, 1) identifier = IDLUnresolvedIdentifier(self.getLocation(p, 2), p[2]) members = p[4] p[0] = self.handleNonPartialObject( - location, identifier, IDLInterfaceMixin, + location, + identifier, + IDLInterfaceMixin, [location, self.globalScope(), identifier, members], - [location, members]) + [location, members], + ) def p_Namespace(self, p): """ - Namespace : NAMESPACE IDENTIFIER LBRACE InterfaceMembers RBRACE SEMICOLON + Namespace : NAMESPACE IDENTIFIER LBRACE InterfaceMembers RBRACE SEMICOLON """ location = self.getLocation(p, 1) identifier = IDLUnresolvedIdentifier(self.getLocation(p, 2), p[2]) members = p[4] p[0] = self.handleNonPartialObject( - location, identifier, IDLNamespace, + location, + identifier, + IDLNamespace, [location, self.globalScope(), identifier, members], - [location, None, members]) + [location, None, members], + ) def p_Partial(self, p): """ - Partial : PARTIAL PartialDefinition + Partial : PARTIAL PartialDefinition """ p[0] = p[2] def p_PartialDefinitionInterface(self, p): """ - PartialDefinition : INTERFACE PartialInterfaceOrPartialMixin + PartialDefinition : INTERFACE PartialInterfaceOrPartialMixin """ p[0] = p[2] def p_PartialDefinition(self, p): """ - PartialDefinition : PartialNamespace - | PartialDictionary + PartialDefinition : PartialNamespace + | PartialDictionary """ p[0] = p[1] - def handlePartialObject(self, location, identifier, nonPartialConstructor, - nonPartialConstructorArgs, - partialConstructorArgs): + def handlePartialObject( + self, + location, + identifier, + nonPartialConstructor, + nonPartialConstructorArgs, + partialConstructorArgs, + ): """ This handles partial objects (interfaces, namespaces and dictionaries) by checking for an existing non-partial object, and adding ourselves to @@ -6131,10 +7106,11 @@ def handlePartialObject(self, location, identifier, nonPartialConstructor, nonPartialObject = self.globalScope()._lookupIdentifier(identifier) if nonPartialObject: if not isinstance(nonPartialObject, nonPartialConstructor): - raise WebIDLError("Partial %s has the same name as " - "non-%s object" % - (prettyname, prettyname), - [location, nonPartialObject.location]) + raise WebIDLError( + "Partial %s has the same name as " + "non-%s object" % (prettyname, prettyname), + [location, nonPartialObject.location], + ) except Exception as ex: if isinstance(ex, WebIDLError): raise ex @@ -6143,96 +7119,115 @@ def handlePartialObject(self, location, identifier, nonPartialConstructor, if not nonPartialObject: nonPartialObject = nonPartialConstructor( # No members, False for isKnownNonPartial - *(nonPartialConstructorArgs), members=[], isKnownNonPartial=False) + *(nonPartialConstructorArgs), + members=[], + isKnownNonPartial=False + ) partialObject = None if isinstance(nonPartialObject, IDLDictionary): partialObject = IDLPartialDictionary( - *(partialConstructorArgs + [nonPartialObject])) - elif isinstance(nonPartialObject, (IDLInterface, IDLInterfaceMixin, IDLNamespace)): + *(partialConstructorArgs + [nonPartialObject]) + ) + elif isinstance( + nonPartialObject, (IDLInterface, IDLInterfaceMixin, IDLNamespace) + ): partialObject = IDLPartialInterfaceOrNamespace( - *(partialConstructorArgs + [nonPartialObject])) + *(partialConstructorArgs + [nonPartialObject]) + ) else: - raise WebIDLError("Unknown partial 
object type %s" % - type(partialObject), - [location]) + raise WebIDLError( + "Unknown partial object type %s" % type(partialObject), [location] + ) return partialObject def p_PartialInterfaceOrPartialMixin(self, p): """ - PartialInterfaceOrPartialMixin : PartialInterfaceRest - | PartialMixinRest + PartialInterfaceOrPartialMixin : PartialInterfaceRest + | PartialMixinRest """ p[0] = p[1] def p_PartialInterfaceRest(self, p): """ - PartialInterfaceRest : IDENTIFIER LBRACE PartialInterfaceMembers RBRACE SEMICOLON + PartialInterfaceRest : IDENTIFIER LBRACE PartialInterfaceMembers RBRACE SEMICOLON """ location = self.getLocation(p, 1) identifier = IDLUnresolvedIdentifier(location, p[1]) members = p[3] p[0] = self.handlePartialObject( - location, identifier, IDLInterface, + location, + identifier, + IDLInterface, [location, self.globalScope(), identifier, None], - [location, identifier, members]) + [location, identifier, members], + ) def p_PartialMixinRest(self, p): """ - PartialMixinRest : MIXIN IDENTIFIER LBRACE MixinMembers RBRACE SEMICOLON + PartialMixinRest : MIXIN IDENTIFIER LBRACE MixinMembers RBRACE SEMICOLON """ location = self.getLocation(p, 1) identifier = IDLUnresolvedIdentifier(self.getLocation(p, 2), p[2]) members = p[4] p[0] = self.handlePartialObject( - location, identifier, IDLInterfaceMixin, + location, + identifier, + IDLInterfaceMixin, [location, self.globalScope(), identifier], - [location, identifier, members]) + [location, identifier, members], + ) def p_PartialNamespace(self, p): """ - PartialNamespace : NAMESPACE IDENTIFIER LBRACE InterfaceMembers RBRACE SEMICOLON + PartialNamespace : NAMESPACE IDENTIFIER LBRACE InterfaceMembers RBRACE SEMICOLON """ location = self.getLocation(p, 1) identifier = IDLUnresolvedIdentifier(self.getLocation(p, 2), p[2]) members = p[4] p[0] = self.handlePartialObject( - location, identifier, IDLNamespace, + location, + identifier, + IDLNamespace, [location, self.globalScope(), identifier], - [location, identifier, members]) + [location, identifier, members], + ) def p_PartialDictionary(self, p): """ - PartialDictionary : DICTIONARY IDENTIFIER LBRACE DictionaryMembers RBRACE SEMICOLON + PartialDictionary : DICTIONARY IDENTIFIER LBRACE DictionaryMembers RBRACE SEMICOLON """ location = self.getLocation(p, 1) identifier = IDLUnresolvedIdentifier(self.getLocation(p, 2), p[2]) members = p[4] p[0] = self.handlePartialObject( - location, identifier, IDLDictionary, + location, + identifier, + IDLDictionary, [location, self.globalScope(), identifier], - [location, identifier, members]) + [location, identifier, members], + ) def p_Inheritance(self, p): """ - Inheritance : COLON ScopedName + Inheritance : COLON ScopedName """ p[0] = IDLIdentifierPlaceholder(self.getLocation(p, 2), p[2]) def p_InheritanceEmpty(self, p): """ - Inheritance : + Inheritance : """ pass def p_InterfaceMembers(self, p): """ - InterfaceMembers : ExtendedAttributeList InterfaceMember InterfaceMembers + InterfaceMembers : ExtendedAttributeList InterfaceMember InterfaceMembers """ p[0] = [p[2]] @@ -6243,26 +7238,26 @@ def p_InterfaceMembers(self, p): def p_InterfaceMembersEmpty(self, p): """ - InterfaceMembers : + InterfaceMembers : """ p[0] = [] def p_InterfaceMember(self, p): """ - InterfaceMember : PartialInterfaceMember - | Constructor + InterfaceMember : PartialInterfaceMember + | Constructor """ p[0] = p[1] def p_Constructor(self, p): """ - Constructor : CONSTRUCTOR LPAREN ArgumentList RPAREN SEMICOLON + Constructor : CONSTRUCTOR LPAREN ArgumentList RPAREN SEMICOLON """ p[0] = 
IDLConstructor(self.getLocation(p, 1), p[3], "constructor") def p_PartialInterfaceMembers(self, p): """ - PartialInterfaceMembers : ExtendedAttributeList PartialInterfaceMember PartialInterfaceMembers + PartialInterfaceMembers : ExtendedAttributeList PartialInterfaceMember PartialInterfaceMembers """ p[0] = [p[2]] @@ -6273,27 +7268,26 @@ def p_PartialInterfaceMembers(self, p): def p_PartialInterfaceMembersEmpty(self, p): """ - PartialInterfaceMembers : + PartialInterfaceMembers : """ p[0] = [] def p_PartialInterfaceMember(self, p): """ - PartialInterfaceMember : Const - | AttributeOrOperationOrMaplikeOrSetlikeOrIterable + PartialInterfaceMember : Const + | AttributeOrOperationOrMaplikeOrSetlikeOrIterable """ p[0] = p[1] - def p_MixinMembersEmpty(self, p): """ - MixinMembers : + MixinMembers : """ p[0] = [] def p_MixinMembers(self, p): """ - MixinMembers : ExtendedAttributeList MixinMember MixinMembers + MixinMembers : ExtendedAttributeList MixinMember MixinMembers """ p[0] = [p[2]] @@ -6304,15 +7298,15 @@ def p_MixinMembers(self, p): def p_MixinMember(self, p): """ - MixinMember : Const - | Attribute - | Operation + MixinMember : Const + | Attribute + | Operation """ p[0] = p[1] def p_Dictionary(self, p): """ - Dictionary : DICTIONARY IDENTIFIER Inheritance LBRACE DictionaryMembers RBRACE SEMICOLON + Dictionary : DICTIONARY IDENTIFIER Inheritance LBRACE DictionaryMembers RBRACE SEMICOLON """ location = self.getLocation(p, 1) identifier = IDLUnresolvedIdentifier(self.getLocation(p, 2), p[2]) @@ -6321,8 +7315,8 @@ def p_Dictionary(self, p): def p_DictionaryMembers(self, p): """ - DictionaryMembers : ExtendedAttributeList DictionaryMember DictionaryMembers - | + DictionaryMembers : ExtendedAttributeList DictionaryMember DictionaryMembers + | """ if len(p) == 1: # We're at the end of the list @@ -6334,21 +7328,26 @@ def p_DictionaryMembers(self, p): def p_DictionaryMemberRequired(self, p): """ - DictionaryMember : REQUIRED TypeWithExtendedAttributes IDENTIFIER SEMICOLON + DictionaryMember : REQUIRED TypeWithExtendedAttributes IDENTIFIER SEMICOLON """ # These quack a lot like required arguments, so just treat them that way. t = p[2] assert isinstance(t, IDLType) identifier = IDLUnresolvedIdentifier(self.getLocation(p, 3), p[3]) - p[0] = IDLArgument(self.getLocation(p, 3), identifier, t, - optional=False, - defaultValue=None, variadic=False, - dictionaryMember=True) + p[0] = IDLArgument( + self.getLocation(p, 3), + identifier, + t, + optional=False, + defaultValue=None, + variadic=False, + dictionaryMember=True, + ) def p_DictionaryMember(self, p): """ - DictionaryMember : Type IDENTIFIER Default SEMICOLON + DictionaryMember : Type IDENTIFIER Default SEMICOLON """ # These quack a lot like optional arguments, so just treat them that way. 
t = p[1] @@ -6359,15 +7358,21 @@ def p_DictionaryMember(self, p): # Any attributes that precede this may apply to the type, so # we configure the argument to forward type attributes down instead of producing # a parse error - p[0] = IDLArgument(self.getLocation(p, 2), identifier, t, - optional=True, - defaultValue=defaultValue, variadic=False, - dictionaryMember=True, allowTypeAttributes=True) + p[0] = IDLArgument( + self.getLocation(p, 2), + identifier, + t, + optional=True, + defaultValue=defaultValue, + variadic=False, + dictionaryMember=True, + allowTypeAttributes=True, + ) def p_Default(self, p): """ - Default : EQUALS DefaultValue - | + Default : EQUALS DefaultValue + | """ if len(p) > 1: p[0] = p[2] @@ -6376,9 +7381,9 @@ def p_Default(self, p): def p_DefaultValue(self, p): """ - DefaultValue : ConstValue - | LBRACKET RBRACKET - | LBRACE RBRACE + DefaultValue : ConstValue + | LBRACKET RBRACKET + | LBRACE RBRACE """ if len(p) == 2: p[0] = p[1] @@ -6392,19 +7397,19 @@ def p_DefaultValue(self, p): def p_DefaultValueNull(self, p): """ - DefaultValue : NULL + DefaultValue : NULL """ p[0] = IDLNullValue(self.getLocation(p, 1)) def p_Exception(self, p): """ - Exception : EXCEPTION IDENTIFIER Inheritance LBRACE ExceptionMembers RBRACE SEMICOLON + Exception : EXCEPTION IDENTIFIER Inheritance LBRACE ExceptionMembers RBRACE SEMICOLON """ pass def p_Enum(self, p): """ - Enum : ENUM IDENTIFIER LBRACE EnumValueList RBRACE SEMICOLON + Enum : ENUM IDENTIFIER LBRACE EnumValueList RBRACE SEMICOLON """ location = self.getLocation(p, 1) identifier = IDLUnresolvedIdentifier(self.getLocation(p, 2), p[2]) @@ -6415,79 +7420,90 @@ def p_Enum(self, p): def p_EnumValueList(self, p): """ - EnumValueList : STRING EnumValueListComma + EnumValueList : STRING EnumValueListComma """ p[0] = [p[1]] p[0].extend(p[2]) def p_EnumValueListComma(self, p): """ - EnumValueListComma : COMMA EnumValueListString + EnumValueListComma : COMMA EnumValueListString """ p[0] = p[2] def p_EnumValueListCommaEmpty(self, p): """ - EnumValueListComma : + EnumValueListComma : """ p[0] = [] def p_EnumValueListString(self, p): """ - EnumValueListString : STRING EnumValueListComma + EnumValueListString : STRING EnumValueListComma """ p[0] = [p[1]] p[0].extend(p[2]) def p_EnumValueListStringEmpty(self, p): """ - EnumValueListString : + EnumValueListString : """ p[0] = [] def p_CallbackRest(self, p): """ - CallbackRest : IDENTIFIER EQUALS ReturnType LPAREN ArgumentList RPAREN SEMICOLON + CallbackRest : IDENTIFIER EQUALS ReturnType LPAREN ArgumentList RPAREN SEMICOLON """ identifier = IDLUnresolvedIdentifier(self.getLocation(p, 1), p[1]) - p[0] = IDLCallback(self.getLocation(p, 1), self.globalScope(), - identifier, p[3], p[5], isConstructor=False) + p[0] = IDLCallback( + self.getLocation(p, 1), + self.globalScope(), + identifier, + p[3], + p[5], + isConstructor=False, + ) def p_CallbackConstructorRest(self, p): """ - CallbackConstructorRest : CONSTRUCTOR IDENTIFIER EQUALS ReturnType LPAREN ArgumentList RPAREN SEMICOLON + CallbackConstructorRest : CONSTRUCTOR IDENTIFIER EQUALS ReturnType LPAREN ArgumentList RPAREN SEMICOLON """ identifier = IDLUnresolvedIdentifier(self.getLocation(p, 2), p[2]) - p[0] = IDLCallback(self.getLocation(p, 2), self.globalScope(), - identifier, p[4], p[6], isConstructor=True) + p[0] = IDLCallback( + self.getLocation(p, 2), + self.globalScope(), + identifier, + p[4], + p[6], + isConstructor=True, + ) def p_ExceptionMembers(self, p): """ - ExceptionMembers : ExtendedAttributeList ExceptionMember ExceptionMembers - | + 
ExceptionMembers : ExtendedAttributeList ExceptionMember ExceptionMembers + | """ pass def p_Typedef(self, p): """ - Typedef : TYPEDEF TypeWithExtendedAttributes IDENTIFIER SEMICOLON + Typedef : TYPEDEF TypeWithExtendedAttributes IDENTIFIER SEMICOLON """ - typedef = IDLTypedef(self.getLocation(p, 1), self.globalScope(), - p[2], p[3]) + typedef = IDLTypedef(self.getLocation(p, 1), self.globalScope(), p[2], p[3]) p[0] = typedef def p_IncludesStatement(self, p): """ - IncludesStatement : ScopedName INCLUDES ScopedName SEMICOLON + IncludesStatement : ScopedName INCLUDES ScopedName SEMICOLON """ - assert(p[2] == "includes") + assert p[2] == "includes" interface = IDLIdentifierPlaceholder(self.getLocation(p, 1), p[1]) mixin = IDLIdentifierPlaceholder(self.getLocation(p, 3), p[3]) p[0] = IDLIncludesStatement(self.getLocation(p, 1), interface, mixin) def p_Const(self, p): """ - Const : CONST ConstType IDENTIFIER EQUALS ConstValue SEMICOLON + Const : CONST ConstType IDENTIFIER EQUALS ConstValue SEMICOLON """ location = self.getLocation(p, 1) type = p[2] @@ -6497,7 +7513,7 @@ def p_Const(self, p): def p_ConstValueBoolean(self, p): """ - ConstValue : BooleanLiteral + ConstValue : BooleanLiteral """ location = self.getLocation(p, 1) booleanType = BuiltinTypes[IDLBuiltinType.Types.boolean] @@ -6505,7 +7521,7 @@ def p_ConstValueBoolean(self, p): def p_ConstValueInteger(self, p): """ - ConstValue : INTEGER + ConstValue : INTEGER """ location = self.getLocation(p, 1) @@ -6519,14 +7535,16 @@ def p_ConstValueInteger(self, p): def p_ConstValueFloat(self, p): """ - ConstValue : FLOATLITERAL + ConstValue : FLOATLITERAL """ location = self.getLocation(p, 1) - p[0] = IDLValue(location, BuiltinTypes[IDLBuiltinType.Types.unrestricted_float], p[1]) + p[0] = IDLValue( + location, BuiltinTypes[IDLBuiltinType.Types.unrestricted_float], p[1] + ) def p_ConstValueString(self, p): """ - ConstValue : STRING + ConstValue : STRING """ location = self.getLocation(p, 1) stringType = BuiltinTypes[IDLBuiltinType.Types.domstring] @@ -6534,35 +7552,36 @@ def p_ConstValueString(self, p): def p_BooleanLiteralTrue(self, p): """ - BooleanLiteral : TRUE + BooleanLiteral : TRUE """ p[0] = True def p_BooleanLiteralFalse(self, p): """ - BooleanLiteral : FALSE + BooleanLiteral : FALSE """ p[0] = False def p_AttributeOrOperationOrMaplikeOrSetlikeOrIterable(self, p): """ - AttributeOrOperationOrMaplikeOrSetlikeOrIterable : Attribute - | Maplike - | Setlike - | Iterable - | Operation + AttributeOrOperationOrMaplikeOrSetlikeOrIterable : Attribute + | Maplike + | Setlike + | Iterable + | Operation """ p[0] = p[1] def p_Iterable(self, p): """ - Iterable : ITERABLE LT TypeWithExtendedAttributes GT SEMICOLON - | ITERABLE LT TypeWithExtendedAttributes COMMA TypeWithExtendedAttributes GT SEMICOLON + Iterable : ITERABLE LT TypeWithExtendedAttributes GT SEMICOLON + | ITERABLE LT TypeWithExtendedAttributes COMMA TypeWithExtendedAttributes GT SEMICOLON """ location = self.getLocation(p, 2) - identifier = IDLUnresolvedIdentifier(location, "__iterable", - allowDoubleUnderscore=True) - if (len(p) > 6): + identifier = IDLUnresolvedIdentifier( + location, "__iterable", allowDoubleUnderscore=True + ) + if len(p) > 6: keyType = p[3] valueType = p[5] else: @@ -6573,59 +7592,64 @@ def p_Iterable(self, p): def p_Setlike(self, p): """ - Setlike : ReadOnly SETLIKE LT TypeWithExtendedAttributes GT SEMICOLON + Setlike : ReadOnly SETLIKE LT TypeWithExtendedAttributes GT SEMICOLON """ readonly = p[1] maplikeOrSetlikeType = p[2] location = self.getLocation(p, 2) - 
identifier = IDLUnresolvedIdentifier(location, "__setlike", - allowDoubleUnderscore=True) + identifier = IDLUnresolvedIdentifier( + location, "__setlike", allowDoubleUnderscore=True + ) keyType = p[4] valueType = keyType - p[0] = IDLMaplikeOrSetlike(location, identifier, maplikeOrSetlikeType, - readonly, keyType, valueType) + p[0] = IDLMaplikeOrSetlike( + location, identifier, maplikeOrSetlikeType, readonly, keyType, valueType + ) def p_Maplike(self, p): """ - Maplike : ReadOnly MAPLIKE LT TypeWithExtendedAttributes COMMA TypeWithExtendedAttributes GT SEMICOLON + Maplike : ReadOnly MAPLIKE LT TypeWithExtendedAttributes COMMA TypeWithExtendedAttributes GT SEMICOLON """ readonly = p[1] maplikeOrSetlikeType = p[2] location = self.getLocation(p, 2) - identifier = IDLUnresolvedIdentifier(location, "__maplike", - allowDoubleUnderscore=True) + identifier = IDLUnresolvedIdentifier( + location, "__maplike", allowDoubleUnderscore=True + ) keyType = p[4] valueType = p[6] - p[0] = IDLMaplikeOrSetlike(location, identifier, maplikeOrSetlikeType, - readonly, keyType, valueType) + p[0] = IDLMaplikeOrSetlike( + location, identifier, maplikeOrSetlikeType, readonly, keyType, valueType + ) def p_AttributeWithQualifier(self, p): """ - Attribute : Qualifier AttributeRest + Attribute : Qualifier AttributeRest """ static = IDLInterfaceMember.Special.Static in p[1] stringifier = IDLInterfaceMember.Special.Stringifier in p[1] (location, identifier, type, readonly) = p[2] - p[0] = IDLAttribute(location, identifier, type, readonly, - static=static, stringifier=stringifier) + p[0] = IDLAttribute( + location, identifier, type, readonly, static=static, stringifier=stringifier + ) def p_AttributeInherited(self, p): """ - Attribute : INHERIT AttributeRest + Attribute : INHERIT AttributeRest """ (location, identifier, type, readonly) = p[2] p[0] = IDLAttribute(location, identifier, type, readonly, inherit=True) def p_Attribute(self, p): """ - Attribute : AttributeRest + Attribute : AttributeRest """ (location, identifier, type, readonly) = p[1] p[0] = IDLAttribute(location, identifier, type, readonly, inherit=False) def p_AttributeRest(self, p): """ - AttributeRest : ReadOnly ATTRIBUTE TypeWithExtendedAttributes AttributeName SEMICOLON + AttributeRest : ReadOnly ATTRIBUTE TypeWithExtendedAttributes AttributeName SEMICOLON """ location = self.getLocation(p, 2) readonly = p[1] @@ -6635,26 +7659,27 @@ def p_AttributeRest(self, p): def p_ReadOnly(self, p): """ - ReadOnly : READONLY + ReadOnly : READONLY """ p[0] = True def p_ReadOnlyEmpty(self, p): """ - ReadOnly : + ReadOnly : """ p[0] = False def p_Operation(self, p): """ - Operation : Qualifiers OperationRest + Operation : Qualifiers OperationRest """ qualifiers = p[1] # Disallow duplicates in the qualifier set if not len(set(qualifiers)) == len(qualifiers): - raise WebIDLError("Duplicate qualifiers are not allowed", - [self.getLocation(p, 1)]) + raise WebIDLError( + "Duplicate qualifiers are not allowed", [self.getLocation(p, 1)] + ) static = IDLInterfaceMember.Special.Static in p[1] # If static is there that's all that's allowed. 
This is disallowed @@ -6673,8 +7698,10 @@ def p_Operation(self, p): if getter or deleter: if setter: - raise WebIDLError("getter and deleter are incompatible with setter", - [self.getLocation(p, 1)]) + raise WebIDLError( + "getter and deleter are incompatible with setter", + [self.getLocation(p, 1)], + ) (returnType, identifier, arguments) = p[2] @@ -6684,234 +7711,285 @@ def p_Operation(self, p): if getter or deleter: if len(arguments) != 1: - raise WebIDLError("%s has wrong number of arguments" % - ("getter" if getter else "deleter"), - [self.getLocation(p, 2)]) + raise WebIDLError( + "%s has wrong number of arguments" + % ("getter" if getter else "deleter"), + [self.getLocation(p, 2)], + ) argType = arguments[0].type if argType == BuiltinTypes[IDLBuiltinType.Types.domstring]: specialType = IDLMethod.NamedOrIndexed.Named elif argType == BuiltinTypes[IDLBuiltinType.Types.unsigned_long]: specialType = IDLMethod.NamedOrIndexed.Indexed if deleter: - raise WebIDLError("There is no such thing as an indexed deleter.", - [self.getLocation(p, 1)]) + raise WebIDLError( + "There is no such thing as an indexed deleter.", + [self.getLocation(p, 1)], + ) else: - raise WebIDLError("%s has wrong argument type (must be DOMString or UnsignedLong)" % - ("getter" if getter else "deleter"), - [arguments[0].location]) + raise WebIDLError( + "%s has wrong argument type (must be DOMString or UnsignedLong)" + % ("getter" if getter else "deleter"), + [arguments[0].location], + ) if arguments[0].optional or arguments[0].variadic: - raise WebIDLError("%s cannot have %s argument" % - ("getter" if getter else "deleter", - "optional" if arguments[0].optional else "variadic"), - [arguments[0].location]) + raise WebIDLError( + "%s cannot have %s argument" + % ( + "getter" if getter else "deleter", + "optional" if arguments[0].optional else "variadic", + ), + [arguments[0].location], + ) if getter: if returnType.isVoid(): - raise WebIDLError("getter cannot have void return type", - [self.getLocation(p, 2)]) + raise WebIDLError( + "getter cannot have void return type", [self.getLocation(p, 2)] + ) if setter: if len(arguments) != 2: - raise WebIDLError("setter has wrong number of arguments", - [self.getLocation(p, 2)]) + raise WebIDLError( + "setter has wrong number of arguments", [self.getLocation(p, 2)] + ) argType = arguments[0].type if argType == BuiltinTypes[IDLBuiltinType.Types.domstring]: specialType = IDLMethod.NamedOrIndexed.Named elif argType == BuiltinTypes[IDLBuiltinType.Types.unsigned_long]: specialType = IDLMethod.NamedOrIndexed.Indexed else: - raise WebIDLError("settter has wrong argument type (must be DOMString or UnsignedLong)", - [arguments[0].location]) + raise WebIDLError( + "settter has wrong argument type (must be DOMString or UnsignedLong)", + [arguments[0].location], + ) if arguments[0].optional or arguments[0].variadic: - raise WebIDLError("setter cannot have %s argument" % - ("optional" if arguments[0].optional else "variadic"), - [arguments[0].location]) + raise WebIDLError( + "setter cannot have %s argument" + % ("optional" if arguments[0].optional else "variadic"), + [arguments[0].location], + ) if arguments[1].optional or arguments[1].variadic: - raise WebIDLError("setter cannot have %s argument" % - ("optional" if arguments[1].optional else "variadic"), - [arguments[1].location]) + raise WebIDLError( + "setter cannot have %s argument" + % ("optional" if arguments[1].optional else "variadic"), + [arguments[1].location], + ) if stringifier: if len(arguments) != 0: - raise 
WebIDLError("stringifier has wrong number of arguments", - [self.getLocation(p, 2)]) + raise WebIDLError( + "stringifier has wrong number of arguments", + [self.getLocation(p, 2)], + ) if not returnType.isDOMString(): - raise WebIDLError("stringifier must have DOMString return type", - [self.getLocation(p, 2)]) + raise WebIDLError( + "stringifier must have DOMString return type", + [self.getLocation(p, 2)], + ) # identifier might be None. This is only permitted for special methods. if not identifier: - if (not getter and not setter and - not deleter and not legacycaller and not stringifier): - raise WebIDLError("Identifier required for non-special methods", - [self.getLocation(p, 2)]) + if ( + not getter + and not setter + and not deleter + and not legacycaller + and not stringifier + ): + raise WebIDLError( + "Identifier required for non-special methods", + [self.getLocation(p, 2)], + ) location = BuiltinLocation("") identifier = IDLUnresolvedIdentifier( location, - "__%s%s%s%s%s%s" % - ("named" if specialType == IDLMethod.NamedOrIndexed.Named else - "indexed" if specialType == IDLMethod.NamedOrIndexed.Indexed else "", - "getter" if getter else "", - "setter" if setter else "", - "deleter" if deleter else "", - "legacycaller" if legacycaller else "", - "stringifier" if stringifier else ""), - allowDoubleUnderscore=True) - - method = IDLMethod(self.getLocation(p, 2), identifier, returnType, arguments, - static=static, getter=getter, setter=setter, - deleter=deleter, specialType=specialType, - legacycaller=legacycaller, stringifier=stringifier) + "__%s%s%s%s%s%s" + % ( + "named" + if specialType == IDLMethod.NamedOrIndexed.Named + else "indexed" + if specialType == IDLMethod.NamedOrIndexed.Indexed + else "", + "getter" if getter else "", + "setter" if setter else "", + "deleter" if deleter else "", + "legacycaller" if legacycaller else "", + "stringifier" if stringifier else "", + ), + allowDoubleUnderscore=True, + ) + + method = IDLMethod( + self.getLocation(p, 2), + identifier, + returnType, + arguments, + static=static, + getter=getter, + setter=setter, + deleter=deleter, + specialType=specialType, + legacycaller=legacycaller, + stringifier=stringifier, + ) p[0] = method def p_Stringifier(self, p): """ - Operation : STRINGIFIER SEMICOLON + Operation : STRINGIFIER SEMICOLON """ - identifier = IDLUnresolvedIdentifier(BuiltinLocation(""), - "__stringifier", - allowDoubleUnderscore=True) - method = IDLMethod(self.getLocation(p, 1), - identifier, - returnType=BuiltinTypes[IDLBuiltinType.Types.domstring], - arguments=[], - stringifier=True) + identifier = IDLUnresolvedIdentifier( + BuiltinLocation(""), + "__stringifier", + allowDoubleUnderscore=True, + ) + method = IDLMethod( + self.getLocation(p, 1), + identifier, + returnType=BuiltinTypes[IDLBuiltinType.Types.domstring], + arguments=[], + stringifier=True, + ) p[0] = method def p_QualifierStatic(self, p): """ - Qualifier : STATIC + Qualifier : STATIC """ p[0] = [IDLInterfaceMember.Special.Static] def p_QualifierStringifier(self, p): """ - Qualifier : STRINGIFIER + Qualifier : STRINGIFIER """ p[0] = [IDLInterfaceMember.Special.Stringifier] def p_Qualifiers(self, p): """ - Qualifiers : Qualifier - | Specials + Qualifiers : Qualifier + | Specials """ p[0] = p[1] def p_Specials(self, p): """ - Specials : Special Specials + Specials : Special Specials """ p[0] = [p[1]] p[0].extend(p[2]) def p_SpecialsEmpty(self, p): """ - Specials : + Specials : """ p[0] = [] def p_SpecialGetter(self, p): """ - Special : GETTER + Special : GETTER """ p[0] = 
IDLMethod.Special.Getter def p_SpecialSetter(self, p): """ - Special : SETTER + Special : SETTER """ p[0] = IDLMethod.Special.Setter def p_SpecialDeleter(self, p): """ - Special : DELETER + Special : DELETER """ p[0] = IDLMethod.Special.Deleter def p_SpecialLegacyCaller(self, p): """ - Special : LEGACYCALLER + Special : LEGACYCALLER """ p[0] = IDLMethod.Special.LegacyCaller def p_OperationRest(self, p): """ - OperationRest : ReturnType OptionalIdentifier LPAREN ArgumentList RPAREN SEMICOLON + OperationRest : ReturnType OptionalIdentifier LPAREN ArgumentList RPAREN SEMICOLON """ p[0] = (p[1], p[2], p[4]) def p_OptionalIdentifier(self, p): """ - OptionalIdentifier : IDENTIFIER + OptionalIdentifier : IDENTIFIER """ p[0] = IDLUnresolvedIdentifier(self.getLocation(p, 1), p[1]) def p_OptionalIdentifierEmpty(self, p): """ - OptionalIdentifier : + OptionalIdentifier : """ pass def p_ArgumentList(self, p): """ - ArgumentList : Argument Arguments + ArgumentList : Argument Arguments """ p[0] = [p[1]] if p[1] else [] p[0].extend(p[2]) def p_ArgumentListEmpty(self, p): """ - ArgumentList : + ArgumentList : """ p[0] = [] def p_Arguments(self, p): """ - Arguments : COMMA Argument Arguments + Arguments : COMMA Argument Arguments """ p[0] = [p[2]] if p[2] else [] p[0].extend(p[3]) def p_ArgumentsEmpty(self, p): """ - Arguments : + Arguments : """ p[0] = [] def p_Argument(self, p): """ - Argument : ExtendedAttributeList ArgumentRest + Argument : ExtendedAttributeList ArgumentRest """ p[0] = p[2] p[0].addExtendedAttributes(p[1]) def p_ArgumentRestOptional(self, p): """ - ArgumentRest : OPTIONAL TypeWithExtendedAttributes ArgumentName Default + ArgumentRest : OPTIONAL TypeWithExtendedAttributes ArgumentName Default """ t = p[2] assert isinstance(t, IDLType) # Arg names can be reserved identifiers - identifier = IDLUnresolvedIdentifier(self.getLocation(p, 3), p[3], - allowForbidden=True) + identifier = IDLUnresolvedIdentifier( + self.getLocation(p, 3), p[3], allowForbidden=True + ) defaultValue = p[4] - # We can't test t.isAny() here and give it a default value as needed, # since at this point t is not a fully resolved type yet (e.g. it might # be a typedef). We'll handle the 'any' case in IDLArgument.complete. 
- p[0] = IDLArgument(self.getLocation(p, 3), identifier, t, True, defaultValue, False) + p[0] = IDLArgument( + self.getLocation(p, 3), identifier, t, True, defaultValue, False + ) def p_ArgumentRest(self, p): """ - ArgumentRest : Type Ellipsis ArgumentName + ArgumentRest : Type Ellipsis ArgumentName """ t = p[1] assert isinstance(t, IDLType) # Arg names can be reserved identifiers - identifier = IDLUnresolvedIdentifier(self.getLocation(p, 3), p[3], - allowForbidden=True) + identifier = IDLUnresolvedIdentifier( + self.getLocation(p, 3), p[3], allowForbidden=True + ) variadic = p[2] @@ -6923,90 +8001,98 @@ def p_ArgumentRest(self, p): # Any attributes that precede this may apply to the type, so # we configure the argument to forward type attributes down instead of producing # a parse error - p[0] = IDLArgument(self.getLocation(p, 3), identifier, t, variadic, None, variadic, allowTypeAttributes=True) + p[0] = IDLArgument( + self.getLocation(p, 3), + identifier, + t, + variadic, + None, + variadic, + allowTypeAttributes=True, + ) def p_ArgumentName(self, p): """ - ArgumentName : IDENTIFIER - | ArgumentNameKeyword + ArgumentName : IDENTIFIER + | ArgumentNameKeyword """ p[0] = p[1] def p_ArgumentNameKeyword(self, p): """ - ArgumentNameKeyword : ASYNC - | ATTRIBUTE - | CALLBACK - | CONST - | CONSTRUCTOR - | DELETER - | DICTIONARY - | ENUM - | EXCEPTION - | GETTER - | INCLUDES - | INHERIT - | INTERFACE - | ITERABLE - | LEGACYCALLER - | MAPLIKE - | MIXIN - | NAMESPACE - | PARTIAL - | READONLY - | REQUIRED - | SERIALIZER - | SETLIKE - | SETTER - | STATIC - | STRINGIFIER - | TYPEDEF - | UNRESTRICTED + ArgumentNameKeyword : ASYNC + | ATTRIBUTE + | CALLBACK + | CONST + | CONSTRUCTOR + | DELETER + | DICTIONARY + | ENUM + | EXCEPTION + | GETTER + | INCLUDES + | INHERIT + | INTERFACE + | ITERABLE + | LEGACYCALLER + | MAPLIKE + | MIXIN + | NAMESPACE + | PARTIAL + | READONLY + | REQUIRED + | SERIALIZER + | SETLIKE + | SETTER + | STATIC + | STRINGIFIER + | TYPEDEF + | UNRESTRICTED """ p[0] = p[1] def p_AttributeName(self, p): """ - AttributeName : IDENTIFIER - | AttributeNameKeyword + AttributeName : IDENTIFIER + | AttributeNameKeyword """ p[0] = p[1] def p_AttributeNameKeyword(self, p): """ - AttributeNameKeyword : ASYNC - | REQUIRED + AttributeNameKeyword : ASYNC + | REQUIRED """ p[0] = p[1] def p_Ellipsis(self, p): """ - Ellipsis : ELLIPSIS + Ellipsis : ELLIPSIS """ p[0] = True def p_EllipsisEmpty(self, p): """ - Ellipsis : + Ellipsis : """ p[0] = False def p_ExceptionMember(self, p): """ - ExceptionMember : Const - | ExceptionField + ExceptionMember : Const + | ExceptionField """ pass def p_ExceptionField(self, p): """ - ExceptionField : Type IDENTIFIER SEMICOLON + ExceptionField : Type IDENTIFIER SEMICOLON """ pass def p_ExtendedAttributeList(self, p): """ - ExtendedAttributeList : LBRACKET ExtendedAttribute ExtendedAttributes RBRACKET + ExtendedAttributeList : LBRACKET ExtendedAttribute ExtendedAttributes RBRACKET """ p[0] = [p[2]] if p[3]: @@ -7014,117 +8100,117 @@ def p_ExtendedAttributeList(self, p): def p_ExtendedAttributeListEmpty(self, p): """ - ExtendedAttributeList : + ExtendedAttributeList : """ p[0] = [] def p_ExtendedAttribute(self, p): """ - ExtendedAttribute : ExtendedAttributeNoArgs - | ExtendedAttributeArgList - | ExtendedAttributeIdent - | ExtendedAttributeNamedArgList - | ExtendedAttributeIdentList + ExtendedAttribute : ExtendedAttributeNoArgs + | ExtendedAttributeArgList + | ExtendedAttributeIdent + | ExtendedAttributeNamedArgList + | ExtendedAttributeIdentList """ p[0] = 
IDLExtendedAttribute(self.getLocation(p, 1), p[1]) def p_ExtendedAttributeEmpty(self, p): """ - ExtendedAttribute : + ExtendedAttribute : """ pass def p_ExtendedAttributes(self, p): """ - ExtendedAttributes : COMMA ExtendedAttribute ExtendedAttributes + ExtendedAttributes : COMMA ExtendedAttribute ExtendedAttributes """ p[0] = [p[2]] if p[2] else [] p[0].extend(p[3]) def p_ExtendedAttributesEmpty(self, p): """ - ExtendedAttributes : + ExtendedAttributes : """ p[0] = [] def p_Other(self, p): """ - Other : INTEGER - | FLOATLITERAL - | IDENTIFIER - | STRING - | OTHER - | ELLIPSIS - | COLON - | SCOPE - | SEMICOLON - | LT - | EQUALS - | GT - | QUESTIONMARK - | DOMSTRING - | BYTESTRING - | USVSTRING - | UTF8STRING - | JSSTRING - | PROMISE - | ANY - | BOOLEAN - | BYTE - | DOUBLE - | FALSE - | FLOAT - | LONG - | NULL - | OBJECT - | OCTET - | OR - | OPTIONAL - | RECORD - | SEQUENCE - | SHORT - | SYMBOL - | TRUE - | UNSIGNED - | VOID - | ArgumentNameKeyword + Other : INTEGER + | FLOATLITERAL + | IDENTIFIER + | STRING + | OTHER + | ELLIPSIS + | COLON + | SCOPE + | SEMICOLON + | LT + | EQUALS + | GT + | QUESTIONMARK + | DOMSTRING + | BYTESTRING + | USVSTRING + | UTF8STRING + | JSSTRING + | PROMISE + | ANY + | BOOLEAN + | BYTE + | DOUBLE + | FALSE + | FLOAT + | LONG + | NULL + | OBJECT + | OCTET + | OR + | OPTIONAL + | RECORD + | SEQUENCE + | SHORT + | SYMBOL + | TRUE + | UNSIGNED + | VOID + | ArgumentNameKeyword """ pass def p_OtherOrComma(self, p): """ - OtherOrComma : Other - | COMMA + OtherOrComma : Other + | COMMA """ pass def p_TypeSingleType(self, p): """ - Type : SingleType + Type : SingleType """ p[0] = p[1] def p_TypeUnionType(self, p): """ - Type : UnionType Null + Type : UnionType Null """ p[0] = self.handleNullable(p[1], p[2]) def p_TypeWithExtendedAttributes(self, p): """ - TypeWithExtendedAttributes : ExtendedAttributeList Type + TypeWithExtendedAttributes : ExtendedAttributeList Type """ p[0] = p[2].withExtendedAttributes(p[1]) def p_SingleTypeDistinguishableType(self, p): """ - SingleType : DistinguishableType + SingleType : DistinguishableType """ p[0] = p[1] def p_SingleTypeAnyType(self, p): """ - SingleType : ANY + SingleType : ANY """ p[0] = BuiltinTypes[IDLBuiltinType.Types.any] @@ -7132,13 +8218,13 @@ def p_SingleTypeAnyType(self, p): # not Type. Promise types can't be null, hence no "Null" in there. 
def p_SingleTypePromiseType(self, p): """ - SingleType : PROMISE LT ReturnType GT + SingleType : PROMISE LT ReturnType GT """ p[0] = IDLPromiseType(self.getLocation(p, 1), p[3]) def p_UnionType(self, p): """ - UnionType : LPAREN UnionMemberType OR UnionMemberType UnionMemberTypes RPAREN + UnionType : LPAREN UnionMemberType OR UnionMemberType UnionMemberTypes RPAREN """ types = [p[2], p[4]] types.extend(p[5]) @@ -7146,35 +8232,35 @@ def p_UnionType(self, p): def p_UnionMemberTypeDistinguishableType(self, p): """ - UnionMemberType : ExtendedAttributeList DistinguishableType + UnionMemberType : ExtendedAttributeList DistinguishableType """ p[0] = p[2].withExtendedAttributes(p[1]) def p_UnionMemberType(self, p): """ - UnionMemberType : UnionType Null + UnionMemberType : UnionType Null """ p[0] = self.handleNullable(p[1], p[2]) def p_UnionMemberTypes(self, p): """ - UnionMemberTypes : OR UnionMemberType UnionMemberTypes + UnionMemberTypes : OR UnionMemberType UnionMemberTypes """ p[0] = [p[2]] p[0].extend(p[3]) def p_UnionMemberTypesEmpty(self, p): """ - UnionMemberTypes : + UnionMemberTypes : """ p[0] = [] def p_DistinguishableType(self, p): """ - DistinguishableType : PrimitiveType Null - | ARRAYBUFFER Null - | READABLESTREAM Null - | OBJECT Null + DistinguishableType : PrimitiveType Null + | ARRAYBUFFER Null + | READABLESTREAM Null + | OBJECT Null """ if p[1] == "object": type = BuiltinTypes[IDLBuiltinType.Types.object] @@ -7189,13 +8275,13 @@ def p_DistinguishableType(self, p): def p_DistinguishableTypeStringType(self, p): """ - DistinguishableType : StringType Null + DistinguishableType : StringType Null """ p[0] = self.handleNullable(p[1], p[2]) def p_DistinguishableTypeSequenceType(self, p): """ - DistinguishableType : SEQUENCE LT TypeWithExtendedAttributes GT Null + DistinguishableType : SEQUENCE LT TypeWithExtendedAttributes GT Null """ innerType = p[3] type = IDLSequenceType(self.getLocation(p, 1), innerType) @@ -7203,7 +8289,7 @@ def p_DistinguishableTypeSequenceType(self, p): def p_DistinguishableTypeRecordType(self, p): """ - DistinguishableType : RECORD LT StringType COMMA TypeWithExtendedAttributes GT Null + DistinguishableType : RECORD LT StringType COMMA TypeWithExtendedAttributes GT Null """ keyType = p[3] valueType = p[5] @@ -7212,14 +8298,15 @@ def p_DistinguishableTypeRecordType(self, p): def p_DistinguishableTypeScopedName(self, p): """ - DistinguishableType : ScopedName Null + DistinguishableType : ScopedName Null """ assert isinstance(p[1], IDLUnresolvedIdentifier) if p[1].name == "Promise": - raise WebIDLError("Promise used without saying what it's " - "parametrized over", - [self.getLocation(p, 1)]) + raise WebIDLError( + "Promise used without saying what it's " "parametrized over", + [self.getLocation(p, 1)], + ) type = None @@ -7228,8 +8315,9 @@ def p_DistinguishableTypeScopedName(self, p): obj = self.globalScope()._lookupIdentifier(p[1]) assert not obj.isType() if obj.isTypedef(): - type = IDLTypedefType(self.getLocation(p, 1), obj.innerType, - obj.identifier.name) + type = IDLTypedefType( + self.getLocation(p, 1), obj.innerType, obj.identifier.name + ) elif obj.isCallback() and not obj.isInterface(): type = IDLCallbackType(self.getLocation(p, 1), obj) else: @@ -7244,13 +8332,13 @@ def p_DistinguishableTypeScopedName(self, p): def p_ConstType(self, p): """ - ConstType : PrimitiveType + ConstType : PrimitiveType """ p[0] = BuiltinTypes[p[1]] def p_ConstTypeIdentifier(self, p): """ - ConstType : IDENTIFIER + ConstType : IDENTIFIER """ identifier = 
IDLUnresolvedIdentifier(self.getLocation(p, 1), p[1]) @@ -7258,110 +8346,110 @@ def p_ConstTypeIdentifier(self, p): def p_PrimitiveTypeUint(self, p): """ - PrimitiveType : UnsignedIntegerType + PrimitiveType : UnsignedIntegerType """ p[0] = p[1] def p_PrimitiveTypeBoolean(self, p): """ - PrimitiveType : BOOLEAN + PrimitiveType : BOOLEAN """ p[0] = IDLBuiltinType.Types.boolean def p_PrimitiveTypeByte(self, p): """ - PrimitiveType : BYTE + PrimitiveType : BYTE """ p[0] = IDLBuiltinType.Types.byte def p_PrimitiveTypeOctet(self, p): """ - PrimitiveType : OCTET + PrimitiveType : OCTET """ p[0] = IDLBuiltinType.Types.octet def p_PrimitiveTypeFloat(self, p): """ - PrimitiveType : FLOAT + PrimitiveType : FLOAT """ p[0] = IDLBuiltinType.Types.float def p_PrimitiveTypeUnrestictedFloat(self, p): """ - PrimitiveType : UNRESTRICTED FLOAT + PrimitiveType : UNRESTRICTED FLOAT """ p[0] = IDLBuiltinType.Types.unrestricted_float def p_PrimitiveTypeDouble(self, p): """ - PrimitiveType : DOUBLE + PrimitiveType : DOUBLE """ p[0] = IDLBuiltinType.Types.double def p_PrimitiveTypeUnrestictedDouble(self, p): """ - PrimitiveType : UNRESTRICTED DOUBLE + PrimitiveType : UNRESTRICTED DOUBLE """ p[0] = IDLBuiltinType.Types.unrestricted_double def p_StringType(self, p): """ - StringType : BuiltinStringType + StringType : BuiltinStringType """ p[0] = BuiltinTypes[p[1]] def p_BuiltinStringTypeDOMString(self, p): """ - BuiltinStringType : DOMSTRING + BuiltinStringType : DOMSTRING """ p[0] = IDLBuiltinType.Types.domstring def p_BuiltinStringTypeBytestring(self, p): """ - BuiltinStringType : BYTESTRING + BuiltinStringType : BYTESTRING """ p[0] = IDLBuiltinType.Types.bytestring def p_BuiltinStringTypeUSVString(self, p): """ - BuiltinStringType : USVSTRING + BuiltinStringType : USVSTRING """ p[0] = IDLBuiltinType.Types.usvstring def p_BuiltinStringTypeUTF8String(self, p): """ - BuiltinStringType : UTF8STRING + BuiltinStringType : UTF8STRING """ p[0] = IDLBuiltinType.Types.utf8string def p_BuiltinStringTypeJSString(self, p): """ - BuiltinStringType : JSSTRING + BuiltinStringType : JSSTRING """ p[0] = IDLBuiltinType.Types.jsstring def p_UnsignedIntegerTypeUnsigned(self, p): """ - UnsignedIntegerType : UNSIGNED IntegerType + UnsignedIntegerType : UNSIGNED IntegerType """ # Adding one to a given signed integer type gets you the unsigned type: p[0] = p[2] + 1 def p_UnsignedIntegerType(self, p): """ - UnsignedIntegerType : IntegerType + UnsignedIntegerType : IntegerType """ p[0] = p[1] def p_IntegerTypeShort(self, p): """ - IntegerType : SHORT + IntegerType : SHORT """ p[0] = IDLBuiltinType.Types.short def p_IntegerTypeLong(self, p): """ - IntegerType : LONG OptionalLong + IntegerType : LONG OptionalLong """ if p[2]: p[0] = IDLBuiltinType.Types.long_long @@ -7370,20 +8458,20 @@ def p_IntegerTypeLong(self, p): def p_OptionalLong(self, p): """ - OptionalLong : LONG + OptionalLong : LONG """ p[0] = True def p_OptionalLongEmpty(self, p): """ - OptionalLong : + OptionalLong : """ p[0] = False def p_Null(self, p): """ - Null : QUESTIONMARK - | + Null : QUESTIONMARK + | """ if len(p) > 1: p[0] = self.getLocation(p, 1) @@ -7392,33 +8480,33 @@ def p_Null(self, p): def p_ReturnTypeType(self, p): """ - ReturnType : Type + ReturnType : Type """ p[0] = p[1] def p_ReturnTypeVoid(self, p): """ - ReturnType : VOID + ReturnType : VOID """ p[0] = BuiltinTypes[IDLBuiltinType.Types.void] def p_ScopedName(self, p): """ - ScopedName : AbsoluteScopedName - | RelativeScopedName + ScopedName : AbsoluteScopedName + | RelativeScopedName """ p[0] = p[1] def 
p_AbsoluteScopedName(self, p): """ - AbsoluteScopedName : SCOPE IDENTIFIER ScopedNameParts + AbsoluteScopedName : SCOPE IDENTIFIER ScopedNameParts """ assert False pass def p_RelativeScopedName(self, p): """ - RelativeScopedName : IDENTIFIER ScopedNameParts + RelativeScopedName : IDENTIFIER ScopedNameParts """ assert not p[2] # Not implemented! @@ -7426,104 +8514,110 @@ def p_RelativeScopedName(self, p): def p_ScopedNameParts(self, p): """ - ScopedNameParts : SCOPE IDENTIFIER ScopedNameParts + ScopedNameParts : SCOPE IDENTIFIER ScopedNameParts """ assert False pass def p_ScopedNamePartsEmpty(self, p): """ - ScopedNameParts : + ScopedNameParts : """ p[0] = None def p_ExtendedAttributeNoArgs(self, p): """ - ExtendedAttributeNoArgs : IDENTIFIER + ExtendedAttributeNoArgs : IDENTIFIER """ p[0] = (p[1],) def p_ExtendedAttributeArgList(self, p): """ - ExtendedAttributeArgList : IDENTIFIER LPAREN ArgumentList RPAREN + ExtendedAttributeArgList : IDENTIFIER LPAREN ArgumentList RPAREN """ p[0] = (p[1], p[3]) def p_ExtendedAttributeIdent(self, p): """ - ExtendedAttributeIdent : IDENTIFIER EQUALS STRING - | IDENTIFIER EQUALS IDENTIFIER + ExtendedAttributeIdent : IDENTIFIER EQUALS STRING + | IDENTIFIER EQUALS IDENTIFIER """ p[0] = (p[1], p[3]) def p_ExtendedAttributeNamedArgList(self, p): """ - ExtendedAttributeNamedArgList : IDENTIFIER EQUALS IDENTIFIER LPAREN ArgumentList RPAREN + ExtendedAttributeNamedArgList : IDENTIFIER EQUALS IDENTIFIER LPAREN ArgumentList RPAREN """ p[0] = (p[1], p[3], p[5]) def p_ExtendedAttributeIdentList(self, p): """ - ExtendedAttributeIdentList : IDENTIFIER EQUALS LPAREN IdentifierList RPAREN + ExtendedAttributeIdentList : IDENTIFIER EQUALS LPAREN IdentifierList RPAREN """ p[0] = (p[1], p[4]) def p_IdentifierList(self, p): """ - IdentifierList : IDENTIFIER Identifiers + IdentifierList : IDENTIFIER Identifiers """ idents = list(p[2]) # This is only used for identifier-list-valued extended attributes, and if # we're going to restrict to IDENTIFIER here we should at least allow # escaping with leading '_' as usual for identifiers. ident = p[1] - if ident[0] == '_': + if ident[0] == "_": ident = ident[1:] idents.insert(0, ident) p[0] = idents def p_IdentifiersList(self, p): """ - Identifiers : COMMA IDENTIFIER Identifiers + Identifiers : COMMA IDENTIFIER Identifiers """ idents = list(p[3]) # This is only used for identifier-list-valued extended attributes, and if # we're going to restrict to IDENTIFIER here we should at least allow # escaping with leading '_' as usual for identifiers. ident = p[2] - if ident[0] == '_': + if ident[0] == "_": ident = ident[1:] idents.insert(0, ident) p[0] = idents def p_IdentifiersEmpty(self, p): """ - Identifiers : + Identifiers : """ p[0] = [] def p_error(self, p): if not p: - raise WebIDLError("Syntax Error at end of file. Possibly due to missing semicolon(;), braces(}) or both", - [self._filename]) + raise WebIDLError( + "Syntax Error at end of file. 
Possibly due to missing semicolon(;), braces(}) or both", + [self._filename], + ) else: - raise WebIDLError("invalid syntax", [Location(self.lexer, p.lineno, p.lexpos, self._filename)]) + raise WebIDLError( + "invalid syntax", + [Location(self.lexer, p.lineno, p.lexpos, self._filename)], + ) - def __init__(self, outputdir='', lexer=None): + def __init__(self, outputdir="", lexer=None): Tokenizer.__init__(self, outputdir, lexer) logger = SqueakyCleanLogger() try: - self.parser = yacc.yacc(module=self, - outputdir=outputdir, - errorlog=logger, - write_tables=False, - # Pickling the grammar is a speedup in - # some cases (older Python?) but a - # significant slowdown in others. - # We're not pickling for now, until it - # becomes a speedup again. - # , picklefile='WebIDLGrammar.pkl' + self.parser = yacc.yacc( + module=self, + outputdir=outputdir, + errorlog=logger, + write_tables=False, + # Pickling the grammar is a speedup in + # some cases (older Python?) but a + # significant slowdown in others. + # We're not pickling for now, until it + # becomes a speedup again. + # , picklefile='WebIDLGrammar.pkl' ) finally: logger.reportGrammarErrors() @@ -7543,12 +8637,16 @@ def _installBuiltins(self, scope): assert isinstance(scope, IDLScope) # range omits the last value. - for x in range(IDLBuiltinType.Types.ArrayBuffer, IDLBuiltinType.Types.Float64Array + 1): + for x in range( + IDLBuiltinType.Types.ArrayBuffer, IDLBuiltinType.Types.Float64Array + 1 + ): builtin = BuiltinTypes[x] name = builtin.name - typedef = IDLTypedef(BuiltinLocation(""), scope, builtin, name) + typedef = IDLTypedef( + BuiltinLocation(""), scope, builtin, name + ) - @ staticmethod + @staticmethod def handleNullable(type, questionMarkLocation): if questionMarkLocation is not None: type = IDLNullableType(questionMarkLocation, type) @@ -7585,21 +8683,33 @@ def finish(self): iterable = m break if iterable and iterable.isPairIterator(): + def simpleExtendedAttr(str): - return IDLExtendedAttribute(iface.location, (str, )) + return IDLExtendedAttribute(iface.location, (str,)) + nextMethod = IDLMethod( iface.location, IDLUnresolvedIdentifier(iface.location, "next"), - BuiltinTypes[IDLBuiltinType.Types.object], []) + BuiltinTypes[IDLBuiltinType.Types.object], + [], + ) nextMethod.addExtendedAttributes([simpleExtendedAttr("Throws")]) - itr_ident = IDLUnresolvedIdentifier(iface.location, - iface.identifier.name + "Iterator") + itr_ident = IDLUnresolvedIdentifier( + iface.location, iface.identifier.name + "Iterator" + ) classNameOverride = iface.identifier.name + " Iterator" - itr_iface = IDLInterface(iface.location, self.globalScope(), - itr_ident, None, [nextMethod], - isKnownNonPartial=True, - classNameOverride=classNameOverride) - itr_iface.addExtendedAttributes([simpleExtendedAttr("NoInterfaceObject")]) + itr_iface = IDLInterface( + iface.location, + self.globalScope(), + itr_ident, + None, + [nextMethod], + isKnownNonPartial=True, + classNameOverride=classNameOverride, + ) + itr_iface.addExtendedAttributes( + [simpleExtendedAttr("NoInterfaceObject")] + ) # Make sure the exposure set for the iterator interface is the # same as the exposure set for the iterable interface, because # we're going to generate methods on the iterable that return @@ -7615,10 +8725,12 @@ def simpleExtendedAttr(str): # Make sure we finish IDLIncludesStatements before we finish the # IDLInterfaces. # XXX khuey hates this bit and wants to nuke it from orbit. 
- includesStatements = [p for p in self._productions if - isinstance(p, IDLIncludesStatement)] - otherStatements = [p for p in self._productions if - not isinstance(p, IDLIncludesStatement)] + includesStatements = [ + p for p in self._productions if isinstance(p, IDLIncludesStatement) + ] + otherStatements = [ + p for p in self._productions if not isinstance(p, IDLIncludesStatement) + ] for production in includesStatements: production.finish(self.globalScope()) for production in otherStatements: @@ -7650,12 +8762,21 @@ def reset(self): def main(): # Parse arguments. from optparse import OptionParser + usageString = "usage: %prog [options] files" o = OptionParser(usage=usageString) - o.add_option("--cachedir", dest='cachedir', default=None, - help="Directory in which to cache lex/parse tables.") - o.add_option("--verbose-errors", action='store_true', default=False, - help="When an error happens, display the Python traceback.") + o.add_option( + "--cachedir", + dest="cachedir", + default=None, + help="Directory in which to cache lex/parse tables.", + ) + o.add_option( + "--verbose-errors", + action="store_true", + default=False, + help="When an error happens, display the Python traceback.", + ) (options, args) = o.parse_args() if len(args) < 1: @@ -7669,11 +8790,11 @@ def main(): try: for filename in fileList: fullPath = os.path.normpath(os.path.join(baseDir, filename)) - f = open(fullPath, 'rb') + f = open(fullPath, "rb") lines = f.readlines() f.close() print(fullPath) - parser.parse(''.join(lines), fullPath) + parser.parse("".join(lines), fullPath) parser.finish() except WebIDLError as e: if options.verbose_errors: @@ -7681,5 +8802,6 @@ def main(): else: print(e) -if __name__ == '__main__': + +if __name__ == "__main__": main() diff --git a/dom/bindings/parser/runtests.py b/dom/bindings/parser/runtests.py index 56546e5d3e28c1..0204fc91a437f4 100644 --- a/dom/bindings/parser/runtests.py +++ b/dom/bindings/parser/runtests.py @@ -9,6 +9,7 @@ import traceback import WebIDL + class TestHarness(object): def __init__(self, test, verbose): self.test = test @@ -52,8 +53,9 @@ def check(self, a, b, msg): else: self.test_fail(msg + " | Got %s expected %s" % (a, b)) + def run_tests(tests, verbose): - testdir = os.path.join(os.path.dirname(__file__), 'tests') + testdir = os.path.join(os.path.dirname(__file__), "tests") if not tests: tests = glob.iglob(os.path.join(testdir, "*.py")) sys.path.append(testdir) @@ -63,15 +65,14 @@ def run_tests(tests, verbose): for test in tests: (testpath, ext) = os.path.splitext(os.path.basename(test)) - _test = __import__(testpath, globals(), locals(), ['WebIDLTest']) + _test = __import__(testpath, globals(), locals(), ["WebIDLTest"]) harness = TestHarness(test, verbose) harness.start() try: _test.WebIDLTest.__call__(WebIDL.Parser(), harness) except Exception as ex: - harness.test_fail("Unhandled exception in test %s: %s" % - (testpath, ex)) + harness.test_fail("Unhandled exception in test %s: %s" % (testpath, ex)) traceback.print_exc() finally: harness.finish() @@ -81,28 +82,40 @@ def run_tests(tests, verbose): if verbose or failed_tests: print() - print('Result summary:') - print('Successful: %d' % all_passed) - print('Unexpected: %d' % \ - sum(len(failures) for _, failures in failed_tests)) + print("Result summary:") + print("Successful: %d" % all_passed) + print("Unexpected: %d" % sum(len(failures) for _, failures in failed_tests)) for test, failures in failed_tests: - print('%s:' % test) + print("%s:" % test) for failure in failures: - print('TEST-UNEXPECTED-FAIL | 
%s' % failure) + print("TEST-UNEXPECTED-FAIL | %s" % failure) return 1 if failed_tests else 0 + def get_parser(): usage = """%(prog)s [OPTIONS] [TESTS] Where TESTS are relative to the tests directory.""" parser = argparse.ArgumentParser(usage=usage) - parser.add_argument('-q', '--quiet', action='store_false', dest='verbose', - help="Don't print passing tests.", default=None) - parser.add_argument('-v', '--verbose', action='store_true', dest='verbose', - help="Run tests in verbose mode.") - parser.add_argument('tests', nargs="*", help="Tests to run") + parser.add_argument( + "-q", + "--quiet", + action="store_false", + dest="verbose", + help="Don't print passing tests.", + default=None, + ) + parser.add_argument( + "-v", + "--verbose", + action="store_true", + dest="verbose", + help="Run tests in verbose mode.", + ) + parser.add_argument("tests", nargs="*", help="Tests to run") return parser -if __name__ == '__main__': + +if __name__ == "__main__": parser = get_parser() args = parser.parse_args() if args.verbose is None: @@ -110,6 +123,6 @@ def get_parser(): # Make sure the current directory is in the python path so we can cache the # result of the webidlyacc.py generation. - sys.path.append('.') + sys.path.append(".") sys.exit(run_tests(args.tests, verbose=args.verbose)) diff --git a/dom/bindings/parser/tests/test_any_null.py b/dom/bindings/parser/tests/test_any_null.py index e3b690bf6f16fb..f9afdacb02f607 100644 --- a/dom/bindings/parser/tests/test_any_null.py +++ b/dom/bindings/parser/tests/test_any_null.py @@ -1,11 +1,13 @@ def WebIDLTest(parser, harness): threw = False try: - parser.parse(""" + parser.parse( + """ interface DoubleNull { attribute any? foo; }; - """) + """ + ) results = parser.finish() except: diff --git a/dom/bindings/parser/tests/test_argument_identifier_conflicts.py b/dom/bindings/parser/tests/test_argument_identifier_conflicts.py index eb1f6d3c92ec0a..6b06baddb52630 100644 --- a/dom/bindings/parser/tests/test_argument_identifier_conflicts.py +++ b/dom/bindings/parser/tests/test_argument_identifier_conflicts.py @@ -1,11 +1,13 @@ def WebIDLTest(parser, harness): threw = False try: - parser.parse(""" + parser.parse( + """ interface ArgumentIdentifierConflict { void foo(boolean arg1, boolean arg1); }; - """) + """ + ) results = parser.finish() except: diff --git a/dom/bindings/parser/tests/test_argument_keywords.py b/dom/bindings/parser/tests/test_argument_keywords.py index e190f617e26d19..16e183ad7d43b7 100644 --- a/dom/bindings/parser/tests/test_argument_keywords.py +++ b/dom/bindings/parser/tests/test_argument_keywords.py @@ -1,17 +1,22 @@ def WebIDLTest(parser, harness): - parser.parse(""" + parser.parse( + """ interface Foo { void foo(object constructor); }; - """) + """ + ) results = parser.finish() - harness.check(len(results), 1, "Should have an interface"); - iface = results[0]; - harness.check(len(iface.members), 1, "Should have an operation"); - operation = iface.members[0]; - harness.check(len(operation.signatures()), 1, "Should have one signature"); - (retval, args) = operation.signatures()[0]; - harness.check(len(args), 1, "Should have an argument"); - harness.check(args[0].identifier.name, "constructor", - "Should have an identifier named 'constructor'"); + harness.check(len(results), 1, "Should have an interface") + iface = results[0] + harness.check(len(iface.members), 1, "Should have an operation") + operation = iface.members[0] + harness.check(len(operation.signatures()), 1, "Should have one signature") + (retval, args) = operation.signatures()[0] + 
harness.check(len(args), 1, "Should have an argument") + harness.check( + args[0].identifier.name, + "constructor", + "Should have an identifier named 'constructor'", + ) diff --git a/dom/bindings/parser/tests/test_argument_novoid.py b/dom/bindings/parser/tests/test_argument_novoid.py index ef8c2229aed59c..832f0586545ba2 100644 --- a/dom/bindings/parser/tests/test_argument_novoid.py +++ b/dom/bindings/parser/tests/test_argument_novoid.py @@ -1,11 +1,13 @@ def WebIDLTest(parser, harness): threw = False try: - parser.parse(""" + parser.parse( + """ interface VoidArgument1 { void foo(void arg2); }; - """) + """ + ) results = parser.finish() except: diff --git a/dom/bindings/parser/tests/test_arraybuffer.py b/dom/bindings/parser/tests/test_arraybuffer.py index 4a96c0ff5126dd..52f77e097c68de 100644 --- a/dom/bindings/parser/tests/test_arraybuffer.py +++ b/dom/bindings/parser/tests/test_arraybuffer.py @@ -1,7 +1,9 @@ import WebIDL + def WebIDLTest(parser, harness): - parser.parse(""" + parser.parse( + """ interface TestArrayBuffer { attribute ArrayBuffer bufferAttr; void bufferMethod(ArrayBuffer arg1, ArrayBuffer? arg2, sequence arg3); @@ -36,7 +38,8 @@ def WebIDLTest(parser, harness): attribute Float64Array float64ArrayAttr; void float64ArrayMethod(Float64Array arg1, Float64Array? arg2, sequence arg3); }; - """) + """ + ) results = parser.finish() @@ -58,21 +61,32 @@ def checkStuff(attr, method, t): harness.ok(retType.isVoid(), "Should have a void return type") harness.check(len(arguments), 3, "Expect 3 arguments") - harness.check(str(arguments[0].type), t, "Expect an ArrayBuffer type") - harness.ok(arguments[0].type.isSpiderMonkeyInterface(), "Should test as a js interface") - - harness.check(str(arguments[1].type), t + "OrNull", "Expect an ArrayBuffer type") - harness.ok(arguments[1].type.inner.isSpiderMonkeyInterface(), "Should test as a js interface") - - harness.check(str(arguments[2].type), t + "Sequence", "Expect an ArrayBuffer type") - harness.ok(arguments[2].type.inner.isSpiderMonkeyInterface(), "Should test as a js interface") - - - checkStuff(members[0], members[1], "ArrayBuffer") - checkStuff(members[2], members[3], "ArrayBufferView") - checkStuff(members[4], members[5], "Int8Array") - checkStuff(members[6], members[7], "Uint8Array") - checkStuff(members[8], members[9], "Uint8ClampedArray") + harness.check(str(arguments[0].type), t, "Expect an ArrayBuffer type") + harness.ok( + arguments[0].type.isSpiderMonkeyInterface(), "Should test as a js interface" + ) + + harness.check( + str(arguments[1].type), t + "OrNull", "Expect an ArrayBuffer type" + ) + harness.ok( + arguments[1].type.inner.isSpiderMonkeyInterface(), + "Should test as a js interface", + ) + + harness.check( + str(arguments[2].type), t + "Sequence", "Expect an ArrayBuffer type" + ) + harness.ok( + arguments[2].type.inner.isSpiderMonkeyInterface(), + "Should test as a js interface", + ) + + checkStuff(members[0], members[1], "ArrayBuffer") + checkStuff(members[2], members[3], "ArrayBufferView") + checkStuff(members[4], members[5], "Int8Array") + checkStuff(members[6], members[7], "Uint8Array") + checkStuff(members[8], members[9], "Uint8ClampedArray") checkStuff(members[10], members[11], "Int16Array") checkStuff(members[12], members[13], "Uint16Array") checkStuff(members[14], members[15], "Int32Array") diff --git a/dom/bindings/parser/tests/test_attr.py b/dom/bindings/parser/tests/test_attr.py index 35f680aaa8235f..e19689a81a97d8 100644 --- a/dom/bindings/parser/tests/test_attr.py +++ 
b/dom/bindings/parser/tests/test_attr.py @@ -1,31 +1,35 @@ import WebIDL + def WebIDLTest(parser, harness): - testData = [("::TestAttr%s::b", "b", "Byte%s", False), - ("::TestAttr%s::rb", "rb", "Byte%s", True), - ("::TestAttr%s::o", "o", "Octet%s", False), - ("::TestAttr%s::ro", "ro", "Octet%s", True), - ("::TestAttr%s::s", "s", "Short%s", False), - ("::TestAttr%s::rs", "rs", "Short%s", True), - ("::TestAttr%s::us", "us", "UnsignedShort%s", False), - ("::TestAttr%s::rus", "rus", "UnsignedShort%s", True), - ("::TestAttr%s::l", "l", "Long%s", False), - ("::TestAttr%s::rl", "rl", "Long%s", True), - ("::TestAttr%s::ul", "ul", "UnsignedLong%s", False), - ("::TestAttr%s::rul", "rul", "UnsignedLong%s", True), - ("::TestAttr%s::ll", "ll", "LongLong%s", False), - ("::TestAttr%s::rll", "rll", "LongLong%s", True), - ("::TestAttr%s::ull", "ull", "UnsignedLongLong%s", False), - ("::TestAttr%s::rull", "rull", "UnsignedLongLong%s", True), - ("::TestAttr%s::str", "str", "String%s", False), - ("::TestAttr%s::rstr", "rstr", "String%s", True), - ("::TestAttr%s::obj", "obj", "Object%s", False), - ("::TestAttr%s::robj", "robj", "Object%s", True), - ("::TestAttr%s::object", "object", "Object%s", False), - ("::TestAttr%s::f", "f", "Float%s", False), - ("::TestAttr%s::rf", "rf", "Float%s", True)] - - parser.parse(""" + testData = [ + ("::TestAttr%s::b", "b", "Byte%s", False), + ("::TestAttr%s::rb", "rb", "Byte%s", True), + ("::TestAttr%s::o", "o", "Octet%s", False), + ("::TestAttr%s::ro", "ro", "Octet%s", True), + ("::TestAttr%s::s", "s", "Short%s", False), + ("::TestAttr%s::rs", "rs", "Short%s", True), + ("::TestAttr%s::us", "us", "UnsignedShort%s", False), + ("::TestAttr%s::rus", "rus", "UnsignedShort%s", True), + ("::TestAttr%s::l", "l", "Long%s", False), + ("::TestAttr%s::rl", "rl", "Long%s", True), + ("::TestAttr%s::ul", "ul", "UnsignedLong%s", False), + ("::TestAttr%s::rul", "rul", "UnsignedLong%s", True), + ("::TestAttr%s::ll", "ll", "LongLong%s", False), + ("::TestAttr%s::rll", "rll", "LongLong%s", True), + ("::TestAttr%s::ull", "ull", "UnsignedLongLong%s", False), + ("::TestAttr%s::rull", "rull", "UnsignedLongLong%s", True), + ("::TestAttr%s::str", "str", "String%s", False), + ("::TestAttr%s::rstr", "rstr", "String%s", True), + ("::TestAttr%s::obj", "obj", "Object%s", False), + ("::TestAttr%s::robj", "robj", "Object%s", True), + ("::TestAttr%s::object", "object", "Object%s", False), + ("::TestAttr%s::f", "f", "Float%s", False), + ("::TestAttr%s::rf", "rf", "Float%s", True), + ] + + parser.parse( + """ interface TestAttr { attribute byte b; readonly attribute byte rb; @@ -77,13 +81,13 @@ def WebIDLTest(parser, harness): attribute float? f; readonly attribute float? 
rf; }; - """) + """ + ) results = parser.finish() def checkAttr(attr, QName, name, type, readonly): - harness.ok(isinstance(attr, WebIDL.IDLAttribute), - "Should be an IDLAttribute") + harness.ok(isinstance(attr, WebIDL.IDLAttribute), "Should be an IDLAttribute") harness.ok(attr.isAttr(), "Attr is an Attr") harness.ok(not attr.isMethod(), "Attr is not an method") harness.ok(not attr.isConst(), "Attr is not a const") @@ -95,11 +99,14 @@ def checkAttr(attr, QName, name, type, readonly): harness.ok(True, "TestAttr interface parsed without error.") harness.check(len(results), 2, "Should be two productions.") iface = results[0] - harness.ok(isinstance(iface, WebIDL.IDLInterface), - "Should be an IDLInterface") - harness.check(iface.identifier.QName(), "::TestAttr", "Interface has the right QName") + harness.ok(isinstance(iface, WebIDL.IDLInterface), "Should be an IDLInterface") + harness.check( + iface.identifier.QName(), "::TestAttr", "Interface has the right QName" + ) harness.check(iface.identifier.name, "TestAttr", "Interface has the right name") - harness.check(len(iface.members), len(testData), "Expect %s members" % len(testData)) + harness.check( + len(iface.members), len(testData), "Expect %s members" % len(testData) + ) attrs = iface.members @@ -110,11 +117,16 @@ def checkAttr(attr, QName, name, type, readonly): checkAttr(attr, QName % "", name, type % "", readonly) iface = results[1] - harness.ok(isinstance(iface, WebIDL.IDLInterface), - "Should be an IDLInterface") - harness.check(iface.identifier.QName(), "::TestAttrNullable", "Interface has the right QName") - harness.check(iface.identifier.name, "TestAttrNullable", "Interface has the right name") - harness.check(len(iface.members), len(testData), "Expect %s members" % len(testData)) + harness.ok(isinstance(iface, WebIDL.IDLInterface), "Should be an IDLInterface") + harness.check( + iface.identifier.QName(), "::TestAttrNullable", "Interface has the right QName" + ) + harness.check( + iface.identifier.name, "TestAttrNullable", "Interface has the right name" + ) + harness.check( + len(iface.members), len(testData), "Expect %s members" % len(testData) + ) attrs = iface.members @@ -127,11 +139,13 @@ def checkAttr(attr, QName, name, type, readonly): parser = parser.reset() threw = False try: - parser.parse(""" + parser.parse( + """ interface A { [SetterThrows] readonly attribute boolean foo; }; - """) + """ + ) results = parser.finish() except Exception as x: threw = True @@ -140,11 +154,13 @@ def checkAttr(attr, QName, name, type, readonly): parser = parser.reset() threw = False try: - parser.parse(""" + parser.parse( + """ interface A { [Throw] readonly attribute boolean foo; }; - """) + """ + ) results = parser.finish() except Exception as x: threw = True @@ -153,24 +169,30 @@ def checkAttr(attr, QName, name, type, readonly): parser = parser.reset() threw = False try: - parser.parse(""" + parser.parse( + """ interface A { [SameObject] readonly attribute boolean foo; }; - """) + """ + ) results = parser.finish() except Exception as x: threw = True - harness.ok(threw, "Should not allow [SameObject] on attributes not of interface type") + harness.ok( + threw, "Should not allow [SameObject] on attributes not of interface type" + ) parser = parser.reset() threw = False try: - parser.parse(""" + parser.parse( + """ interface A { [SameObject] readonly attribute A foo; }; - """) + """ + ) results = parser.finish() except Exception as x: threw = True diff --git a/dom/bindings/parser/tests/test_attr_sequence_type.py 
b/dom/bindings/parser/tests/test_attr_sequence_type.py index fb1b97812bca25..f3249de900a6de 100644 --- a/dom/bindings/parser/tests/test_attr_sequence_type.py +++ b/dom/bindings/parser/tests/test_attr_sequence_type.py @@ -1,11 +1,13 @@ def WebIDLTest(parser, harness): threw = False try: - parser.parse(""" + parser.parse( + """ interface AttrSequenceType { attribute sequence foo; }; - """) + """ + ) results = parser.finish() except: @@ -17,51 +19,59 @@ def WebIDLTest(parser, harness): threw = False try: - parser.parse(""" + parser.parse( + """ interface AttrUnionWithSequenceType { attribute (sequence or DOMString) foo; }; - """) + """ + ) results = parser.finish() except: threw = True - harness.ok(threw, - "Attribute type must not be a union with a sequence member type") + harness.ok(threw, "Attribute type must not be a union with a sequence member type") parser.reset() threw = False try: - parser.parse(""" + parser.parse( + """ interface AttrNullableUnionWithSequenceType { attribute (sequence? or DOMString) foo; }; - """) + """ + ) results = parser.finish() except: threw = True - harness.ok(threw, - "Attribute type must not be a union with a nullable sequence " - "member type") + harness.ok( + threw, + "Attribute type must not be a union with a nullable sequence " "member type", + ) parser.reset() threw = False try: - parser.parse(""" + parser.parse( + """ interface AttrUnionWithUnionWithSequenceType { attribute ((sequence or DOMString) or AttrUnionWithUnionWithSequenceType) foo; }; - """) + """ + ) results = parser.finish() except: threw = True - harness.ok(threw, - "Attribute type must not be a union type with a union member " - "type that has a sequence member type") + harness.ok( + threw, + "Attribute type must not be a union type with a union member " + "type that has a sequence member type", + ) diff --git a/dom/bindings/parser/tests/test_attributes_on_types.py b/dom/bindings/parser/tests/test_attributes_on_types.py index ff08791d16f475..386148ed261c8a 100644 --- a/dom/bindings/parser/tests/test_attributes_on_types.py +++ b/dom/bindings/parser/tests/test_attributes_on_types.py @@ -1,11 +1,13 @@ # Import the WebIDL module, so we can do isinstance checks and whatnot import WebIDL + def WebIDLTest(parser, harness): # Basic functionality threw = False try: - parser.parse(""" + parser.parse( + """ typedef [EnforceRange] long Foo; typedef [Clamp] long Bar; typedef [TreatNullAs=EmptyString] DOMString Baz; @@ -40,34 +42,75 @@ def WebIDLTest(parser, harness): interface Iterable { iterable<[Clamp] long, [EnforceRange] long>; }; - """) + """ + ) results = parser.finish() except: threw = True harness.ok(not threw, "Should not have thrown on parsing normal") if not threw: - harness.check(results[0].innerType.hasEnforceRange(), True, "Foo is [EnforceRange]") + harness.check( + results[0].innerType.hasEnforceRange(), True, "Foo is [EnforceRange]" + ) harness.check(results[1].innerType.hasClamp(), True, "Bar is [Clamp]") - harness.check(results[2].innerType.treatNullAsEmpty, True, "Baz is [TreatNullAs=EmptyString]") + harness.check( + results[2].innerType.treatNullAsEmpty, + True, + "Baz is [TreatNullAs=EmptyString]", + ) A = results[3] - harness.check(A.members[0].type.hasEnforceRange(), True, "A.a is [EnforceRange]") + harness.check( + A.members[0].type.hasEnforceRange(), True, "A.a is [EnforceRange]" + ) harness.check(A.members[1].type.hasClamp(), True, "A.b is [Clamp]") - harness.check(A.members[2].type.hasEnforceRange(), True, "A.c is [EnforceRange]") - 
harness.check(A.members[3].type.hasEnforceRange(), True, "A.d is [EnforceRange]") + harness.check( + A.members[2].type.hasEnforceRange(), True, "A.c is [EnforceRange]" + ) + harness.check( + A.members[3].type.hasEnforceRange(), True, "A.d is [EnforceRange]" + ) B = results[4] - harness.check(B.members[0].type.hasEnforceRange(), True, "B.typedefFoo is [EnforceRange]") - harness.check(B.members[1].type.hasEnforceRange(), True, "B.foo is [EnforceRange]") + harness.check( + B.members[0].type.hasEnforceRange(), True, "B.typedefFoo is [EnforceRange]" + ) + harness.check( + B.members[1].type.hasEnforceRange(), True, "B.foo is [EnforceRange]" + ) harness.check(B.members[2].type.hasClamp(), True, "B.bar is [Clamp]") - harness.check(B.members[3].type.treatNullAsEmpty, True, "B.baz is [TreatNullAs=EmptyString]") + harness.check( + B.members[3].type.treatNullAsEmpty, + True, + "B.baz is [TreatNullAs=EmptyString]", + ) method = B.members[4].signatures()[0][1] - harness.check(method[0].type.hasEnforceRange(), True, "foo argument of method is [EnforceRange]") - harness.check(method[1].type.hasClamp(), True, "bar argument of method is [Clamp]") - harness.check(method[2].type.treatNullAsEmpty, True, "baz argument of method is [TreatNullAs=EmptyString]") + harness.check( + method[0].type.hasEnforceRange(), + True, + "foo argument of method is [EnforceRange]", + ) + harness.check( + method[1].type.hasClamp(), True, "bar argument of method is [Clamp]" + ) + harness.check( + method[2].type.treatNullAsEmpty, + True, + "baz argument of method is [TreatNullAs=EmptyString]", + ) method2 = B.members[5].signatures()[0][1] - harness.check(method[0].type.hasEnforceRange(), True, "foo argument of method2 is [EnforceRange]") - harness.check(method[1].type.hasClamp(), True, "bar argument of method2 is [Clamp]") - harness.check(method[2].type.treatNullAsEmpty, True, "baz argument of method2 is [TreatNullAs=EmptyString]") + harness.check( + method[0].type.hasEnforceRange(), + True, + "foo argument of method2 is [EnforceRange]", + ) + harness.check( + method[1].type.hasClamp(), True, "bar argument of method2 is [Clamp]" + ) + harness.check( + method[2].type.treatNullAsEmpty, + True, + "baz argument of method2 is [TreatNullAs=EmptyString]", + ) C = results[5] harness.ok(C.members[0].type.nullable(), "C.foo is nullable") harness.ok(C.members[0].type.hasEnforceRange(), "C.foo has [EnforceRange]") @@ -75,12 +118,18 @@ def WebIDLTest(parser, harness): harness.ok(C.members[1].type.hasClamp(), "C.bar has [Clamp]") method = C.members[2].signatures()[0][1] harness.ok(method[0].type.nullable(), "foo argument of method is nullable") - harness.ok(method[0].type.hasEnforceRange(), "foo argument of method has [EnforceRange]") + harness.ok( + method[0].type.hasEnforceRange(), + "foo argument of method has [EnforceRange]", + ) harness.ok(method[1].type.nullable(), "bar argument of method is nullable") harness.ok(method[1].type.hasClamp(), "bar argument of method has [Clamp]") method2 = C.members[3].signatures()[0][1] harness.ok(method2[0].type.nullable(), "foo argument of method2 is nullable") - harness.ok(method2[0].type.hasEnforceRange(), "foo argument of method2 has [EnforceRange]") + harness.ok( + method2[0].type.hasEnforceRange(), + "foo argument of method2 has [EnforceRange]", + ) harness.ok(method2[1].type.nullable(), "bar argument of method2 is nullable") harness.ok(method2[1].type.hasClamp(), "bar argument of method2 has [Clamp]") @@ -88,7 +137,8 @@ def WebIDLTest(parser, harness): parser = parser.reset() threw = False try: - 
parser.parse(""" + parser.parse( + """ typedef [AllowShared] ArrayBufferView Foo; dictionary A { required [AllowShared] ArrayBufferView a; @@ -115,7 +165,8 @@ def WebIDLTest(parser, harness): interface Iterable { iterable<[Clamp] long, [AllowShared] ArrayBufferView>; }; - """) + """ + ) results = parser.finish() except: threw = True @@ -131,63 +182,101 @@ def WebIDLTest(parser, harness): harness.ok(B.members[0].type.hasAllowShared(), "B.typedefFoo is [AllowShared]") harness.ok(B.members[1].type.hasAllowShared(), "B.foo is [AllowShared]") method = B.members[2].signatures()[0][1] - harness.ok(method[0].type.hasAllowShared(), "foo argument of method is [AllowShared]") + harness.ok( + method[0].type.hasAllowShared(), "foo argument of method is [AllowShared]" + ) method2 = B.members[3].signatures()[0][1] - harness.ok(method2[0].type.hasAllowShared(), "foo argument of method2 is [AllowShared]") + harness.ok( + method2[0].type.hasAllowShared(), "foo argument of method2 is [AllowShared]" + ) C = results[3] harness.ok(C.members[0].type.nullable(), "C.foo is nullable") harness.ok(C.members[0].type.hasAllowShared(), "C.foo is [AllowShared]") method = C.members[1].signatures()[0][1] harness.ok(method[0].type.nullable(), "foo argument of method is nullable") - harness.ok(method[0].type.hasAllowShared(), "foo argument of method is [AllowShared]") + harness.ok( + method[0].type.hasAllowShared(), "foo argument of method is [AllowShared]" + ) method2 = C.members[2].signatures()[0][1] harness.ok(method2[0].type.nullable(), "foo argument of method2 is nullable") - harness.ok(method2[0].type.hasAllowShared(), "foo argument of method2 is [AllowShared]") + harness.ok( + method2[0].type.hasAllowShared(), "foo argument of method2 is [AllowShared]" + ) - ATTRIBUTES = [("[Clamp]", "long"), ("[EnforceRange]", "long"), - ("[TreatNullAs=EmptyString]", "DOMString"), ("[AllowShared]", "ArrayBufferView")] + ATTRIBUTES = [ + ("[Clamp]", "long"), + ("[EnforceRange]", "long"), + ("[TreatNullAs=EmptyString]", "DOMString"), + ("[AllowShared]", "ArrayBufferView"), + ] TEMPLATES = [ - ("required dictionary members", """ + ( + "required dictionary members", + """ dictionary Foo { %s required %s foo; }; - """), - ("optional arguments", """ + """, + ), + ( + "optional arguments", + """ interface Foo { void foo(%s optional %s foo); }; - """), - ("typedefs", """ + """, + ), + ( + "typedefs", + """ %s typedef %s foo; - """), - ("attributes", """ + """, + ), + ( + "attributes", + """ interface Foo { %s attribute %s foo; }; - """), - ("readonly attributes", """ + """, + ), + ( + "readonly attributes", + """ interface Foo { readonly attribute %s %s foo; }; - """), - ("readonly unresolved attributes", """ + """, + ), + ( + "readonly unresolved attributes", + """ interface Foo { readonly attribute Bar baz; }; typedef %s %s Bar; - """), - ("method", """ + """, + ), + ( + "method", + """ interface Foo { %s %s foo(); }; - """), - ("interface",""" + """, + ), + ( + "interface", + """ %s interface Foo { attribute %s foo; }; - """), - ("partial interface",""" + """, + ), + ( + "partial interface", + """ interface Foo { void foo(); }; @@ -195,20 +284,29 @@ def WebIDLTest(parser, harness): partial interface Foo { attribute %s bar; }; - """), - ("interface mixin",""" + """, + ), + ( + "interface mixin", + """ %s interface mixin Foo { attribute %s foo; }; - """), - ("namespace",""" + """, + ), + ( + "namespace", + """ %s namespace Foo { attribute %s foo; }; - """), - ("partial namespace",""" + """, + ), + ( + "partial namespace", + """ namespace 
Foo { void foo(); }; @@ -216,14 +314,18 @@ def WebIDLTest(parser, harness): partial namespace Foo { attribute %s bar; }; - """), - ("dictionary",""" + """, + ), + ( + "dictionary", + """ %s dictionary Foo { %s foo; }; - """) - ]; + """, + ), + ] for (name, template) in TEMPLATES: parser = parser.reset() @@ -242,15 +344,16 @@ def WebIDLTest(parser, harness): parser.finish() except: threw = True - harness.ok(threw, - "Should not allow %s on %s" % (attribute, name)) + harness.ok(threw, "Should not allow %s on %s" % (attribute, name)) parser = parser.reset() threw = False try: - parser.parse(""" + parser.parse( + """ typedef [Clamp, EnforceRange] long Foo; - """) + """ + ) parser.finish() except: threw = True @@ -260,23 +363,26 @@ def WebIDLTest(parser, harness): parser = parser.reset() threw = False try: - parser.parse(""" + parser.parse( + """ typedef [EnforceRange, Clamp] long Foo; - """) + """ + ) parser.finish() except: threw = True harness.ok(threw, "Should not allow mixing [Clamp] and [EnforceRange]") - parser = parser.reset() threw = False try: - parser.parse(""" + parser.parse( + """ typedef [Clamp] long Foo; typedef [EnforceRange] Foo bar; - """) + """ + ) parser.finish() except: threw = True @@ -286,25 +392,36 @@ def WebIDLTest(parser, harness): parser = parser.reset() threw = False try: - parser.parse(""" + parser.parse( + """ typedef [EnforceRange] long Foo; typedef [Clamp] Foo bar; - """) + """ + ) parser.finish() except: threw = True harness.ok(threw, "Should not allow mixing [Clamp] and [EnforceRange] via typedefs") - TYPES = ["DOMString", "unrestricted float", "float", "unrestricted double", "double"] + TYPES = [ + "DOMString", + "unrestricted float", + "float", + "unrestricted double", + "double", + ] for type in TYPES: parser = parser.reset() threw = False try: - parser.parse(""" + parser.parse( + """ typedef [Clamp] %s Foo; - """ % type) + """ + % type + ) parser.finish() except: threw = True @@ -314,22 +431,26 @@ def WebIDLTest(parser, harness): parser = parser.reset() threw = False try: - parser.parse(""" + parser.parse( + """ typedef [EnforceRange] %s Foo; - """ % type) + """ + % type + ) parser.finish() except: threw = True harness.ok(threw, "Should not allow [EnforceRange] on %s" % type) - parser = parser.reset() threw = False try: - parser.parse(""" + parser.parse( + """ typedef [TreatNullAs=EmptyString] long Foo; - """) + """ + ) parser.finish() except: threw = True @@ -339,9 +460,11 @@ def WebIDLTest(parser, harness): parser = parser.reset() threw = False try: - parser.parse(""" + parser.parse( + """ typedef [TreatNullAs=EmptyString] JSString Foo; - """) + """ + ) parser.finish() except: threw = True @@ -351,9 +474,11 @@ def WebIDLTest(parser, harness): parser = parser.reset() threw = False try: - parser.parse(""" + parser.parse( + """ typedef [TreatNullAs=EmptyString] DOMString? 
Foo; - """) + """ + ) parser.finish() except: threw = True @@ -363,9 +488,11 @@ def WebIDLTest(parser, harness): parser = parser.reset() threw = False try: - parser.parse(""" + parser.parse( + """ typedef [AllowShared] DOMString Foo; - """) + """ + ) results = parser.finish() except: threw = True @@ -374,9 +501,11 @@ def WebIDLTest(parser, harness): parser = parser.reset() threw = False try: - parser.parse(""" + parser.parse( + """ typedef [AllowShared=something] ArrayBufferView Foo; - """) + """ + ) results = parser.finish() except: threw = True @@ -385,31 +514,41 @@ def WebIDLTest(parser, harness): parser = parser.reset() threw = False try: - parser.parse(""" + parser.parse( + """ interface Foo { void foo([Clamp] Bar arg); }; typedef long Bar; - """) + """ + ) results = parser.finish() except: threw = True harness.ok(not threw, "Should allow type attributes on unresolved types") - harness.check(results[0].members[0].signatures()[0][1][0].type.hasClamp(), True, - "Unresolved types with type attributes should correctly resolve with attributes") + harness.check( + results[0].members[0].signatures()[0][1][0].type.hasClamp(), + True, + "Unresolved types with type attributes should correctly resolve with attributes", + ) parser = parser.reset() threw = False try: - parser.parse(""" + parser.parse( + """ interface Foo { void foo(Bar arg); }; typedef [Clamp] long Bar; - """) + """ + ) results = parser.finish() except: threw = True harness.ok(not threw, "Should allow type attributes on typedefs") - harness.check(results[0].members[0].signatures()[0][1][0].type.hasClamp(), True, - "Unresolved types that resolve to typedefs with attributes should correctly resolve with attributes") + harness.check( + results[0].members[0].signatures()[0][1][0].type.hasClamp(), + True, + "Unresolved types that resolve to typedefs with attributes should correctly resolve with attributes", + ) diff --git a/dom/bindings/parser/tests/test_builtin_filename.py b/dom/bindings/parser/tests/test_builtin_filename.py index 631e52eba0b108..6c913bba822463 100644 --- a/dom/bindings/parser/tests/test_builtin_filename.py +++ b/dom/bindings/parser/tests/test_builtin_filename.py @@ -1,11 +1,14 @@ import WebIDL + def WebIDLTest(parser, harness): - parser.parse(""" + parser.parse( + """ interface Test { attribute long b; }; - """); + """ + ) attr = parser.finish()[0].members[0] - harness.check(attr.type.filename(), '', 'Filename on builtin type') + harness.check(attr.type.filename(), "", "Filename on builtin type") diff --git a/dom/bindings/parser/tests/test_builtins.py b/dom/bindings/parser/tests/test_builtins.py index f8563fc2d9b8d0..cd22c1678b300a 100644 --- a/dom/bindings/parser/tests/test_builtins.py +++ b/dom/bindings/parser/tests/test_builtins.py @@ -1,7 +1,9 @@ import WebIDL + def WebIDLTest(parser, harness): - parser.parse(""" + parser.parse( + """ interface TestBuiltins { attribute boolean b; attribute byte s8; @@ -14,16 +16,18 @@ def WebIDLTest(parser, harness): attribute unsigned long long u64; attribute DOMTimeStamp ts; }; - """) + """ + ) results = parser.finish() harness.ok(True, "TestBuiltins interface parsed without error.") harness.check(len(results), 1, "Should be one production") - harness.ok(isinstance(results[0], WebIDL.IDLInterface), - "Should be an IDLInterface") + harness.ok(isinstance(results[0], WebIDL.IDLInterface), "Should be an IDLInterface") iface = results[0] - harness.check(iface.identifier.QName(), "::TestBuiltins", "Interface has the right QName") + harness.check( + iface.identifier.QName(), 
"::TestBuiltins", "Interface has the right QName" + ) harness.check(iface.identifier.name, "TestBuiltins", "Interface has the right name") harness.check(iface.parent, None, "Interface has no parent") @@ -31,11 +35,26 @@ def WebIDLTest(parser, harness): harness.check(len(members), 10, "Should be one production") names = ["b", "s8", "u8", "s16", "u16", "s32", "u32", "s64", "u64", "ts"] - types = ["Boolean", "Byte", "Octet", "Short", "UnsignedShort", "Long", "UnsignedLong", "LongLong", "UnsignedLongLong", "UnsignedLongLong"] + types = [ + "Boolean", + "Byte", + "Octet", + "Short", + "UnsignedShort", + "Long", + "UnsignedLong", + "LongLong", + "UnsignedLongLong", + "UnsignedLongLong", + ] for i in range(10): attr = members[i] harness.ok(isinstance(attr, WebIDL.IDLAttribute), "Should be an IDLAttribute") - harness.check(attr.identifier.QName(), "::TestBuiltins::" + names[i], "Attr has correct QName") + harness.check( + attr.identifier.QName(), + "::TestBuiltins::" + names[i], + "Attr has correct QName", + ) harness.check(attr.identifier.name, names[i], "Attr has correct name") harness.check(str(attr.type), types[i], "Attr type is the correct name") harness.ok(attr.type.isPrimitive(), "Should be a primitive type") diff --git a/dom/bindings/parser/tests/test_bytestring.py b/dom/bindings/parser/tests/test_bytestring.py index fa83e9e2d579ec..eb499ebee68db9 100644 --- a/dom/bindings/parser/tests/test_bytestring.py +++ b/dom/bindings/parser/tests/test_bytestring.py @@ -2,24 +2,30 @@ import WebIDL + def WebIDLTest(parser, harness): - parser.parse(""" + parser.parse( + """ interface TestByteString { attribute ByteString bs; attribute DOMString ds; }; - """) + """ + ) - results = parser.finish(); + results = parser.finish() harness.ok(True, "TestByteString interface parsed without error.") harness.check(len(results), 1, "Should be one production") - harness.ok(isinstance(results[0], WebIDL.IDLInterface), - "Should be an IDLInterface") + harness.ok(isinstance(results[0], WebIDL.IDLInterface), "Should be an IDLInterface") iface = results[0] - harness.check(iface.identifier.QName(), "::TestByteString", "Interface has the right QName") - harness.check(iface.identifier.name, "TestByteString", "Interface has the right name") + harness.check( + iface.identifier.QName(), "::TestByteString", "Interface has the right QName" + ) + harness.check( + iface.identifier.name, "TestByteString", "Interface has the right name" + ) harness.check(iface.parent, None, "Interface has no parent") members = iface.members @@ -27,7 +33,9 @@ def WebIDLTest(parser, harness): attr = members[0] harness.ok(isinstance(attr, WebIDL.IDLAttribute), "Should be an IDLAttribute") - harness.check(attr.identifier.QName(), "::TestByteString::bs", "Attr has correct QName") + harness.check( + attr.identifier.QName(), "::TestByteString::bs", "Attr has correct QName" + ) harness.check(attr.identifier.name, "bs", "Attr has correct name") harness.check(str(attr.type), "ByteString", "Attr type is the correct name") harness.ok(attr.type.isByteString(), "Should be ByteString type") @@ -37,7 +45,9 @@ def WebIDLTest(parser, harness): # now check we haven't broken DOMStrings in the process. 
attr = members[1] harness.ok(isinstance(attr, WebIDL.IDLAttribute), "Should be an IDLAttribute") - harness.check(attr.identifier.QName(), "::TestByteString::ds", "Attr has correct QName") + harness.check( + attr.identifier.QName(), "::TestByteString::ds", "Attr has correct QName" + ) harness.check(attr.identifier.name, "ds", "Attr has correct name") harness.check(str(attr.type), "String", "Attr type is the correct name") harness.ok(attr.type.isDOMString(), "Should be DOMString type") @@ -47,53 +57,69 @@ def WebIDLTest(parser, harness): # Cannot represent constant ByteString in IDL. threw = False try: - parser.parse(""" + parser.parse( + """ interface ConstByteString { const ByteString foo = "hello" }; - """) + """ + ) except WebIDL.WebIDLError: threw = True - harness.ok(threw, "Should have thrown a WebIDL error for ByteString default in interface") + harness.ok( + threw, "Should have thrown a WebIDL error for ByteString default in interface" + ) # Can have optional ByteStrings with default values try: - parser.parse(""" + parser.parse( + """ interface OptionalByteString { void passByteString(optional ByteString arg = "hello"); }; - """) - results2 = parser.finish(); + """ + ) + results2 = parser.finish() except WebIDL.WebIDLError as e: - harness.ok(False, - "Should not have thrown a WebIDL error for ByteString " - "default in dictionary. " + str(e)) + harness.ok( + False, + "Should not have thrown a WebIDL error for ByteString " + "default in dictionary. " + str(e), + ) # Can have a default ByteString value in a dictionary try: - parser.parse(""" + parser.parse( + """ dictionary OptionalByteStringDict { ByteString item = "some string"; }; - """) - results3 = parser.finish(); + """ + ) + results3 = parser.finish() except WebIDL.WebIDLError as e: - harness.ok(False, - "Should not have thrown a WebIDL error for ByteString " - "default in dictionary. " + str(e)) + harness.ok( + False, + "Should not have thrown a WebIDL error for ByteString " + "default in dictionary. " + str(e), + ) # Don't allow control characters in ByteString literals threw = False try: - parser.parse(""" + parser.parse( + """ dictionary OptionalByteStringDict2 { ByteString item = "\x03"; }; - """) + """ + ) results4 = parser.finish() except WebIDL.WebIDLError as e: threw = True - harness.ok(threw, - "Should have thrown a WebIDL error for invalid ByteString " - "default in dictionary") + harness.ok( + threw, + "Should have thrown a WebIDL error for invalid ByteString " + "default in dictionary", + ) diff --git a/dom/bindings/parser/tests/test_callback.py b/dom/bindings/parser/tests/test_callback.py index c304d085ce52ef..407644a6a8dbd6 100644 --- a/dom/bindings/parser/tests/test_callback.py +++ b/dom/bindings/parser/tests/test_callback.py @@ -1,32 +1,37 @@ import WebIDL + def WebIDLTest(parser, harness): - parser.parse(""" + parser.parse( + """ interface TestCallback { attribute CallbackType? 
listener; }; callback CallbackType = boolean (unsigned long arg); - """) + """ + ) results = parser.finish() harness.ok(True, "TestCallback interface parsed without error.") harness.check(len(results), 2, "Should be two productions.") iface = results[0] - harness.ok(isinstance(iface, WebIDL.IDLInterface), - "Should be an IDLInterface") - harness.check(iface.identifier.QName(), "::TestCallback", "Interface has the right QName") + harness.ok(isinstance(iface, WebIDL.IDLInterface), "Should be an IDLInterface") + harness.check( + iface.identifier.QName(), "::TestCallback", "Interface has the right QName" + ) harness.check(iface.identifier.name, "TestCallback", "Interface has the right name") harness.check(len(iface.members), 1, "Expect %s members" % 1) attr = iface.members[0] - harness.ok(isinstance(attr, WebIDL.IDLAttribute), - "Should be an IDLAttribute") + harness.ok(isinstance(attr, WebIDL.IDLAttribute), "Should be an IDLAttribute") harness.ok(attr.isAttr(), "Should be an attribute") harness.ok(not attr.isMethod(), "Attr is not an method") harness.ok(not attr.isConst(), "Attr is not a const") - harness.check(attr.identifier.QName(), "::TestCallback::listener", "Attr has the right QName") + harness.check( + attr.identifier.QName(), "::TestCallback::listener", "Attr has the right QName" + ) harness.check(attr.identifier.name, "listener", "Attr has the right name") t = attr.type harness.ok(not isinstance(t, WebIDL.IDLWrapperType), "Attr has the right type") diff --git a/dom/bindings/parser/tests/test_callback_constructor.py b/dom/bindings/parser/tests/test_callback_constructor.py index 4999deef62392f..0c3cd2e9d4a949 100644 --- a/dom/bindings/parser/tests/test_callback_constructor.py +++ b/dom/bindings/parser/tests/test_callback_constructor.py @@ -1,33 +1,46 @@ import WebIDL + def WebIDLTest(parser, harness): - parser.parse(""" + parser.parse( + """ interface TestCallbackConstructor { attribute CallbackConstructorType? 
constructorAttribute; }; callback constructor CallbackConstructorType = TestCallbackConstructor (unsigned long arg); - """) + """ + ) results = parser.finish() harness.ok(True, "TestCallbackConstructor interface parsed without error.") harness.check(len(results), 2, "Should be two productions.") iface = results[0] - harness.ok(isinstance(iface, WebIDL.IDLInterface), - "Should be an IDLInterface") - harness.check(iface.identifier.QName(), "::TestCallbackConstructor", "Interface has the right QName") - harness.check(iface.identifier.name, "TestCallbackConstructor", "Interface has the right name") + harness.ok(isinstance(iface, WebIDL.IDLInterface), "Should be an IDLInterface") + harness.check( + iface.identifier.QName(), + "::TestCallbackConstructor", + "Interface has the right QName", + ) + harness.check( + iface.identifier.name, "TestCallbackConstructor", "Interface has the right name" + ) harness.check(len(iface.members), 1, "Expect %s members" % 1) attr = iface.members[0] - harness.ok(isinstance(attr, WebIDL.IDLAttribute), - "Should be an IDLAttribute") + harness.ok(isinstance(attr, WebIDL.IDLAttribute), "Should be an IDLAttribute") harness.ok(attr.isAttr(), "Should be an attribute") harness.ok(not attr.isMethod(), "Attr is not an method") harness.ok(not attr.isConst(), "Attr is not a const") - harness.check(attr.identifier.QName(), "::TestCallbackConstructor::constructorAttribute", "Attr has the right QName") - harness.check(attr.identifier.name, "constructorAttribute", "Attr has the right name") + harness.check( + attr.identifier.QName(), + "::TestCallbackConstructor::constructorAttribute", + "Attr has the right QName", + ) + harness.check( + attr.identifier.name, "constructorAttribute", "Attr has the right name" + ) t = attr.type harness.ok(not isinstance(t, WebIDL.IDLWrapperType), "Attr has the right type") harness.ok(isinstance(t, WebIDL.IDLNullableType), "Attr has the right type") @@ -39,10 +52,12 @@ def WebIDLTest(parser, harness): parser.reset() threw = False try: - parser.parse(""" + parser.parse( + """ [TreatNonObjectAsNull] callback constructor CallbackConstructorType = object (); - """) + """ + ) results = parser.finish() except: threw = True @@ -52,12 +67,16 @@ def WebIDLTest(parser, harness): parser.reset() threw = False try: - parser.parse(""" + parser.parse( + """ [MOZ_CAN_RUN_SCRIPT_BOUNDARY] callback constructor CallbackConstructorType = object (); - """) + """ + ) results = parser.finish() except: threw = True - harness.ok(threw, "Should not permit MOZ_CAN_RUN_SCRIPT_BOUNDARY callback constructors") + harness.ok( + threw, "Should not permit MOZ_CAN_RUN_SCRIPT_BOUNDARY callback constructors" + ) diff --git a/dom/bindings/parser/tests/test_callback_interface.py b/dom/bindings/parser/tests/test_callback_interface.py index e4789dae168bec..030392eac9b9ed 100644 --- a/dom/bindings/parser/tests/test_callback_interface.py +++ b/dom/bindings/parser/tests/test_callback_interface.py @@ -1,11 +1,14 @@ import WebIDL + def WebIDLTest(parser, harness): - parser.parse(""" + parser.parse( + """ callback interface TestCallbackInterface { attribute boolean bool; }; - """) + """ + ) results = parser.finish() @@ -16,13 +19,15 @@ def WebIDLTest(parser, harness): parser = parser.reset() threw = False try: - parser.parse(""" + parser.parse( + """ interface TestInterface { }; callback interface TestCallbackInterface : TestInterface { attribute boolean bool; }; - """) + """ + ) results = parser.finish() except: threw = True @@ -32,13 +37,15 @@ def WebIDLTest(parser, harness): parser = 
parser.reset() threw = False try: - parser.parse(""" + parser.parse( + """ interface TestInterface : TestCallbackInterface { }; callback interface TestCallbackInterface { attribute boolean bool; }; - """) + """ + ) results = parser.finish() except: threw = True @@ -46,7 +53,8 @@ def WebIDLTest(parser, harness): harness.ok(threw, "Should not allow callback parent of non-callback interface") parser = parser.reset() - parser.parse(""" + parser.parse( + """ callback interface TestCallbackInterface1 { void foo(); }; @@ -86,9 +94,13 @@ def WebIDLTest(parser, harness): callback interface TestCallbackInterface10 : TestCallbackInterface1 { void bar(); }; - """) + """ + ) results = parser.finish() for (i, iface) in enumerate(results): - harness.check(iface.isSingleOperationInterface(), i < 4, - "Interface %s should be a single operation interface" % - iface.identifier.name) + harness.check( + iface.isSingleOperationInterface(), + i < 4, + "Interface %s should be a single operation interface" + % iface.identifier.name, + ) diff --git a/dom/bindings/parser/tests/test_cereactions.py b/dom/bindings/parser/tests/test_cereactions.py index f726907c2fc4b0..bf925e11ec91aa 100644 --- a/dom/bindings/parser/tests/test_cereactions.py +++ b/dom/bindings/parser/tests/test_cereactions.py @@ -1,11 +1,13 @@ def WebIDLTest(parser, harness): threw = False try: - parser.parse(""" + parser.parse( + """ interface Foo { [CEReactions(DOMString a)] void foo(boolean arg2); }; - """) + """ + ) results = parser.finish() except: @@ -16,11 +18,13 @@ def WebIDLTest(parser, harness): parser = parser.reset() threw = False try: - parser.parse(""" + parser.parse( + """ interface Foo { [CEReactions(DOMString b)] readonly attribute boolean bar; }; - """) + """ + ) results = parser.finish() except: @@ -31,54 +35,72 @@ def WebIDLTest(parser, harness): parser = parser.reset() threw = False try: - parser.parse(""" + parser.parse( + """ interface Foo { [CEReactions] attribute boolean bar; }; - """) + """ + ) results = parser.finish() except Exception as e: - harness.ok(False, "Shouldn't have thrown for [CEReactions] used on writable attribute. %s" % e) + harness.ok( + False, + "Shouldn't have thrown for [CEReactions] used on writable attribute. %s" + % e, + ) threw = True parser = parser.reset() threw = False try: - parser.parse(""" + parser.parse( + """ interface Foo { [CEReactions] void foo(boolean arg2); }; - """) + """ + ) results = parser.finish() except Exception as e: - harness.ok(False, "Shouldn't have thrown for [CEReactions] used on regular operations. %s" % e) + harness.ok( + False, + "Shouldn't have thrown for [CEReactions] used on regular operations. 
%s" + % e, + ) threw = True parser = parser.reset() threw = False try: - parser.parse(""" + parser.parse( + """ interface Foo { [CEReactions] readonly attribute boolean A; }; - """) + """ + ) results = parser.finish() except: threw = True - harness.ok(threw, "Should have thrown for [CEReactions] used on a readonly attribute") + harness.ok( + threw, "Should have thrown for [CEReactions] used on a readonly attribute" + ) parser = parser.reset() threw = False try: - parser.parse(""" + parser.parse( + """ [CEReactions] interface Foo { } - """) + """ + ) results = parser.finish() except: @@ -89,45 +111,47 @@ def WebIDLTest(parser, harness): parser = parser.reset() threw = False try: - parser.parse(""" + parser.parse( + """ interface Foo { [CEReactions] getter any(DOMString name); }; - """) + """ + ) results = parser.finish() except: threw = True - harness.ok(threw, - "Should have thrown for [CEReactions] used on a named getter") + harness.ok(threw, "Should have thrown for [CEReactions] used on a named getter") parser = parser.reset() threw = False try: - parser.parse(""" + parser.parse( + """ interface Foo { [CEReactions] legacycaller double compute(double x); }; - """) + """ + ) results = parser.finish() except: threw = True - harness.ok(threw, - "Should have thrown for [CEReactions] used on a legacycaller") + harness.ok(threw, "Should have thrown for [CEReactions] used on a legacycaller") parser = parser.reset() threw = False try: - parser.parse(""" + parser.parse( + """ interface Foo { [CEReactions] stringifier DOMString (); }; - """) + """ + ) results = parser.finish() except: threw = True - harness.ok(threw, - "Should have thrown for [CEReactions] used on a stringifier") - + harness.ok(threw, "Should have thrown for [CEReactions] used on a stringifier") diff --git a/dom/bindings/parser/tests/test_conditional_dictionary_member.py b/dom/bindings/parser/tests/test_conditional_dictionary_member.py index 8420f2ee4e0078..2aef8ebe8ff3a6 100644 --- a/dom/bindings/parser/tests/test_conditional_dictionary_member.py +++ b/dom/bindings/parser/tests/test_conditional_dictionary_member.py @@ -1,23 +1,28 @@ def WebIDLTest(parser, harness): - parser.parse(""" + parser.parse( + """ dictionary Dict { any foo; [ChromeOnly] any bar; }; - """) + """ + ) results = parser.finish() harness.check(len(results), 1, "Should have a dictionary") - members = results[0].members; + members = results[0].members harness.check(len(members), 2, "Should have two members") # Note that members are ordered lexicographically, so "bar" comes # before "foo". 
- harness.ok(members[0].getExtendedAttribute("ChromeOnly"), - "First member is not ChromeOnly") - harness.ok(not members[1].getExtendedAttribute("ChromeOnly"), - "Second member is ChromeOnly") + harness.ok( + members[0].getExtendedAttribute("ChromeOnly"), "First member is not ChromeOnly" + ) + harness.ok( + not members[1].getExtendedAttribute("ChromeOnly"), "Second member is ChromeOnly" + ) parser = parser.reset() - parser.parse(""" + parser.parse( + """ dictionary Dict { any foo; any bar; @@ -26,14 +31,16 @@ def WebIDLTest(parser, harness): interface Iface { [Constant, Cached] readonly attribute Dict dict; }; - """) + """ + ) results = parser.finish() harness.check(len(results), 2, "Should have a dictionary and an interface") parser = parser.reset() exception = None try: - parser.parse(""" + parser.parse( + """ dictionary Dict { any foo; [ChromeOnly] any bar; @@ -42,21 +49,25 @@ def WebIDLTest(parser, harness): interface Iface { [Constant, Cached] readonly attribute Dict dict; }; - """) - results = parser.finish() + """ + ) + results = parser.finish() except Exception as e: - exception = e + exception = e harness.ok(exception, "Should have thrown.") - harness.check(exception.message, - "[Cached] and [StoreInSlot] must not be used on an attribute " - "whose type contains a [ChromeOnly] dictionary member", - "Should have thrown the right exception") + harness.check( + exception.message, + "[Cached] and [StoreInSlot] must not be used on an attribute " + "whose type contains a [ChromeOnly] dictionary member", + "Should have thrown the right exception", + ) parser = parser.reset() exception = None try: - parser.parse(""" + parser.parse( + """ dictionary ParentDict { [ChromeOnly] any bar; }; @@ -68,21 +79,25 @@ def WebIDLTest(parser, harness): interface Iface { [Constant, Cached] readonly attribute Dict dict; }; - """) - results = parser.finish() + """ + ) + results = parser.finish() except Exception as e: - exception = e + exception = e harness.ok(exception, "Should have thrown (2).") - harness.check(exception.message, - "[Cached] and [StoreInSlot] must not be used on an attribute " - "whose type contains a [ChromeOnly] dictionary member", - "Should have thrown the right exception (2)") + harness.check( + exception.message, + "[Cached] and [StoreInSlot] must not be used on an attribute " + "whose type contains a [ChromeOnly] dictionary member", + "Should have thrown the right exception (2)", + ) parser = parser.reset() exception = None try: - parser.parse(""" + parser.parse( + """ dictionary GrandParentDict { [ChromeOnly] any baz; }; @@ -98,13 +113,16 @@ def WebIDLTest(parser, harness): interface Iface { [Constant, Cached] readonly attribute Dict dict; }; - """) - results = parser.finish() + """ + ) + results = parser.finish() except Exception as e: - exception = e + exception = e harness.ok(exception, "Should have thrown (3).") - harness.check(exception.message, - "[Cached] and [StoreInSlot] must not be used on an attribute " - "whose type contains a [ChromeOnly] dictionary member", - "Should have thrown the right exception (3)") + harness.check( + exception.message, + "[Cached] and [StoreInSlot] must not be used on an attribute " + "whose type contains a [ChromeOnly] dictionary member", + "Should have thrown the right exception (3)", + ) diff --git a/dom/bindings/parser/tests/test_const.py b/dom/bindings/parser/tests/test_const.py index 918f284a226aa6..f2d4b79d467399 100644 --- a/dom/bindings/parser/tests/test_const.py +++ b/dom/bindings/parser/tests/test_const.py @@ -22,8 +22,10 @@ 
("::TestConsts::udbi", "udbi", "UnrestrictedDouble", 2), ] + def WebIDLTest(parser, harness): - parser.parse(""" + parser.parse( + """ interface TestConsts { const byte zero = 0; const byte b = -1; @@ -45,22 +47,25 @@ def WebIDLTest(parser, harness): const unrestricted float ufli = 2; const unrestricted double udbi = 2; }; - """) + """ + ) results = parser.finish() harness.ok(True, "TestConsts interface parsed without error.") harness.check(len(results), 1, "Should be one production.") iface = results[0] - harness.ok(isinstance(iface, WebIDL.IDLInterface), - "Should be an IDLInterface") - harness.check(iface.identifier.QName(), "::TestConsts", "Interface has the right QName") + harness.ok(isinstance(iface, WebIDL.IDLInterface), "Should be an IDLInterface") + harness.check( + iface.identifier.QName(), "::TestConsts", "Interface has the right QName" + ) harness.check(iface.identifier.name, "TestConsts", "Interface has the right name") - harness.check(len(iface.members), len(expected), "Expect %s members" % len(expected)) + harness.check( + len(iface.members), len(expected), "Expect %s members" % len(expected) + ) for (const, (QName, name, type, value)) in zip(iface.members, expected): - harness.ok(isinstance(const, WebIDL.IDLConst), - "Should be an IDLConst") + harness.ok(isinstance(const, WebIDL.IDLConst), "Should be an IDLConst") harness.ok(const.isConst(), "Const is a const") harness.ok(not const.isAttr(), "Const is not an attr") harness.ok(not const.isMethod(), "Const is not a method") @@ -68,19 +73,23 @@ def WebIDLTest(parser, harness): harness.check(const.identifier.name, name, "Const has the right name") harness.check(str(const.type), type, "Const has the right type") harness.ok(const.type.isPrimitive(), "All consts should be primitive") - harness.check(str(const.value.type), str(const.type), - "Const's value has the same type as the type") + harness.check( + str(const.value.type), + str(const.type), + "Const's value has the same type as the type", + ) harness.check(const.value.value, value, "Const value has the right value.") - parser = parser.reset() threw = False try: - parser.parse(""" + parser.parse( + """ interface TestConsts { const boolean? 
zero = 0; }; - """) + """ + ) parser.finish() except: threw = True diff --git a/dom/bindings/parser/tests/test_constructor.py b/dom/bindings/parser/tests/test_constructor.py index 83e1f4fc34f1bb..e42a818d19be07 100644 --- a/dom/bindings/parser/tests/test_constructor.py +++ b/dom/bindings/parser/tests/test_constructor.py @@ -1,21 +1,39 @@ import WebIDL + def WebIDLTest(parser, harness): def checkArgument(argument, QName, name, type, optional, variadic): - harness.ok(isinstance(argument, WebIDL.IDLArgument), - "Should be an IDLArgument") - harness.check(argument.identifier.QName(), QName, "Argument has the right QName") + harness.ok(isinstance(argument, WebIDL.IDLArgument), "Should be an IDLArgument") + harness.check( + argument.identifier.QName(), QName, "Argument has the right QName" + ) harness.check(argument.identifier.name, name, "Argument has the right name") harness.check(str(argument.type), type, "Argument has the right return type") - harness.check(argument.optional, optional, "Argument has the right optional value") - harness.check(argument.variadic, variadic, "Argument has the right variadic value") - - def checkMethod(method, QName, name, signatures, - static=True, getter=False, setter=False, deleter=False, - legacycaller=False, stringifier=False, chromeOnly=False, - htmlConstructor=False, secureContext=False, pref=None, func=None): - harness.ok(isinstance(method, WebIDL.IDLMethod), - "Should be an IDLMethod") + harness.check( + argument.optional, optional, "Argument has the right optional value" + ) + harness.check( + argument.variadic, variadic, "Argument has the right variadic value" + ) + + def checkMethod( + method, + QName, + name, + signatures, + static=True, + getter=False, + setter=False, + deleter=False, + legacycaller=False, + stringifier=False, + chromeOnly=False, + htmlConstructor=False, + secureContext=False, + pref=None, + func=None, + ): + harness.ok(isinstance(method, WebIDL.IDLMethod), "Should be an IDLMethod") harness.ok(method.isMethod(), "Method is a method") harness.ok(not method.isAttr(), "Method is not an attr") harness.ok(not method.isConst(), "Method is not a const") @@ -24,23 +42,58 @@ def checkMethod(method, QName, name, signatures, harness.check(method.isStatic(), static, "Method has the correct static value") harness.check(method.isGetter(), getter, "Method has the correct getter value") harness.check(method.isSetter(), setter, "Method has the correct setter value") - harness.check(method.isDeleter(), deleter, "Method has the correct deleter value") - harness.check(method.isLegacycaller(), legacycaller, "Method has the correct legacycaller value") - harness.check(method.isStringifier(), stringifier, "Method has the correct stringifier value") - harness.check(method.getExtendedAttribute("ChromeOnly") is not None, chromeOnly, "Method has the correct value for ChromeOnly") - harness.check(method.isHTMLConstructor(), htmlConstructor, "Method has the correct htmlConstructor value") - harness.check(len(method.signatures()), len(signatures), "Method has the correct number of signatures") - harness.check(method.getExtendedAttribute("Pref"), pref, "Method has the correct pref value") - harness.check(method.getExtendedAttribute("Func"), func, "Method has the correct func value") - harness.check(method.getExtendedAttribute("SecureContext") is not None, secureContext, "Method has the correct SecureContext value") + harness.check( + method.isDeleter(), deleter, "Method has the correct deleter value" + ) + harness.check( + method.isLegacycaller(), + legacycaller, 
+ "Method has the correct legacycaller value", + ) + harness.check( + method.isStringifier(), + stringifier, + "Method has the correct stringifier value", + ) + harness.check( + method.getExtendedAttribute("ChromeOnly") is not None, + chromeOnly, + "Method has the correct value for ChromeOnly", + ) + harness.check( + method.isHTMLConstructor(), + htmlConstructor, + "Method has the correct htmlConstructor value", + ) + harness.check( + len(method.signatures()), + len(signatures), + "Method has the correct number of signatures", + ) + harness.check( + method.getExtendedAttribute("Pref"), + pref, + "Method has the correct pref value", + ) + harness.check( + method.getExtendedAttribute("Func"), + func, + "Method has the correct func value", + ) + harness.check( + method.getExtendedAttribute("SecureContext") is not None, + secureContext, + "Method has the correct SecureContext value", + ) sigpairs = zip(method.signatures(), signatures) for (gotSignature, expectedSignature) in sigpairs: (gotRetType, gotArgs) = gotSignature (expectedRetType, expectedArgs) = expectedSignature - harness.check(str(gotRetType), expectedRetType, - "Method has the expected return type.") + harness.check( + str(gotRetType), expectedRetType, "Method has the expected return type." + ) for i in range(0, len(gotArgs)): (QName, name, type, optional, variadic) = expectedArgs[i] @@ -48,33 +101,88 @@ def checkMethod(method, QName, name, signatures, def checkResults(results): harness.check(len(results), 3, "Should be three productions") - harness.ok(isinstance(results[0], WebIDL.IDLInterface), - "Should be an IDLInterface") - harness.ok(isinstance(results[1], WebIDL.IDLInterface), - "Should be an IDLInterface") - harness.ok(isinstance(results[2], WebIDL.IDLInterface), - "Should be an IDLInterface") - - checkMethod(results[0].ctor(), "::TestConstructorNoArgs::constructor", - "constructor", [("TestConstructorNoArgs (Wrapper)", [])]) - harness.check(len(results[0].members), 0, - "TestConstructorNoArgs should not have members") - checkMethod(results[1].ctor(), "::TestConstructorWithArgs::constructor", - "constructor", - [("TestConstructorWithArgs (Wrapper)", - [("::TestConstructorWithArgs::constructor::name", "name", "String", False, False)])]) - harness.check(len(results[1].members), 0, - "TestConstructorWithArgs should not have members") - checkMethod(results[2].ctor(), "::TestConstructorOverloads::constructor", - "constructor", - [("TestConstructorOverloads (Wrapper)", - [("::TestConstructorOverloads::constructor::foo", "foo", "Object", False, False)]), - ("TestConstructorOverloads (Wrapper)", - [("::TestConstructorOverloads::constructor::bar", "bar", "Boolean", False, False)])]) - harness.check(len(results[2].members), 0, - "TestConstructorOverloads should not have members") - - parser.parse(""" + harness.ok( + isinstance(results[0], WebIDL.IDLInterface), "Should be an IDLInterface" + ) + harness.ok( + isinstance(results[1], WebIDL.IDLInterface), "Should be an IDLInterface" + ) + harness.ok( + isinstance(results[2], WebIDL.IDLInterface), "Should be an IDLInterface" + ) + + checkMethod( + results[0].ctor(), + "::TestConstructorNoArgs::constructor", + "constructor", + [("TestConstructorNoArgs (Wrapper)", [])], + ) + harness.check( + len(results[0].members), 0, "TestConstructorNoArgs should not have members" + ) + checkMethod( + results[1].ctor(), + "::TestConstructorWithArgs::constructor", + "constructor", + [ + ( + "TestConstructorWithArgs (Wrapper)", + [ + ( + "::TestConstructorWithArgs::constructor::name", + "name", + "String", 
+ False, + False, + ) + ], + ) + ], + ) + harness.check( + len(results[1].members), + 0, + "TestConstructorWithArgs should not have members", + ) + checkMethod( + results[2].ctor(), + "::TestConstructorOverloads::constructor", + "constructor", + [ + ( + "TestConstructorOverloads (Wrapper)", + [ + ( + "::TestConstructorOverloads::constructor::foo", + "foo", + "Object", + False, + False, + ) + ], + ), + ( + "TestConstructorOverloads (Wrapper)", + [ + ( + "::TestConstructorOverloads::constructor::bar", + "bar", + "Boolean", + False, + False, + ) + ], + ), + ], + ) + harness.check( + len(results[2].members), + 0, + "TestConstructorOverloads should not have members", + ) + + parser.parse( + """ interface TestConstructorNoArgs { constructor(); }; @@ -87,111 +195,146 @@ def checkResults(results): constructor(object foo); constructor(boolean bar); }; - """) + """ + ) results = parser.finish() checkResults(results) parser = parser.reset() - parser.parse(""" + parser.parse( + """ interface TestPrefConstructor { [Pref="dom.webidl.test1"] constructor(); }; - """) + """ + ) results = parser.finish() harness.check(len(results), 1, "Should be one production") - harness.ok(isinstance(results[0], WebIDL.IDLInterface), - "Should be an IDLInterface") + harness.ok(isinstance(results[0], WebIDL.IDLInterface), "Should be an IDLInterface") - checkMethod(results[0].ctor(), "::TestPrefConstructor::constructor", - "constructor", [("TestPrefConstructor (Wrapper)", [])], - pref=["dom.webidl.test1"]) + checkMethod( + results[0].ctor(), + "::TestPrefConstructor::constructor", + "constructor", + [("TestPrefConstructor (Wrapper)", [])], + pref=["dom.webidl.test1"], + ) parser = parser.reset() - parser.parse(""" + parser.parse( + """ interface TestChromeOnlyConstructor { [ChromeOnly] constructor(); }; - """) + """ + ) results = parser.finish() harness.check(len(results), 1, "Should be one production") - harness.ok(isinstance(results[0], WebIDL.IDLInterface), - "Should be an IDLInterface") + harness.ok(isinstance(results[0], WebIDL.IDLInterface), "Should be an IDLInterface") - checkMethod(results[0].ctor(), "::TestChromeOnlyConstructor::constructor", - "constructor", [("TestChromeOnlyConstructor (Wrapper)", [])], - chromeOnly=True) + checkMethod( + results[0].ctor(), + "::TestChromeOnlyConstructor::constructor", + "constructor", + [("TestChromeOnlyConstructor (Wrapper)", [])], + chromeOnly=True, + ) parser = parser.reset() - parser.parse(""" + parser.parse( + """ interface TestSCConstructor { [SecureContext] constructor(); }; - """) + """ + ) results = parser.finish() harness.check(len(results), 1, "Should be one production") - harness.ok(isinstance(results[0], WebIDL.IDLInterface), - "Should be an IDLInterface") + harness.ok(isinstance(results[0], WebIDL.IDLInterface), "Should be an IDLInterface") - checkMethod(results[0].ctor(), "::TestSCConstructor::constructor", - "constructor", [("TestSCConstructor (Wrapper)", [])], - secureContext=True) + checkMethod( + results[0].ctor(), + "::TestSCConstructor::constructor", + "constructor", + [("TestSCConstructor (Wrapper)", [])], + secureContext=True, + ) parser = parser.reset() - parser.parse(""" + parser.parse( + """ interface TestFuncConstructor { [Func="Document::IsWebAnimationsEnabled"] constructor(); }; - """) + """ + ) results = parser.finish() harness.check(len(results), 1, "Should be one production") - harness.ok(isinstance(results[0], WebIDL.IDLInterface), - "Should be an IDLInterface") + harness.ok(isinstance(results[0], WebIDL.IDLInterface), "Should be an 
IDLInterface") - checkMethod(results[0].ctor(), "::TestFuncConstructor::constructor", - "constructor", [("TestFuncConstructor (Wrapper)", [])], - func=["Document::IsWebAnimationsEnabled"]) + checkMethod( + results[0].ctor(), + "::TestFuncConstructor::constructor", + "constructor", + [("TestFuncConstructor (Wrapper)", [])], + func=["Document::IsWebAnimationsEnabled"], + ) parser = parser.reset() - parser.parse(""" + parser.parse( + """ interface TestPrefChromeOnlySCFuncConstructor { [ChromeOnly, Pref="dom.webidl.test1", SecureContext, Func="Document::IsWebAnimationsEnabled"] constructor(); }; - """) + """ + ) results = parser.finish() harness.check(len(results), 1, "Should be one production") - harness.ok(isinstance(results[0], WebIDL.IDLInterface), - "Should be an IDLInterface") - - checkMethod(results[0].ctor(), "::TestPrefChromeOnlySCFuncConstructor::constructor", - "constructor", [("TestPrefChromeOnlySCFuncConstructor (Wrapper)", [])], - func=["Document::IsWebAnimationsEnabled"], pref=["dom.webidl.test1"], - chromeOnly=True, secureContext=True) + harness.ok(isinstance(results[0], WebIDL.IDLInterface), "Should be an IDLInterface") + + checkMethod( + results[0].ctor(), + "::TestPrefChromeOnlySCFuncConstructor::constructor", + "constructor", + [("TestPrefChromeOnlySCFuncConstructor (Wrapper)", [])], + func=["Document::IsWebAnimationsEnabled"], + pref=["dom.webidl.test1"], + chromeOnly=True, + secureContext=True, + ) parser = parser.reset() - parser.parse(""" + parser.parse( + """ interface TestHTMLConstructor { [HTMLConstructor] constructor(); }; - """) + """ + ) results = parser.finish() harness.check(len(results), 1, "Should be one production") - harness.ok(isinstance(results[0], WebIDL.IDLInterface), - "Should be an IDLInterface") + harness.ok(isinstance(results[0], WebIDL.IDLInterface), "Should be an IDLInterface") - checkMethod(results[0].ctor(), "::TestHTMLConstructor::constructor", - "constructor", [("TestHTMLConstructor (Wrapper)", [])], - htmlConstructor=True) + checkMethod( + results[0].ctor(), + "::TestHTMLConstructor::constructor", + "constructor", + [("TestHTMLConstructor (Wrapper)", [])], + htmlConstructor=True, + ) parser = parser.reset() threw = False try: - parser.parse(""" + parser.parse( + """ interface TestChromeOnlyConstructor { constructor() [ChromeOnly] constructor(DOMString a); }; - """) + """ + ) results = parser.finish() except: threw = True @@ -202,11 +345,13 @@ def checkResults(results): parser = parser.reset() threw = False try: - parser.parse(""" + parser.parse( + """ interface TestHTMLConstructorWithArgs { [HTMLConstructor] constructor(DOMString a); }; - """) + """ + ) results = parser.finish() except: threw = True @@ -217,11 +362,13 @@ def checkResults(results): parser = parser.reset() threw = False try: - parser.parse(""" + parser.parse( + """ callback interface TestHTMLConstructorOnCallbackInterface { [HTMLConstructor] constructor(); }; - """) + """ + ) results = parser.finish() except: threw = True @@ -232,12 +379,14 @@ def checkResults(results): parser = parser.reset() threw = False try: - parser.parse(""" + parser.parse( + """ interface TestHTMLConstructorAndConstructor { constructor(); [HTMLConstructor] constructor(); }; - """) + """ + ) results = parser.finish() except: threw = True @@ -247,165 +396,186 @@ def checkResults(results): parser = parser.reset() threw = False try: - parser.parse(""" + parser.parse( + """ interface TestHTMLConstructorAndConstructor { [Throws] constructor(); [HTMLConstructor] constructor(); }; - """) + """ + ) results = 
parser.finish() except: threw = True - harness.ok(threw, - "Can't have both a throwing constructor and a HTMLConstructor") + harness.ok(threw, "Can't have both a throwing constructor and a HTMLConstructor") parser = parser.reset() threw = False try: - parser.parse(""" + parser.parse( + """ interface TestHTMLConstructorAndConstructor { constructor(DOMString a); [HTMLConstructor] constructor(); }; - """) + """ + ) results = parser.finish() except: threw = True - harness.ok(threw, - "Can't have both a HTMLConstructor and a constructor operation") + harness.ok(threw, "Can't have both a HTMLConstructor and a constructor operation") parser = parser.reset() threw = False try: - parser.parse(""" + parser.parse( + """ interface TestHTMLConstructorAndConstructor { [Throws] constructor(DOMString a); [HTMLConstructor] constructor(); }; - """) + """ + ) results = parser.finish() except: threw = True - harness.ok(threw, - "Can't have both a HTMLConstructor and a throwing constructor " - "operation") + harness.ok( + threw, + "Can't have both a HTMLConstructor and a throwing constructor " "operation", + ) # Test HTMLConstructor and [ChromeOnly] constructor operation parser = parser.reset() threw = False try: - parser.parse(""" + parser.parse( + """ interface TestHTMLConstructorAndConstructor { [ChromeOnly] constructor(); [HTMLConstructor] constructor(); }; - """) + """ + ) results = parser.finish() except: threw = True - harness.ok(threw, - "Can't have both a ChromeOnly constructor and a HTMLConstructor") + harness.ok(threw, "Can't have both a ChromeOnly constructor and a HTMLConstructor") parser = parser.reset() threw = False try: - parser.parse(""" + parser.parse( + """ interface TestHTMLConstructorAndConstructor { [Throws, ChromeOnly] constructor(); [HTMLConstructor] constructor(); }; - """) + """ + ) results = parser.finish() except: threw = True - harness.ok(threw, - "Can't have both a throwing chromeonly constructor and a " - "HTMLConstructor") + harness.ok( + threw, + "Can't have both a throwing chromeonly constructor and a " "HTMLConstructor", + ) parser = parser.reset() threw = False try: - parser.parse(""" + parser.parse( + """ interface TestHTMLConstructorAndConstructor { [ChromeOnly] constructor(DOMString a); [HTMLConstructor] constructor(); }; - """) + """ + ) results = parser.finish() except: threw = True - harness.ok(threw, - "Can't have both a HTMLConstructor and a chromeonly constructor " - "operation") + harness.ok( + threw, + "Can't have both a HTMLConstructor and a chromeonly constructor " "operation", + ) parser = parser.reset() threw = False try: - parser.parse(""" + parser.parse( + """ interface TestHTMLConstructorAndConstructor { [Throws, ChromeOnly] constructor(DOMString a); [HTMLConstructor] constructor(); }; - """) + """ + ) results = parser.finish() except: threw = True - harness.ok(threw, - "Can't have both a HTMLConstructor and a throwing chromeonly " - "constructor operation") + harness.ok( + threw, + "Can't have both a HTMLConstructor and a throwing chromeonly " + "constructor operation", + ) parser = parser.reset() threw = False try: - parser.parse(""" + parser.parse( + """ [NoInterfaceObject] interface InterfaceWithoutInterfaceObject { constructor(); }; - """) + """ + ) results = parser.finish() except: threw = True - harness.ok(threw, - "Can't have a constructor operation on a [NoInterfaceObject] " - "interface") + harness.ok( + threw, + "Can't have a constructor operation on a [NoInterfaceObject] " "interface", + ) parser = parser.reset() threw = False try: - 
parser.parse(""" + parser.parse( + """ interface InterfaceWithPartial { }; partial interface InterfaceWithPartial { constructor(); }; - """) + """ + ) results = parser.finish() except: threw = True - harness.ok(threw, - "Can't have a constructor operation on a partial interface") + harness.ok(threw, "Can't have a constructor operation on a partial interface") parser = parser.reset() threw = False try: - parser.parse(""" + parser.parse( + """ interface InterfaceWithMixin { }; @@ -414,11 +584,10 @@ def checkResults(results): }; InterfaceWithMixin includes Mixin - """) + """ + ) results = parser.finish() except: threw = True - harness.ok(threw, - "Can't have a constructor operation on a mixin") - + harness.ok(threw, "Can't have a constructor operation on a mixin") diff --git a/dom/bindings/parser/tests/test_constructor_global.py b/dom/bindings/parser/tests/test_constructor_global.py index b7eabb1e35b644..ee7a361fd38f0b 100644 --- a/dom/bindings/parser/tests/test_constructor_global.py +++ b/dom/bindings/parser/tests/test_constructor_global.py @@ -1,14 +1,17 @@ import traceback + def WebIDLTest(parser, harness): threw = False try: - parser.parse(""" + parser.parse( + """ [Global, Exposed=TestConstructorGlobal] interface TestConstructorGlobal { constructor(); }; - """) + """ + ) results = parser.finish() except: @@ -19,12 +22,14 @@ def WebIDLTest(parser, harness): parser = parser.reset() threw = False try: - parser.parse(""" + parser.parse( + """ [Global, Exposed=TestNamedConstructorGlobal, NamedConstructor=FooBar] interface TestNamedConstructorGlobal { }; - """) + """ + ) results = parser.finish() except: threw = True @@ -34,12 +39,14 @@ def WebIDLTest(parser, harness): parser = parser.reset() threw = False try: - parser.parse(""" + parser.parse( + """ [NamedConstructor=FooBar, Global, Exposed=TestNamedConstructorGlobal] interface TestNamedConstructorGlobal { }; - """) + """ + ) results = parser.finish() except: threw = True @@ -49,12 +56,14 @@ def WebIDLTest(parser, harness): parser = parser.reset() threw = False try: - parser.parse(""" + parser.parse( + """ [Global, Exposed=TestHTMLConstructorGlobal] interface TestHTMLConstructorGlobal { [HTMLConstructor] constructor(); }; - """) + """ + ) results = parser.finish() except: diff --git a/dom/bindings/parser/tests/test_constructor_no_interface_object.py b/dom/bindings/parser/tests/test_constructor_no_interface_object.py index 24cc36066cd663..513f43476726c8 100644 --- a/dom/bindings/parser/tests/test_constructor_no_interface_object.py +++ b/dom/bindings/parser/tests/test_constructor_no_interface_object.py @@ -1,12 +1,14 @@ def WebIDLTest(parser, harness): threw = False try: - parser.parse(""" + parser.parse( + """ [NoInterfaceObject] interface TestConstructorNoInterfaceObject { constructor(); }; - """) + """ + ) results = parser.finish() except: @@ -16,23 +18,27 @@ def WebIDLTest(parser, harness): parser = parser.reset() - parser.parse(""" + parser.parse( + """ [NoInterfaceObject, NamedConstructor=FooBar] interface TestNamedConstructorNoInterfaceObject { }; - """) + """ + ) # Test HTMLConstructor and NoInterfaceObject parser = parser.reset() threw = False try: - parser.parse(""" + parser.parse( + """ [NoInterfaceObject] interface TestHTMLConstructorNoInterfaceObject { [HTMLConstructor] constructor(); }; - """) + """ + ) results = parser.finish() except: diff --git a/dom/bindings/parser/tests/test_deduplicate.py b/dom/bindings/parser/tests/test_deduplicate.py index 6249d36fb8f3c3..6649f4ec05d016 100644 --- 
a/dom/bindings/parser/tests/test_deduplicate.py +++ b/dom/bindings/parser/tests/test_deduplicate.py @@ -1,15 +1,20 @@ import WebIDL + def WebIDLTest(parser, harness): - parser.parse(""" + parser.parse( + """ interface Foo; interface Bar; interface Foo; - """); + """ + ) results = parser.finish() # There should be no duplicate interfaces in the result. - expectedNames = sorted(['Foo', 'Bar']) + expectedNames = sorted(["Foo", "Bar"]) actualNames = sorted(map(lambda iface: iface.identifier.name, results)) - harness.check(actualNames, expectedNames, "Parser shouldn't output duplicate names.") + harness.check( + actualNames, expectedNames, "Parser shouldn't output duplicate names." + ) diff --git a/dom/bindings/parser/tests/test_dictionary.py b/dom/bindings/parser/tests/test_dictionary.py index 3cad3022389ae9..5d3d482f061b25 100644 --- a/dom/bindings/parser/tests/test_dictionary.py +++ b/dom/bindings/parser/tests/test_dictionary.py @@ -1,5 +1,6 @@ def WebIDLTest(parser, harness): - parser.parse(""" + parser.parse( + """ dictionary Dict2 : Dict1 { long child = 5; Dict1 aaandAnother; @@ -8,27 +9,33 @@ def WebIDLTest(parser, harness): long parent; double otherParent; }; - """) + """ + ) results = parser.finish() - dict1 = results[1]; - dict2 = results[0]; + dict1 = results[1] + dict2 = results[0] harness.check(len(dict1.members), 2, "Dict1 has two members") harness.check(len(dict2.members), 2, "Dict2 has two members") - harness.check(dict1.members[0].identifier.name, "otherParent", - "'o' comes before 'p'") - harness.check(dict1.members[1].identifier.name, "parent", - "'o' really comes before 'p'") - harness.check(dict2.members[0].identifier.name, "aaandAnother", - "'a' comes before 'c'") - harness.check(dict2.members[1].identifier.name, "child", - "'a' really comes before 'c'") + harness.check( + dict1.members[0].identifier.name, "otherParent", "'o' comes before 'p'" + ) + harness.check( + dict1.members[1].identifier.name, "parent", "'o' really comes before 'p'" + ) + harness.check( + dict2.members[0].identifier.name, "aaandAnother", "'a' comes before 'c'" + ) + harness.check( + dict2.members[1].identifier.name, "child", "'a' really comes before 'c'" + ) # Test partial dictionary. 
- parser = parser.reset(); - parser.parse(""" + parser = parser.reset() + parser.parse( + """ dictionary A { long c; long g; @@ -37,30 +44,29 @@ def WebIDLTest(parser, harness): long h; long d; }; - """) + """ + ) results = parser.finish() - dict1 = results[0]; + dict1 = results[0] harness.check(len(dict1.members), 4, "Dict1 has four members") - harness.check(dict1.members[0].identifier.name, "c", - "c should be first") - harness.check(dict1.members[1].identifier.name, "d", - "d should come after c") - harness.check(dict1.members[2].identifier.name, "g", - "g should come after d") - harness.check(dict1.members[3].identifier.name, "h", - "h should be last") + harness.check(dict1.members[0].identifier.name, "c", "c should be first") + harness.check(dict1.members[1].identifier.name, "d", "d should come after c") + harness.check(dict1.members[2].identifier.name, "g", "g should come after d") + harness.check(dict1.members[3].identifier.name, "h", "h should be last") # Now reset our parser parser = parser.reset() threw = False try: - parser.parse(""" + parser.parse( + """ dictionary Dict { long prop = 5; long prop; }; - """) + """ + ) results = parser.finish() except: threw = True @@ -68,28 +74,33 @@ def WebIDLTest(parser, harness): harness.ok(threw, "Should not allow name duplication in a dictionary") # Test no name duplication across normal and partial dictionary. - parser = parser.reset(); + parser = parser.reset() threw = False try: - parser.parse(""" + parser.parse( + """ dictionary A { long prop = 5; }; partial dictionary A { long prop; }; - """) + """ + ) results = parser.finish() except: threw = True - harness.ok(threw, "Should not allow name duplication across normal and partial dictionary") + harness.ok( + threw, "Should not allow name duplication across normal and partial dictionary" + ) # Now reset our parser again parser = parser.reset() threw = False try: - parser.parse(""" + parser.parse( + """ dictionary Dict1 : Dict2 { long prop = 5; }; @@ -99,24 +110,28 @@ def WebIDLTest(parser, harness): dictionary Dict3 { double prop; }; - """) + """ + ) results = parser.finish() except: threw = True - harness.ok(threw, "Should not allow name duplication in a dictionary and " - "its ancestor") + harness.ok( + threw, "Should not allow name duplication in a dictionary and " "its ancestor" + ) # More reset parser = parser.reset() threw = False try: - parser.parse(""" + parser.parse( + """ interface Iface {}; dictionary Dict : Iface { long prop; }; - """) + """ + ) results = parser.finish() except: threw = True @@ -127,10 +142,12 @@ def WebIDLTest(parser, harness): parser = parser.reset() threw = False try: - parser.parse(""" + parser.parse( + """ dictionary A : B {}; dictionary B : A {}; - """) + """ + ) results = parser.finish() except: threw = True @@ -140,27 +157,31 @@ def WebIDLTest(parser, harness): parser = parser.reset() threw = False try: - parser.parse(""" + parser.parse( + """ dictionary A { [TreatNullAs=EmptyString] DOMString foo; }; - """) + """ + ) results = parser.finish() except: threw = True - harness.ok(threw, "Should not allow [TreatNullAs] on dictionary members"); + harness.ok(threw, "Should not allow [TreatNullAs] on dictionary members") parser = parser.reset() threw = False try: - parser.parse(""" + parser.parse( + """ dictionary A { }; interface X { void doFoo(A arg); }; - """) + """ + ) results = parser.finish() except: threw = True @@ -170,13 +191,15 @@ def WebIDLTest(parser, harness): parser = parser.reset() threw = False try: - parser.parse(""" + parser.parse( + """ 
dictionary A { }; interface X { void doFoo(optional A arg); }; - """) + """ + ) results = parser.finish() except: threw = True @@ -186,47 +209,53 @@ def WebIDLTest(parser, harness): parser = parser.reset() threw = False try: - parser.parse(""" + parser.parse( + """ dictionary A { }; interface X { void doFoo((A or DOMString) arg); }; - """) + """ + ) results = parser.finish() except: threw = True - harness.ok(threw, - "Trailing union arg containing a dictionary must be optional") + harness.ok(threw, "Trailing union arg containing a dictionary must be optional") parser = parser.reset() threw = False try: - parser.parse(""" + parser.parse( + """ dictionary A { }; interface X { void doFoo(optional (A or DOMString) arg); }; - """) + """ + ) results = parser.finish() except: threw = True - harness.ok(threw, - "Trailing union arg containing a dictionary must have a default value") + harness.ok( + threw, "Trailing union arg containing a dictionary must have a default value" + ) parser = parser.reset() threw = False try: - parser.parse(""" + parser.parse( + """ dictionary A { }; interface X { void doFoo(A arg1, optional long arg2); }; - """) + """ + ) results = parser.finish() except: threw = True @@ -236,13 +265,15 @@ def WebIDLTest(parser, harness): parser = parser.reset() threw = False try: - parser.parse(""" + parser.parse( + """ dictionary A { }; interface X { void doFoo(optional A arg1, optional long arg2); }; - """) + """ + ) results = parser.finish() except: threw = True @@ -252,245 +283,289 @@ def WebIDLTest(parser, harness): parser = parser.reset() threw = False try: - parser.parse(""" + parser.parse( + """ dictionary A { }; interface X { void doFoo(A arg1, optional long arg2, long arg3); }; - """) + """ + ) results = parser.finish() except: threw = True - harness.ok(not threw, - "Dictionary arg followed by non-optional arg doesn't have to be optional") + harness.ok( + not threw, + "Dictionary arg followed by non-optional arg doesn't have to be optional", + ) parser = parser.reset() threw = False try: - parser.parse(""" + parser.parse( + """ dictionary A { }; interface X { void doFoo((A or DOMString) arg1, optional long arg2); }; - """) + """ + ) results = parser.finish() except: threw = True - harness.ok(threw, - "Union arg containing dictionary followed by optional arg must " - "be optional") + harness.ok( + threw, + "Union arg containing dictionary followed by optional arg must " "be optional", + ) parser = parser.reset() threw = False try: - parser.parse(""" + parser.parse( + """ dictionary A { }; interface X { void doFoo(optional (A or DOMString) arg1, optional long arg2); }; - """) + """ + ) results = parser.finish() except: threw = True - harness.ok(threw, - "Union arg containing dictionary followed by optional arg must " - "have a default value") + harness.ok( + threw, + "Union arg containing dictionary followed by optional arg must " + "have a default value", + ) parser = parser.reset() - parser.parse(""" + parser.parse( + """ dictionary A { }; interface X { void doFoo(A arg1, long arg2); }; - """) + """ + ) results = parser.finish() harness.ok(True, "Dictionary arg followed by required arg can be required") parser = parser.reset() threw = False try: - parser.parse(""" + parser.parse( + """ dictionary A { }; interface X { void doFoo(optional A? 
arg1 = {}); }; - """) + """ + ) results = parser.finish() except Exception as x: threw = x harness.ok(threw, "Optional dictionary arg must not be nullable") - harness.ok("nullable" in str(threw), - "Must have the expected exception for optional nullable dictionary arg") + harness.ok( + "nullable" in str(threw), + "Must have the expected exception for optional nullable dictionary arg", + ) parser = parser.reset() threw = False try: - parser.parse(""" + parser.parse( + """ dictionary A { required long x; }; interface X { void doFoo(A? arg1); }; - """) + """ + ) results = parser.finish() except Exception as x: threw = x harness.ok(threw, "Required dictionary arg must not be nullable") - harness.ok("nullable" in str(threw), - "Must have the expected exception for required nullable " - "dictionary arg") + harness.ok( + "nullable" in str(threw), + "Must have the expected exception for required nullable " "dictionary arg", + ) parser = parser.reset() threw = False try: - parser.parse(""" + parser.parse( + """ dictionary A { }; interface X { void doFoo(optional (A or long)? arg1 = {}); }; - """) + """ + ) results = parser.finish() except Exception as x: threw = x harness.ok(threw, "Dictionary arg must not be in an optional nullable union") - harness.ok("nullable" in str(threw), - "Must have the expected exception for optional nullable union " - "arg containing dictionary") + harness.ok( + "nullable" in str(threw), + "Must have the expected exception for optional nullable union " + "arg containing dictionary", + ) parser = parser.reset() threw = False try: - parser.parse(""" + parser.parse( + """ dictionary A { required long x; }; interface X { void doFoo((A or long)? arg1); }; - """) + """ + ) results = parser.finish() except Exception as x: threw = x harness.ok(threw, "Dictionary arg must not be in a required nullable union") - harness.ok("nullable" in str(threw), - "Must have the expected exception for required nullable union " - "arg containing dictionary") + harness.ok( + "nullable" in str(threw), + "Must have the expected exception for required nullable union " + "arg containing dictionary", + ) parser = parser.reset() threw = False try: - parser.parse(""" + parser.parse( + """ dictionary A { }; interface X { void doFoo(sequence arg1); }; - """) + """ + ) results = parser.finish() except: threw = True - harness.ok(not threw, - "Nullable union should be allowed in a sequence argument") + harness.ok(not threw, "Nullable union should be allowed in a sequence argument") parser = parser.reset() threw = False try: - parser.parse(""" + parser.parse( + """ dictionary A { }; interface X { void doFoo(optional (A or long?) arg1); }; - """) + """ + ) results = parser.finish() except: threw = True - harness.ok(threw, - "Dictionary must not be in a union with a nullable type") + harness.ok(threw, "Dictionary must not be in a union with a nullable type") parser = parser.reset() threw = False try: - parser.parse(""" + parser.parse( + """ dictionary A { }; interface X { void doFoo(optional (long? or A) arg1); }; - """) + """ + ) results = parser.finish() except: threw = True - harness.ok(threw, - "A nullable type must not be in a union with a dictionary") + harness.ok(threw, "A nullable type must not be in a union with a dictionary") parser = parser.reset() - parser.parse(""" + parser.parse( + """ dictionary A { }; interface X { A? 
doFoo(); }; - """) + """ + ) results = parser.finish() harness.ok(True, "Dictionary return value can be nullable") parser = parser.reset() - parser.parse(""" + parser.parse( + """ dictionary A { }; interface X { void doFoo(optional A arg = {}); }; - """) + """ + ) results = parser.finish() harness.ok(True, "Dictionary arg should actually parse") parser = parser.reset() - parser.parse(""" + parser.parse( + """ dictionary A { }; interface X { void doFoo(optional (A or DOMString) arg = {}); }; - """) + """ + ) results = parser.finish() harness.ok(True, "Union arg containing a dictionary should actually parse") parser = parser.reset() - parser.parse(""" + parser.parse( + """ dictionary A { }; interface X { void doFoo(optional (A or DOMString) arg = "abc"); }; - """) + """ + ) results = parser.finish() - harness.ok(True, "Union arg containing a dictionary with string default should actually parse") + harness.ok( + True, + "Union arg containing a dictionary with string default should actually parse", + ) parser = parser.reset() threw = False try: - parser.parse(""" + parser.parse( + """ dictionary Foo { Foo foo; }; - """) + """ + ) results = parser.finish() except: threw = True @@ -500,7 +575,8 @@ def WebIDLTest(parser, harness): parser = parser.reset() threw = False try: - parser.parse(""" + parser.parse( + """ dictionary Foo3 : Foo { short d; }; @@ -516,78 +592,102 @@ def WebIDLTest(parser, harness): dictionary Foo { Foo1 b; }; - """) + """ + ) results = parser.finish() except: threw = True - harness.ok(threw, "Member type must not be a Dictionary that " - "inherits from its Dictionary.") + harness.ok( + threw, + "Member type must not be a Dictionary that " "inherits from its Dictionary.", + ) parser = parser.reset() threw = False try: - parser.parse(""" + parser.parse( + """ dictionary Foo { (Foo or DOMString)[]? 
b; }; - """) + """ + ) results = parser.finish() except: threw = True - harness.ok(threw, "Member type must not be a Nullable type " - "whose inner type includes its Dictionary.") + harness.ok( + threw, + "Member type must not be a Nullable type " + "whose inner type includes its Dictionary.", + ) parser = parser.reset() threw = False try: - parser.parse(""" + parser.parse( + """ dictionary Foo { (DOMString or Foo) b; }; - """) + """ + ) results = parser.finish() except: threw = True - harness.ok(threw, "Member type must not be a Union type, one of " - "whose member types includes its Dictionary.") + harness.ok( + threw, + "Member type must not be a Union type, one of " + "whose member types includes its Dictionary.", + ) parser = parser.reset() threw = False try: - parser.parse(""" + parser.parse( + """ dictionary Foo { sequence>> c; }; - """) + """ + ) results = parser.finish() except: threw = True - harness.ok(threw, "Member type must not be a Sequence type " - "whose element type includes its Dictionary.") + harness.ok( + threw, + "Member type must not be a Sequence type " + "whose element type includes its Dictionary.", + ) parser = parser.reset() threw = False try: - parser.parse(""" + parser.parse( + """ dictionary Foo { (DOMString or Foo)[] d; }; - """) + """ + ) results = parser.finish() except: threw = True - harness.ok(threw, "Member type must not be an Array type " - "whose element type includes its Dictionary.") + harness.ok( + threw, + "Member type must not be an Array type " + "whose element type includes its Dictionary.", + ) parser = parser.reset() threw = False try: - parser.parse(""" + parser.parse( + """ dictionary Foo { Foo1 b; }; @@ -603,34 +703,41 @@ def WebIDLTest(parser, harness): dictionary Foo1 : Foo2 { long a; }; - """) + """ + ) results = parser.finish() except: threw = True - harness.ok(threw, "Member type must not be a Dictionary, one of whose " - "members or inherited members has a type that includes " - "its Dictionary.") + harness.ok( + threw, + "Member type must not be a Dictionary, one of whose " + "members or inherited members has a type that includes " + "its Dictionary.", + ) - parser = parser.reset(); + parser = parser.reset() threw = False try: - parser.parse(""" + parser.parse( + """ dictionary Foo { }; dictionary Bar { Foo? 
d; }; - """) + """ + ) results = parser.finish() except: threw = True harness.ok(threw, "Member type must not be a nullable dictionary") - parser = parser.reset(); - parser.parse(""" + parser = parser.reset() + parser.parse( + """ dictionary Foo { unrestricted float urFloat = 0; unrestricted float urFloat2 = 1.1; @@ -648,103 +755,117 @@ def WebIDLTest(parser, harness): unrestricted double negativeInfUrDouble = -Infinity; unrestricted double nanUrDouble = NaN; }; - """) + """ + ) results = parser.finish() harness.ok(True, "Parsing default values for unrestricted types succeeded.") - parser = parser.reset(); + parser = parser.reset() threw = False try: - parser.parse(""" + parser.parse( + """ dictionary Foo { double f = Infinity; }; - """) + """ + ) results = parser.finish() except: threw = True harness.ok(threw, "Only unrestricted values can be initialized to Infinity") - parser = parser.reset(); + parser = parser.reset() threw = False try: - parser.parse(""" + parser.parse( + """ dictionary Foo { double f = -Infinity; }; - """) + """ + ) results = parser.finish() except: threw = True harness.ok(threw, "Only unrestricted values can be initialized to -Infinity") - parser = parser.reset(); + parser = parser.reset() threw = False try: - parser.parse(""" + parser.parse( + """ dictionary Foo { double f = NaN; }; - """) + """ + ) results = parser.finish() except: threw = True harness.ok(threw, "Only unrestricted values can be initialized to NaN") - parser = parser.reset(); + parser = parser.reset() threw = False try: - parser.parse(""" + parser.parse( + """ dictionary Foo { float f = Infinity; }; - """) + """ + ) results = parser.finish() except: threw = True harness.ok(threw, "Only unrestricted values can be initialized to Infinity") - - parser = parser.reset(); + parser = parser.reset() threw = False try: - parser.parse(""" + parser.parse( + """ dictionary Foo { float f = -Infinity; }; - """) + """ + ) results = parser.finish() except: threw = True harness.ok(threw, "Only unrestricted values can be initialized to -Infinity") - parser = parser.reset(); + parser = parser.reset() threw = False try: - parser.parse(""" + parser.parse( + """ dictionary Foo { float f = NaN; }; - """) + """ + ) results = parser.finish() except: threw = True harness.ok(threw, "Only unrestricted values can be initialized to NaN") - parser = parser.reset(); + parser = parser.reset() threw = False try: - parser.parse(""" + parser.parse( + """ dictionary Foo { long module; }; - """) + """ + ) results = parser.finish() except: threw = True diff --git a/dom/bindings/parser/tests/test_distinguishability.py b/dom/bindings/parser/tests/test_distinguishability.py index 505b36468d6da5..466f727509d853 100644 --- a/dom/bindings/parser/tests/test_distinguishability.py +++ b/dom/bindings/parser/tests/test_distinguishability.py @@ -1,8 +1,10 @@ def firstArgType(method): return method.signatures()[0][1][0].type + def WebIDLTest(parser, harness): - parser.parse(""" + parser.parse( + """ // Give our dictionary a required member so we don't need to // mess with optional and default values. dictionary Dict { @@ -17,7 +19,8 @@ def WebIDLTest(parser, harness): void passNullableUnion((object? or DOMString) arg); void passNullable(Foo? 
arg); }; - """) + """ + ) results = parser.finish() iface = results[2] @@ -30,31 +33,38 @@ def WebIDLTest(parser, harness): dictType = firstArgType(dictMethod) ifaceType = firstArgType(ifaceMethod) - harness.ok(dictType.isDictionary(), "Should have dictionary type"); - harness.ok(ifaceType.isInterface(), "Should have interface type"); - harness.ok(ifaceType.isCallbackInterface(), "Should have callback interface type"); + harness.ok(dictType.isDictionary(), "Should have dictionary type") + harness.ok(ifaceType.isInterface(), "Should have interface type") + harness.ok(ifaceType.isCallbackInterface(), "Should have callback interface type") - harness.ok(not dictType.isDistinguishableFrom(ifaceType), - "Dictionary not distinguishable from callback interface") - harness.ok(not ifaceType.isDistinguishableFrom(dictType), - "Callback interface not distinguishable from dictionary") + harness.ok( + not dictType.isDistinguishableFrom(ifaceType), + "Dictionary not distinguishable from callback interface", + ) + harness.ok( + not ifaceType.isDistinguishableFrom(dictType), + "Callback interface not distinguishable from dictionary", + ) nullableUnionType = firstArgType(nullableUnionMethod) nullableIfaceType = firstArgType(nullableIfaceMethod) - harness.ok(nullableUnionType.isUnion(), "Should have union type"); - harness.ok(nullableIfaceType.isInterface(), "Should have interface type"); - harness.ok(nullableIfaceType.nullable(), "Should have nullable type"); + harness.ok(nullableUnionType.isUnion(), "Should have union type") + harness.ok(nullableIfaceType.isInterface(), "Should have interface type") + harness.ok(nullableIfaceType.nullable(), "Should have nullable type") - harness.ok(not nullableUnionType.isDistinguishableFrom(nullableIfaceType), - "Nullable type not distinguishable from union with nullable " - "member type") - harness.ok(not nullableIfaceType.isDistinguishableFrom(nullableUnionType), - "Union with nullable member type not distinguishable from " - "nullable type") + harness.ok( + not nullableUnionType.isDistinguishableFrom(nullableIfaceType), + "Nullable type not distinguishable from union with nullable " "member type", + ) + harness.ok( + not nullableIfaceType.isDistinguishableFrom(nullableUnionType), + "Union with nullable member type not distinguishable from " "nullable type", + ) parser = parser.reset() - parser.parse(""" + parser.parse( + """ interface TestIface { void passKid(Kid arg); void passParent(Parent arg); @@ -70,7 +80,8 @@ def WebIDLTest(parser, harness): interface Grandparent {}; interface Unrelated1 {}; interface Unrelated2 {}; - """) + """ + ) results = parser.finish() iface = results[0] @@ -80,21 +91,26 @@ def WebIDLTest(parser, harness): for type1 in argTypes: for type2 in argTypes: - distinguishable = (type1 is not type2 and - (type1 in unrelatedTypes or - type2 in unrelatedTypes)) + distinguishable = type1 is not type2 and ( + type1 in unrelatedTypes or type2 in unrelatedTypes + ) - harness.check(type1.isDistinguishableFrom(type2), - distinguishable, - "Type %s should %sbe distinguishable from type %s" % - (type1, "" if distinguishable else "not ", type2)) - harness.check(type2.isDistinguishableFrom(type1), - distinguishable, - "Type %s should %sbe distinguishable from type %s" % - (type2, "" if distinguishable else "not ", type1)) + harness.check( + type1.isDistinguishableFrom(type2), + distinguishable, + "Type %s should %sbe distinguishable from type %s" + % (type1, "" if distinguishable else "not ", type2), + ) + harness.check( + type2.isDistinguishableFrom(type1), 
+ distinguishable, + "Type %s should %sbe distinguishable from type %s" + % (type2, "" if distinguishable else "not ", type1), + ) parser = parser.reset() - parser.parse(""" + parser.parse( + """ interface Dummy {}; interface TestIface { void method(long arg1, TestIface arg2); @@ -102,17 +118,19 @@ def WebIDLTest(parser, harness): void method(long arg1, Dummy arg2); void method(DOMString arg1, DOMString arg2, DOMString arg3); }; - """) + """ + ) results = parser.finish() - harness.check(len(results[1].members), 1, - "Should look like we have one method") - harness.check(len(results[1].members[0].signatures()), 4, - "Should have four signatures") + harness.check(len(results[1].members), 1, "Should look like we have one method") + harness.check( + len(results[1].members[0].signatures()), 4, "Should have four signatures" + ) parser = parser.reset() threw = False try: - parser.parse(""" + parser.parse( + """ interface Dummy {}; interface TestIface { void method(long arg1, TestIface arg2); @@ -120,19 +138,23 @@ def WebIDLTest(parser, harness): void method(any arg1, Dummy arg2); void method(DOMString arg1, DOMString arg2, DOMString arg3); }; - """) + """ + ) results = parser.finish() except: threw = True - harness.ok(threw, - "Should throw when args before the distinguishing arg are not " - "all the same type") + harness.ok( + threw, + "Should throw when args before the distinguishing arg are not " + "all the same type", + ) parser = parser.reset() threw = False try: - parser.parse(""" + parser.parse( + """ interface Dummy {}; interface TestIface { void method(long arg1, TestIface arg2); @@ -140,7 +162,8 @@ def WebIDLTest(parser, harness): void method(any arg1, DOMString arg2); void method(DOMString arg1, DOMString arg2, DOMString arg3); }; - """) + """ + ) results = parser.finish() except: threw = True @@ -148,57 +171,118 @@ def WebIDLTest(parser, harness): harness.ok(threw, "Should throw when there is no distinguishing index") # Now let's test our whole distinguishability table - argTypes = [ "long", "short", "long?", "short?", "boolean", - "boolean?", "DOMString", "ByteString", "UTF8String", "Enum", "Enum2", - "Interface", "Interface?", - "AncestorInterface", "UnrelatedInterface", "CallbackInterface", - "CallbackInterface?", "CallbackInterface2", - "object", "Callback", "Callback2", "Dict", - "Dict2", "sequence", "sequence", - "record", - "record", - "record", - "record", - "any", "Promise", "Promise?", - "USVString", "JSString", "ArrayBuffer", "ArrayBufferView", - "Uint8Array", "Uint16Array", - "(long or Callback)", "(long or Dict)", + argTypes = [ + "long", + "short", + "long?", + "short?", + "boolean", + "boolean?", + "DOMString", + "ByteString", + "UTF8String", + "Enum", + "Enum2", + "Interface", + "Interface?", + "AncestorInterface", + "UnrelatedInterface", + "CallbackInterface", + "CallbackInterface?", + "CallbackInterface2", + "object", + "Callback", + "Callback2", + "Dict", + "Dict2", + "sequence", + "sequence", + "record", + "record", + "record", + "record", + "any", + "Promise", + "Promise?", + "USVString", + "JSString", + "ArrayBuffer", + "ArrayBufferView", + "Uint8Array", + "Uint16Array", + "(long or Callback)", + "(long or Dict)", ] # Try to categorize things a bit to keep list lengths down def allBut(list1, list2): - return [a for a in list1 if a not in list2 and - (a != "any" and a != "Promise" and a != "Promise?")] - unions = [ "(long or Callback)", "(long or Dict)" ] - numerics = [ "long", "short", "long?", "short?" ] - booleans = [ "boolean", "boolean?" 
] + return [ + a + for a in list1 + if a not in list2 + and (a != "any" and a != "Promise" and a != "Promise?") + ] + + unions = ["(long or Callback)", "(long or Dict)"] + numerics = ["long", "short", "long?", "short?"] + booleans = ["boolean", "boolean?"] primitives = numerics + booleans nonNumerics = allBut(argTypes, numerics + unions) nonBooleans = allBut(argTypes, booleans) - strings = [ "DOMString", "ByteString", "Enum", "Enum2", "USVString", "JSString", "UTF8String" ] + strings = [ + "DOMString", + "ByteString", + "Enum", + "Enum2", + "USVString", + "JSString", + "UTF8String", + ] nonStrings = allBut(argTypes, strings) nonObjects = primitives + strings - objects = allBut(argTypes, nonObjects ) + objects = allBut(argTypes, nonObjects) bufferSourceTypes = ["ArrayBuffer", "ArrayBufferView", "Uint8Array", "Uint16Array"] - interfaces = [ "Interface", "Interface?", "AncestorInterface", - "UnrelatedInterface" ] + bufferSourceTypes - nullables = (["long?", "short?", "boolean?", "Interface?", - "CallbackInterface?", "Dict", "Dict2", - "Date?", "any", "Promise?"] + - allBut(unions, [ "(long or Callback)" ])) - sequences = [ "sequence", "sequence" ] + interfaces = [ + "Interface", + "Interface?", + "AncestorInterface", + "UnrelatedInterface", + ] + bufferSourceTypes + nullables = [ + "long?", + "short?", + "boolean?", + "Interface?", + "CallbackInterface?", + "Dict", + "Dict2", + "Date?", + "any", + "Promise?", + ] + allBut(unions, ["(long or Callback)"]) + sequences = ["sequence", "sequence"] nonUserObjects = nonObjects + interfaces + sequences otherObjects = allBut(argTypes, nonUserObjects + ["object"]) - notRelatedInterfaces = (nonObjects + ["UnrelatedInterface"] + - otherObjects + sequences + bufferSourceTypes) - records = [ "record", "record", - "record", "record" ] # JSString not supported in records + notRelatedInterfaces = ( + nonObjects + + ["UnrelatedInterface"] + + otherObjects + + sequences + + bufferSourceTypes + ) + records = [ + "record", + "record", + "record", + "record", + ] # JSString not supported in records # Build a representation of the distinguishability table as a dict # of dicts, holding True values where needed, holes elsewhere. 
- data = dict(); + data = dict() for type in argTypes: data[type] = dict() + def setDistinguishable(type, types): for other in types: data[type][other] = True @@ -219,8 +303,9 @@ def setDistinguishable(type, types): setDistinguishable("Interface", notRelatedInterfaces) setDistinguishable("Interface?", allBut(notRelatedInterfaces, nullables)) setDistinguishable("AncestorInterface", notRelatedInterfaces) - setDistinguishable("UnrelatedInterface", - allBut(argTypes, ["object", "UnrelatedInterface"])) + setDistinguishable( + "UnrelatedInterface", allBut(argTypes, ["object", "UnrelatedInterface"]) + ) setDistinguishable("CallbackInterface", nonUserObjects) setDistinguishable("CallbackInterface?", allBut(nonUserObjects, nullables)) setDistinguishable("CallbackInterface2", nonUserObjects) @@ -229,10 +314,8 @@ def setDistinguishable(type, types): setDistinguishable("Callback2", nonUserObjects) setDistinguishable("Dict", allBut(nonUserObjects, nullables)) setDistinguishable("Dict2", allBut(nonUserObjects, nullables)) - setDistinguishable("sequence", - allBut(argTypes, sequences + ["object"])) - setDistinguishable("sequence", - allBut(argTypes, sequences + ["object"])) + setDistinguishable("sequence", allBut(argTypes, sequences + ["object"])) + setDistinguishable("sequence", allBut(argTypes, sequences + ["object"])) setDistinguishable("record", nonUserObjects) setDistinguishable("record", nonUserObjects) # JSString not supported in records @@ -242,13 +325,18 @@ def setDistinguishable(type, types): setDistinguishable("Promise", []) setDistinguishable("Promise?", []) setDistinguishable("ArrayBuffer", allBut(argTypes, ["ArrayBuffer", "object"])) - setDistinguishable("ArrayBufferView", allBut(argTypes, ["ArrayBufferView", "Uint8Array", "Uint16Array", "object"])) - setDistinguishable("Uint8Array", allBut(argTypes, ["ArrayBufferView", "Uint8Array", "object"])) - setDistinguishable("Uint16Array", allBut(argTypes, ["ArrayBufferView", "Uint16Array", "object"])) - setDistinguishable("(long or Callback)", - allBut(nonUserObjects, numerics)) - setDistinguishable("(long or Dict)", - allBut(nonUserObjects, numerics + nullables)) + setDistinguishable( + "ArrayBufferView", + allBut(argTypes, ["ArrayBufferView", "Uint8Array", "Uint16Array", "object"]), + ) + setDistinguishable( + "Uint8Array", allBut(argTypes, ["ArrayBufferView", "Uint8Array", "object"]) + ) + setDistinguishable( + "Uint16Array", allBut(argTypes, ["ArrayBufferView", "Uint16Array", "object"]) + ) + setDistinguishable("(long or Callback)", allBut(nonUserObjects, numerics)) + setDistinguishable("(long or Dict)", allBut(nonUserObjects, numerics + nullables)) def areDistinguishable(type1, type2): return data[type1].get(type2, False) @@ -284,11 +372,17 @@ def checkDistinguishability(parser, type1, type2): threw = True if areDistinguishable(type1, type2): - harness.ok(not threw, - "Should not throw for '%s' and '%s' because they are distinguishable" % (type1, type2)) + harness.ok( + not threw, + "Should not throw for '%s' and '%s' because they are distinguishable" + % (type1, type2), + ) else: - harness.ok(threw, - "Should throw for '%s' and '%s' because they are not distinguishable" % (type1, type2)) + harness.ok( + threw, + "Should throw for '%s' and '%s' because they are not distinguishable" + % (type1, type2), + ) # Enumerate over everything in both orders, since order matters in # terms of our implementation of distinguishability checks diff --git a/dom/bindings/parser/tests/test_double_null.py b/dom/bindings/parser/tests/test_double_null.py index 
700c7eade006b8..a8876a7fd2db2d 100644 --- a/dom/bindings/parser/tests/test_double_null.py +++ b/dom/bindings/parser/tests/test_double_null.py @@ -1,11 +1,13 @@ def WebIDLTest(parser, harness): threw = False try: - parser.parse(""" + parser.parse( + """ interface DoubleNull { attribute byte?? foo; }; - """) + """ + ) results = parser.finish() except: diff --git a/dom/bindings/parser/tests/test_duplicate_qualifiers.py b/dom/bindings/parser/tests/test_duplicate_qualifiers.py index 4874b3aafe6e92..89a4e1acf0b8f3 100644 --- a/dom/bindings/parser/tests/test_duplicate_qualifiers.py +++ b/dom/bindings/parser/tests/test_duplicate_qualifiers.py @@ -1,11 +1,13 @@ def WebIDLTest(parser, harness): threw = False try: - parser.parse(""" + parser.parse( + """ interface DuplicateQualifiers1 { getter getter byte foo(unsigned long index); }; - """) + """ + ) results = parser.finish() except: @@ -15,11 +17,13 @@ def WebIDLTest(parser, harness): threw = False try: - parser.parse(""" + parser.parse( + """ interface DuplicateQualifiers2 { setter setter byte foo(unsigned long index, byte value); }; - """) + """ + ) results = parser.finish() except: @@ -29,11 +33,13 @@ def WebIDLTest(parser, harness): threw = False try: - parser.parse(""" + parser.parse( + """ interface DuplicateQualifiers4 { deleter deleter byte foo(unsigned long index); }; - """) + """ + ) results = parser.finish() except: @@ -43,11 +49,13 @@ def WebIDLTest(parser, harness): threw = False try: - parser.parse(""" + parser.parse( + """ interface DuplicateQualifiers5 { getter deleter getter byte foo(unsigned long index); }; - """) + """ + ) results = parser.finish() except: diff --git a/dom/bindings/parser/tests/test_empty_enum.py b/dom/bindings/parser/tests/test_empty_enum.py index ee0079f06daf0c..09333a659cdcdc 100644 --- a/dom/bindings/parser/tests/test_empty_enum.py +++ b/dom/bindings/parser/tests/test_empty_enum.py @@ -1,11 +1,14 @@ import WebIDL + def WebIDLTest(parser, harness): try: - parser.parse(""" + parser.parse( + """ enum TestEmptyEnum { }; - """) + """ + ) harness.ok(False, "Should have thrown!") except: diff --git a/dom/bindings/parser/tests/test_empty_sequence_default_value.py b/dom/bindings/parser/tests/test_empty_sequence_default_value.py index a713266c88e57d..84c7a6e413dc17 100644 --- a/dom/bindings/parser/tests/test_empty_sequence_default_value.py +++ b/dom/bindings/parser/tests/test_empty_sequence_default_value.py @@ -1,13 +1,16 @@ import WebIDL + def WebIDLTest(parser, harness): threw = False try: - parser.parse(""" + parser.parse( + """ interface X { const sequence foo = []; }; - """) + """ + ) results = parser.finish() except Exception as x: @@ -17,29 +20,35 @@ def WebIDLTest(parser, harness): parser = parser.reset() - parser.parse(""" + parser.parse( + """ interface X { void foo(optional sequence arg = []); }; - """) - results = parser.finish(); - - harness.ok(isinstance( - results[0].members[0].signatures()[0][1][0].defaultValue, - WebIDL.IDLEmptySequenceValue), - "Should have IDLEmptySequenceValue as default value of argument") + """ + ) + results = parser.finish() + + harness.ok( + isinstance( + results[0].members[0].signatures()[0][1][0].defaultValue, + WebIDL.IDLEmptySequenceValue, + ), + "Should have IDLEmptySequenceValue as default value of argument", + ) parser = parser.reset() - parser.parse(""" + parser.parse( + """ dictionary X { sequence foo = []; }; - """) - results = parser.finish(); - - harness.ok(isinstance(results[0].members[0].defaultValue, - WebIDL.IDLEmptySequenceValue), - "Should have 
IDLEmptySequenceValue as default value of " - "dictionary member") - + """ + ) + results = parser.finish() + + harness.ok( + isinstance(results[0].members[0].defaultValue, WebIDL.IDLEmptySequenceValue), + "Should have IDLEmptySequenceValue as default value of " "dictionary member", + ) diff --git a/dom/bindings/parser/tests/test_enum.py b/dom/bindings/parser/tests/test_enum.py index 86228939181579..1888a379cfb3ac 100644 --- a/dom/bindings/parser/tests/test_enum.py +++ b/dom/bindings/parser/tests/test_enum.py @@ -1,7 +1,9 @@ import WebIDL + def WebIDLTest(parser, harness): - parser.parse(""" + parser.parse( + """ enum TestEnum { "", "foo", @@ -12,16 +14,15 @@ def WebIDLTest(parser, harness): TestEnum doFoo(boolean arg); readonly attribute TestEnum foo; }; - """) + """ + ) results = parser.finish() harness.ok(True, "TestEnumInterfaces interface parsed without error.") harness.check(len(results), 2, "Should be two productions") - harness.ok(isinstance(results[0], WebIDL.IDLEnum), - "Should be an IDLEnum") - harness.ok(isinstance(results[1], WebIDL.IDLInterface), - "Should be an IDLInterface") + harness.ok(isinstance(results[0], WebIDL.IDLEnum), "Should be an IDLEnum") + harness.ok(isinstance(results[1], WebIDL.IDLInterface), "Should be an IDLInterface") enum = results[0] harness.check(enum.identifier.QName(), "::TestEnum", "Enum has the right QName") @@ -30,32 +31,41 @@ def WebIDLTest(parser, harness): iface = results[1] - harness.check(iface.identifier.QName(), "::TestEnumInterface", "Interface has the right QName") - harness.check(iface.identifier.name, "TestEnumInterface", "Interface has the right name") + harness.check( + iface.identifier.QName(), "::TestEnumInterface", "Interface has the right QName" + ) + harness.check( + iface.identifier.name, "TestEnumInterface", "Interface has the right name" + ) harness.check(iface.parent, None, "Interface has no parent") members = iface.members harness.check(len(members), 2, "Should be two members") - harness.ok(isinstance(members[0], WebIDL.IDLMethod), - "Should be an IDLMethod") + harness.ok(isinstance(members[0], WebIDL.IDLMethod), "Should be an IDLMethod") method = members[0] - harness.check(method.identifier.QName(), "::TestEnumInterface::doFoo", - "Method has correct QName") + harness.check( + method.identifier.QName(), + "::TestEnumInterface::doFoo", + "Method has correct QName", + ) harness.check(method.identifier.name, "doFoo", "Method has correct name") signatures = method.signatures() harness.check(len(signatures), 1, "Expect one signature") (returnType, arguments) = signatures[0] - harness.check(str(returnType), "TestEnum (Wrapper)", "Method type is the correct name") + harness.check( + str(returnType), "TestEnum (Wrapper)", "Method type is the correct name" + ) harness.check(len(arguments), 1, "Method has the right number of arguments") arg = arguments[0] harness.ok(isinstance(arg, WebIDL.IDLArgument), "Should be an IDLArgument") harness.check(str(arg.type), "Boolean", "Argument has the right type") attr = members[1] - harness.check(attr.identifier.QName(), "::TestEnumInterface::foo", - "Attr has correct QName") + harness.check( + attr.identifier.QName(), "::TestEnumInterface::foo", "Attr has correct QName" + ) harness.check(attr.identifier.name, "foo", "Attr has correct name") harness.check(str(attr.type), "TestEnum (Wrapper)", "Attr type is the correct name") @@ -64,7 +74,8 @@ def WebIDLTest(parser, harness): parser = parser.reset() threw = False try: - parser.parse(""" + parser.parse( + """ enum Enum { "a", "b", @@ -73,7 +84,8 @@ 
def WebIDLTest(parser, harness): interface TestInterface { void foo(optional Enum e = "d"); }; - """) + """ + ) results = parser.finish() except: threw = True @@ -82,12 +94,14 @@ def WebIDLTest(parser, harness): # Now reset our parser parser = parser.reset() - parser.parse(""" + parser.parse( + """ enum Enum { "a", "b", "c", }; - """) + """ + ) results = parser.finish() harness.check(len(results), 1, "Should allow trailing comma in enum") diff --git a/dom/bindings/parser/tests/test_enum_duplicate_values.py b/dom/bindings/parser/tests/test_enum_duplicate_values.py index 51205d209e731e..8969281e1c7c6b 100644 --- a/dom/bindings/parser/tests/test_enum_duplicate_values.py +++ b/dom/bindings/parser/tests/test_enum_duplicate_values.py @@ -1,13 +1,16 @@ import WebIDL + def WebIDLTest(parser, harness): try: - parser.parse(""" + parser.parse( + """ enum TestEnumDuplicateValue { "", "" }; - """) + """ + ) harness.ok(False, "Should have thrown!") except: harness.ok(True, "Enum TestEnumDuplicateValue should throw") diff --git a/dom/bindings/parser/tests/test_error_colno.py b/dom/bindings/parser/tests/test_error_colno.py index 7afd15513c6d04..1c9bb065580601 100644 --- a/dom/bindings/parser/tests/test_error_colno.py +++ b/dom/bindings/parser/tests/test_error_colno.py @@ -1,20 +1,24 @@ import WebIDL + def WebIDLTest(parser, harness): # Check that error messages put the '^' in the right place. threw = False - input = 'interface ?' + input = "interface ?" try: parser.parse(input) results = parser.finish() except WebIDL.WebIDLError as e: threw = True - lines = str(e).split('\n') + lines = str(e).split("\n") - harness.check(len(lines), 3, 'Expected number of lines in error message') - harness.check(lines[1], input, 'Second line shows error') - harness.check(lines[2], ' ' * (len(input) - 1) + '^', - 'Correct column pointer in error message') + harness.check(len(lines), 3, "Expected number of lines in error message") + harness.check(lines[1], input, "Second line shows error") + harness.check( + lines[2], + " " * (len(input) - 1) + "^", + "Correct column pointer in error message", + ) harness.ok(threw, "Should have thrown.") diff --git a/dom/bindings/parser/tests/test_error_lineno.py b/dom/bindings/parser/tests/test_error_lineno.py index 70bb1883682502..0d10e006787984 100644 --- a/dom/bindings/parser/tests/test_error_lineno.py +++ b/dom/bindings/parser/tests/test_error_lineno.py @@ -1,5 +1,6 @@ import WebIDL + def WebIDLTest(parser, harness): # Check that error messages put the '^' in the right place. @@ -16,13 +17,22 @@ def WebIDLTest(parser, harness): results = parser.finish() except WebIDL.WebIDLError as e: threw = True - lines = str(e).split('\n') + lines = str(e).split("\n") - harness.check(len(lines), 3, 'Expected number of lines in error message') - harness.ok(lines[0].endswith('line 6:10'), 'First line of error should end with "line 6:10", but was "%s".' % lines[0]) - harness.check(lines[1], 'interface ?', 'Second line of error message is the line which caused the error.') - harness.check(lines[2], ' ' * (len('interface ?') - 1) + '^', - 'Correct column pointer in error message.') + harness.check(len(lines), 3, "Expected number of lines in error message") + harness.ok( + lines[0].endswith("line 6:10"), + 'First line of error should end with "line 6:10", but was "%s".' 
% lines[0], + ) + harness.check( + lines[1], + "interface ?", + "Second line of error message is the line which caused the error.", + ) + harness.check( + lines[2], + " " * (len("interface ?") - 1) + "^", + "Correct column pointer in error message.", + ) harness.ok(threw, "Should have thrown.") - diff --git a/dom/bindings/parser/tests/test_exposed_extended_attribute.py b/dom/bindings/parser/tests/test_exposed_extended_attribute.py index e0241a56426192..00153b54ec735a 100644 --- a/dom/bindings/parser/tests/test_exposed_extended_attribute.py +++ b/dom/bindings/parser/tests/test_exposed_extended_attribute.py @@ -1,7 +1,9 @@ import WebIDL + def WebIDLTest(parser, harness): - parser.parse(""" + parser.parse( + """ [Global, Exposed=Foo] interface Foo {}; [Global=(Bar, Bar1,Bar2), Exposed=Bar] interface Bar {}; [Global=(Baz, Baz2), Exposed=Baz] interface Baz {}; @@ -18,39 +20,56 @@ def WebIDLTest(parser, harness): partial interface Iface { void method2(); }; - """) + """ + ) results = parser.finish() - harness.check(len(results), 5, "Should know about five things"); + harness.check(len(results), 5, "Should know about five things") iface = results[3] - harness.ok(isinstance(iface, WebIDL.IDLInterface), - "Should have an interface here"); + harness.ok(isinstance(iface, WebIDL.IDLInterface), "Should have an interface here") members = iface.members harness.check(len(members), 3, "Should have three members") - harness.ok(members[0].exposureSet == set(["Foo", "Bar"]), - "method1 should have the right exposure set") - harness.ok(members[0]._exposureGlobalNames == set(["Foo", "Bar1"]), - "method1 should have the right exposure global names") - - harness.ok(members[1].exposureSet == set(["Bar"]), - "attr should have the right exposure set") - harness.ok(members[1]._exposureGlobalNames == set(["Bar1"]), - "attr should have the right exposure global names") - - harness.ok(members[2].exposureSet == set(["Foo"]), - "method2 should have the right exposure set") - harness.ok(members[2]._exposureGlobalNames == set(["Foo"]), - "method2 should have the right exposure global names") - - harness.ok(iface.exposureSet == set(["Foo", "Bar"]), - "Iface should have the right exposure set") - harness.ok(iface._exposureGlobalNames == set(["Foo", "Bar1"]), - "Iface should have the right exposure global names") + harness.ok( + members[0].exposureSet == set(["Foo", "Bar"]), + "method1 should have the right exposure set", + ) + harness.ok( + members[0]._exposureGlobalNames == set(["Foo", "Bar1"]), + "method1 should have the right exposure global names", + ) + + harness.ok( + members[1].exposureSet == set(["Bar"]), + "attr should have the right exposure set", + ) + harness.ok( + members[1]._exposureGlobalNames == set(["Bar1"]), + "attr should have the right exposure global names", + ) + + harness.ok( + members[2].exposureSet == set(["Foo"]), + "method2 should have the right exposure set", + ) + harness.ok( + members[2]._exposureGlobalNames == set(["Foo"]), + "method2 should have the right exposure global names", + ) + + harness.ok( + iface.exposureSet == set(["Foo", "Bar"]), + "Iface should have the right exposure set", + ) + harness.ok( + iface._exposureGlobalNames == set(["Foo", "Bar1"]), + "Iface should have the right exposure global names", + ) parser = parser.reset() - parser.parse(""" + parser.parse( + """ [Global, Exposed=Foo] interface Foo {}; [Global=(Bar, Bar1, Bar2), Exposed=Bar] interface Bar {}; [Global=(Baz, Baz2), Exposed=Baz] interface Baz {}; @@ -59,28 +78,36 @@ def WebIDLTest(parser, harness): interface Iface2 
{ void method3(); }; - """) + """ + ) results = parser.finish() - harness.check(len(results), 4, "Should know about four things"); + harness.check(len(results), 4, "Should know about four things") iface = results[3] - harness.ok(isinstance(iface, WebIDL.IDLInterface), - "Should have an interface here"); + harness.ok(isinstance(iface, WebIDL.IDLInterface), "Should have an interface here") members = iface.members harness.check(len(members), 1, "Should have one member") - harness.ok(members[0].exposureSet == set(["Foo"]), - "method3 should have the right exposure set") - harness.ok(members[0]._exposureGlobalNames == set(["Foo"]), - "method3 should have the right exposure global names") - - harness.ok(iface.exposureSet == set(["Foo"]), - "Iface2 should have the right exposure set") - harness.ok(iface._exposureGlobalNames == set(["Foo"]), - "Iface2 should have the right exposure global names") + harness.ok( + members[0].exposureSet == set(["Foo"]), + "method3 should have the right exposure set", + ) + harness.ok( + members[0]._exposureGlobalNames == set(["Foo"]), + "method3 should have the right exposure global names", + ) + + harness.ok( + iface.exposureSet == set(["Foo"]), "Iface2 should have the right exposure set" + ) + harness.ok( + iface._exposureGlobalNames == set(["Foo"]), + "Iface2 should have the right exposure global names", + ) parser = parser.reset() - parser.parse(""" + parser.parse( + """ [Global, Exposed=Foo] interface Foo {}; [Global=(Bar, Bar1, Bar2), Exposed=Bar] interface Bar {}; [Global=(Baz, Baz2), Exposed=Baz] interface Baz {}; @@ -96,33 +123,43 @@ def WebIDLTest(parser, harness): }; Iface3 includes Mixin; - """) + """ + ) results = parser.finish() - harness.check(len(results), 6, "Should know about six things"); + harness.check(len(results), 6, "Should know about six things") iface = results[3] - harness.ok(isinstance(iface, WebIDL.IDLInterface), - "Should have an interface here"); + harness.ok(isinstance(iface, WebIDL.IDLInterface), "Should have an interface here") members = iface.members harness.check(len(members), 2, "Should have two members") - harness.ok(members[0].exposureSet == set(["Foo"]), - "method4 should have the right exposure set") - harness.ok(members[0]._exposureGlobalNames == set(["Foo"]), - "method4 should have the right exposure global names") - - harness.ok(members[1].exposureSet == set(["Foo", "Bar"]), - "method5 should have the right exposure set") - harness.ok(members[1]._exposureGlobalNames == set(["Foo", "Bar1"]), - "method5 should have the right exposure global names") + harness.ok( + members[0].exposureSet == set(["Foo"]), + "method4 should have the right exposure set", + ) + harness.ok( + members[0]._exposureGlobalNames == set(["Foo"]), + "method4 should have the right exposure global names", + ) + + harness.ok( + members[1].exposureSet == set(["Foo", "Bar"]), + "method5 should have the right exposure set", + ) + harness.ok( + members[1]._exposureGlobalNames == set(["Foo", "Bar1"]), + "method5 should have the right exposure global names", + ) parser = parser.reset() threw = False try: - parser.parse(""" + parser.parse( + """ [Exposed=Foo] interface Bar { }; - """) + """ + ) results = parser.finish() except Exception as x: @@ -133,12 +170,14 @@ def WebIDLTest(parser, harness): parser = parser.reset() threw = False try: - parser.parse(""" + parser.parse( + """ interface Bar { [Exposed=Foo] readonly attribute bool attr; }; - """) + """ + ) results = parser.finish() except Exception as x: @@ -149,12 +188,14 @@ def WebIDLTest(parser, harness): 
parser = parser.reset() threw = False try: - parser.parse(""" + parser.parse( + """ interface Bar { [Exposed=Foo] void operation(); }; - """) + """ + ) results = parser.finish() except Exception as x: @@ -165,12 +206,14 @@ def WebIDLTest(parser, harness): parser = parser.reset() threw = False try: - parser.parse(""" + parser.parse( + """ interface Bar { [Exposed=Foo] const long constant = 5; }; - """) + """ + ) results = parser.finish() except Exception as x: @@ -181,7 +224,8 @@ def WebIDLTest(parser, harness): parser = parser.reset() threw = False try: - parser.parse(""" + parser.parse( + """ [Global, Exposed=Foo] interface Foo {}; [Global, Exposed=Bar] interface Bar {}; @@ -190,16 +234,20 @@ def WebIDLTest(parser, harness): [Exposed=Bar] void method(); }; - """) + """ + ) results = parser.finish() except Exception as x: threw = True - harness.ok(threw, "Should have thrown on member exposed where its interface is not.") + harness.ok( + threw, "Should have thrown on member exposed where its interface is not." + ) parser = parser.reset() - parser.parse(""" + parser.parse( + """ [Global, Exposed=Foo] interface Foo {}; [Global, Exposed=Bar] interface Bar {}; @@ -214,25 +262,31 @@ def WebIDLTest(parser, harness): }; Baz includes Mixin; - """) + """ + ) results = parser.finish() - harness.check(len(results), 5, "Should know about five things"); + harness.check(len(results), 5, "Should know about five things") iface = results[2] - harness.ok(isinstance(iface, WebIDL.IDLInterface), - "Should have an interface here"); + harness.ok(isinstance(iface, WebIDL.IDLInterface), "Should have an interface here") members = iface.members harness.check(len(members), 2, "Should have two members") - harness.ok(members[0].exposureSet == set(["Foo"]), - "method should have the right exposure set") - harness.ok(members[0]._exposureGlobalNames == set(["Foo"]), - "method should have the right exposure global names") - - harness.ok(members[1].exposureSet == set(["Bar"]), - "otherMethod should have the right exposure set") - harness.ok(members[1]._exposureGlobalNames == set(["Bar"]), - "otherMethod should have the right exposure global names") - - + harness.ok( + members[0].exposureSet == set(["Foo"]), + "method should have the right exposure set", + ) + harness.ok( + members[0]._exposureGlobalNames == set(["Foo"]), + "method should have the right exposure global names", + ) + + harness.ok( + members[1].exposureSet == set(["Bar"]), + "otherMethod should have the right exposure set", + ) + harness.ok( + members[1]._exposureGlobalNames == set(["Bar"]), + "otherMethod should have the right exposure global names", + ) diff --git a/dom/bindings/parser/tests/test_extended_attributes.py b/dom/bindings/parser/tests/test_extended_attributes.py index 144c945bc10d1f..c912508db6da9b 100644 --- a/dom/bindings/parser/tests/test_extended_attributes.py +++ b/dom/bindings/parser/tests/test_extended_attributes.py @@ -1,44 +1,52 @@ import WebIDL + def WebIDLTest(parser, harness): - parser.parse(""" + parser.parse( + """ [NoInterfaceObject] interface TestExtendedAttr { [Unforgeable] readonly attribute byte b; }; - """) + """ + ) results = parser.finish() parser = parser.reset() - parser.parse(""" + parser.parse( + """ [Pref="foo.bar",Pref=flop] interface TestExtendedAttr { [Pref="foo.bar"] attribute byte b; }; - """) + """ + ) results = parser.finish() parser = parser.reset() - parser.parse(""" + parser.parse( + """ interface TestLenientThis { [LenientThis] attribute byte b; }; - """) + """ + ) results = parser.finish() - 
harness.ok(results[0].members[0].hasLenientThis(), - "Should have a lenient this") + harness.ok(results[0].members[0].hasLenientThis(), "Should have a lenient this") parser = parser.reset() threw = False try: - parser.parse(""" + parser.parse( + """ interface TestLenientThis2 { [LenientThis=something] attribute byte b; }; - """) + """ + ) results = parser.finish() except: threw = True @@ -46,29 +54,36 @@ def WebIDLTest(parser, harness): harness.ok(threw, "[LenientThis] must take no arguments") parser = parser.reset() - parser.parse(""" + parser.parse( + """ interface TestClamp { void testClamp([Clamp] long foo); void testNotClamp(long foo); }; - """) + """ + ) results = parser.finish() # Pull out the first argument out of the arglist of the first (and # only) signature. - harness.ok(results[0].members[0].signatures()[0][1][0].type.hasClamp(), - "Should be clamped") - harness.ok(not results[0].members[1].signatures()[0][1][0].type.hasClamp(), - "Should not be clamped") + harness.ok( + results[0].members[0].signatures()[0][1][0].type.hasClamp(), "Should be clamped" + ) + harness.ok( + not results[0].members[1].signatures()[0][1][0].type.hasClamp(), + "Should not be clamped", + ) parser = parser.reset() threw = False try: - parser.parse(""" + parser.parse( + """ interface TestClamp2 { void testClamp([Clamp=something] long foo); }; - """) + """ + ) results = parser.finish() except: threw = True @@ -76,32 +91,39 @@ def WebIDLTest(parser, harness): harness.ok(threw, "[Clamp] must take no arguments") parser = parser.reset() - parser.parse(""" + parser.parse( + """ interface TestEnforceRange { void testEnforceRange([EnforceRange] long foo); void testNotEnforceRange(long foo); }; - """) + """ + ) results = parser.finish() # Pull out the first argument out of the arglist of the first (and # only) signature. 
- harness.ok(results[0].members[0].signatures()[0][1][0].type.hasEnforceRange(), - "Should be enforceRange") - harness.ok(not results[0].members[1].signatures()[0][1][0].type.hasEnforceRange(), - "Should not be enforceRange") + harness.ok( + results[0].members[0].signatures()[0][1][0].type.hasEnforceRange(), + "Should be enforceRange", + ) + harness.ok( + not results[0].members[1].signatures()[0][1][0].type.hasEnforceRange(), + "Should not be enforceRange", + ) parser = parser.reset() threw = False try: - parser.parse(""" + parser.parse( + """ interface TestEnforceRange2 { void testEnforceRange([EnforceRange=something] long foo); }; - """) + """ + ) results = parser.finish() except: threw = True harness.ok(threw, "[EnforceRange] must take no arguments") - diff --git a/dom/bindings/parser/tests/test_float_types.py b/dom/bindings/parser/tests/test_float_types.py index b7325cf9d268f7..d077f8dc334dd4 100644 --- a/dom/bindings/parser/tests/test_float_types.py +++ b/dom/bindings/parser/tests/test_float_types.py @@ -1,7 +1,9 @@ import WebIDL + def WebIDLTest(parser, harness): - parser.parse(""" + parser.parse( + """ typedef float myFloat; typedef unrestricted float myUnrestrictedFloat; interface FloatTypes { @@ -32,14 +34,14 @@ def WebIDLTest(parser, harness): [LenientFloat] void m6(sequence arg); }; - """) + """ + ) results = parser.finish() harness.check(len(results), 3, "Should be two typedefs and one interface.") iface = results[2] - harness.ok(isinstance(iface, WebIDL.IDLInterface), - "Should be an IDLInterface") + harness.ok(isinstance(iface, WebIDL.IDLInterface), "Should be an IDLInterface") types = [a.type for a in iface.members if a.isAttr()] harness.ok(types[0].isFloat(), "'float' is a float") harness.ok(not types[0].isUnrestricted(), "'float' is not unrestricted") @@ -55,19 +57,23 @@ def WebIDLTest(parser, harness): argtypes = [a.type for a in method.signatures()[0][1]] for (idx, type) in enumerate(argtypes): harness.ok(type.isFloat(), "Type %d should be float" % idx) - harness.check(type.isUnrestricted(), idx >= 5, - "Type %d should %sbe unrestricted" % ( - idx, "" if idx >= 4 else "not ")) + harness.check( + type.isUnrestricted(), + idx >= 5, + "Type %d should %sbe unrestricted" % (idx, "" if idx >= 4 else "not "), + ) parser = parser.reset() threw = False try: - parser.parse(""" + parser.parse( + """ interface FloatTypes { [LenientFloat] long m(float arg); }; - """) + """ + ) except Exception as x: threw = True harness.ok(threw, "[LenientFloat] only allowed on void methods") @@ -75,51 +81,65 @@ def WebIDLTest(parser, harness): parser = parser.reset() threw = False try: - parser.parse(""" + parser.parse( + """ interface FloatTypes { [LenientFloat] void m(unrestricted float arg); }; - """) + """ + ) except Exception as x: threw = True - harness.ok(threw, "[LenientFloat] only allowed on methods with unrestricted float args") + harness.ok( + threw, "[LenientFloat] only allowed on methods with unrestricted float args" + ) parser = parser.reset() threw = False try: - parser.parse(""" + parser.parse( + """ interface FloatTypes { [LenientFloat] void m(sequence arg); }; - """) + """ + ) except Exception as x: threw = True - harness.ok(threw, "[LenientFloat] only allowed on methods with unrestricted float args (2)") + harness.ok( + threw, "[LenientFloat] only allowed on methods with unrestricted float args (2)" + ) parser = parser.reset() threw = False try: - parser.parse(""" + parser.parse( + """ interface FloatTypes { [LenientFloat] void m((unrestricted float or FloatTypes) arg); }; - """) 
+ """ + ) except Exception as x: threw = True - harness.ok(threw, "[LenientFloat] only allowed on methods with unrestricted float args (3)") + harness.ok( + threw, "[LenientFloat] only allowed on methods with unrestricted float args (3)" + ) parser = parser.reset() threw = False try: - parser.parse(""" + parser.parse( + """ interface FloatTypes { [LenientFloat] readonly attribute float foo; }; - """) + """ + ) except Exception as x: threw = True harness.ok(threw, "[LenientFloat] only allowed on writable attributes") diff --git a/dom/bindings/parser/tests/test_forward_decl.py b/dom/bindings/parser/tests/test_forward_decl.py index cac24c832ccaf1..1c81718400a071 100644 --- a/dom/bindings/parser/tests/test_forward_decl.py +++ b/dom/bindings/parser/tests/test_forward_decl.py @@ -1,14 +1,17 @@ import WebIDL + def WebIDLTest(parser, harness): - parser.parse(""" + parser.parse( + """ interface ForwardDeclared; interface ForwardDeclared; interface TestForwardDecl { attribute ForwardDeclared foo; }; - """) + """ + ) results = parser.finish() diff --git a/dom/bindings/parser/tests/test_global_extended_attr.py b/dom/bindings/parser/tests/test_global_extended_attr.py index 28b79642d86b7d..5f5bb0279c5c3d 100644 --- a/dom/bindings/parser/tests/test_global_extended_attr.py +++ b/dom/bindings/parser/tests/test_global_extended_attr.py @@ -1,106 +1,129 @@ def WebIDLTest(parser, harness): - parser.parse(""" + parser.parse( + """ [Global, Exposed=Foo] interface Foo : Bar { getter any(DOMString name); }; [Exposed=Foo] interface Bar {}; - """) + """ + ) results = parser.finish() - harness.ok(results[0].isOnGlobalProtoChain(), - "[Global] interface should be on global's proto chain") - harness.ok(results[1].isOnGlobalProtoChain(), - "[Global] interface should be on global's proto chain") + harness.ok( + results[0].isOnGlobalProtoChain(), + "[Global] interface should be on global's proto chain", + ) + harness.ok( + results[1].isOnGlobalProtoChain(), + "[Global] interface should be on global's proto chain", + ) parser = parser.reset() threw = False try: - parser.parse(""" + parser.parse( + """ [Global, Exposed=Foo] interface Foo { getter any(DOMString name); setter void(DOMString name, any arg); }; - """) + """ + ) results = parser.finish() except: threw = True - harness.ok(threw, - "Should have thrown for [Global] used on an interface with a " - "named setter") + harness.ok( + threw, + "Should have thrown for [Global] used on an interface with a " "named setter", + ) parser = parser.reset() threw = False try: - parser.parse(""" + parser.parse( + """ [Global, Exposed=Foo] interface Foo { getter any(DOMString name); deleter void(DOMString name); }; - """) + """ + ) results = parser.finish() except: threw = True - harness.ok(threw, - "Should have thrown for [Global] used on an interface with a " - "named deleter") + harness.ok( + threw, + "Should have thrown for [Global] used on an interface with a " "named deleter", + ) parser = parser.reset() threw = False try: - parser.parse(""" + parser.parse( + """ [Global, OverrideBuiltins, Exposed=Foo] interface Foo { }; - """) + """ + ) results = parser.finish() except: threw = True - harness.ok(threw, - "Should have thrown for [Global] used on an interface with a " - "[OverrideBuiltins]") + harness.ok( + threw, + "Should have thrown for [Global] used on an interface with a " + "[OverrideBuiltins]", + ) parser = parser.reset() threw = False try: - parser.parse(""" + parser.parse( + """ [Global, Exposed=Foo] interface Foo : Bar { }; [OverrideBuiltins, Exposed=Foo] interface 
Bar { }; - """) + """ + ) results = parser.finish() except: threw = True - harness.ok(threw, - "Should have thrown for [Global] used on an interface with an " - "[OverrideBuiltins] ancestor") + harness.ok( + threw, + "Should have thrown for [Global] used on an interface with an " + "[OverrideBuiltins] ancestor", + ) parser = parser.reset() threw = False try: - parser.parse(""" + parser.parse( + """ [Global, Exposed=Foo] interface Foo { }; [Exposed=Foo] interface Bar : Foo { }; - """) + """ + ) results = parser.finish() except: threw = True - harness.ok(threw, - "Should have thrown for [Global] used on an interface with a " - "descendant") + harness.ok( + threw, + "Should have thrown for [Global] used on an interface with a " "descendant", + ) diff --git a/dom/bindings/parser/tests/test_identifier_conflict.py b/dom/bindings/parser/tests/test_identifier_conflict.py index 0e9a6654aa71a0..1f841056a1f391 100644 --- a/dom/bindings/parser/tests/test_identifier_conflict.py +++ b/dom/bindings/parser/tests/test_identifier_conflict.py @@ -1,39 +1,49 @@ # Import the WebIDL module, so we can do isinstance checks and whatnot import WebIDL + def WebIDLTest(parser, harness): try: - parser.parse(""" + parser.parse( + """ enum Foo { "a" }; interface Foo; - """) + """ + ) results = parser.finish() harness.ok(False, "Should fail to parse") except Exception as e: - harness.ok("Name collision" in e.message, - "Should have name collision for interface") + harness.ok( + "Name collision" in e.message, "Should have name collision for interface" + ) parser = parser.reset() try: - parser.parse(""" + parser.parse( + """ dictionary Foo { long x; }; enum Foo { "a" }; - """) + """ + ) results = parser.finish() harness.ok(False, "Should fail to parse") except Exception as e: - harness.ok("Name collision" in e.message, - "Should have name collision for dictionary") + harness.ok( + "Name collision" in e.message, "Should have name collision for dictionary" + ) parser = parser.reset() try: - parser.parse(""" + parser.parse( + """ enum Foo { "a" }; enum Foo { "b" }; - """) + """ + ) results = parser.finish() harness.ok(False, "Should fail to parse") except Exception as e: - harness.ok("Multiple unresolvable definitions" in e.message, - "Should have name collision for dictionary") - + harness.ok( + "Multiple unresolvable definitions" in e.message, + "Should have name collision for dictionary", + ) diff --git a/dom/bindings/parser/tests/test_incomplete_parent.py b/dom/bindings/parser/tests/test_incomplete_parent.py index 1f520a28e1696c..c94a62817f1c03 100644 --- a/dom/bindings/parser/tests/test_incomplete_parent.py +++ b/dom/bindings/parser/tests/test_incomplete_parent.py @@ -1,7 +1,9 @@ import WebIDL + def WebIDLTest(parser, harness): - parser.parse(""" + parser.parse( + """ interface TestIncompleteParent : NotYetDefined { void foo(); }; @@ -11,7 +13,8 @@ def WebIDLTest(parser, harness): interface EvenHigherOnTheChain { }; - """) + """ + ) parser.finish() diff --git a/dom/bindings/parser/tests/test_incomplete_types.py b/dom/bindings/parser/tests/test_incomplete_types.py index fdc39604070908..0d54f708bba72d 100644 --- a/dom/bindings/parser/tests/test_incomplete_types.py +++ b/dom/bindings/parser/tests/test_incomplete_types.py @@ -1,7 +1,9 @@ import WebIDL + def WebIDLTest(parser, harness): - parser.parse(""" + parser.parse( + """ interface TestIncompleteTypes { attribute FooInterface attr1; @@ -10,35 +12,50 @@ def WebIDLTest(parser, harness): interface FooInterface { }; - """) + """ + ) results = parser.finish() harness.ok(True, 
"TestIncompleteTypes interface parsed without error.") harness.check(len(results), 2, "Should be two productions.") iface = results[0] - harness.ok(isinstance(iface, WebIDL.IDLInterface), - "Should be an IDLInterface") - harness.check(iface.identifier.QName(), "::TestIncompleteTypes", "Interface has the right QName") - harness.check(iface.identifier.name, "TestIncompleteTypes", "Interface has the right name") + harness.ok(isinstance(iface, WebIDL.IDLInterface), "Should be an IDLInterface") + harness.check( + iface.identifier.QName(), + "::TestIncompleteTypes", + "Interface has the right QName", + ) + harness.check( + iface.identifier.name, "TestIncompleteTypes", "Interface has the right name" + ) harness.check(len(iface.members), 2, "Expect 2 members") attr = iface.members[0] - harness.ok(isinstance(attr, WebIDL.IDLAttribute), - "Should be an IDLAttribute") + harness.ok(isinstance(attr, WebIDL.IDLAttribute), "Should be an IDLAttribute") method = iface.members[1] - harness.ok(isinstance(method, WebIDL.IDLMethod), - "Should be an IDLMethod") - - harness.check(attr.identifier.QName(), "::TestIncompleteTypes::attr1", - "Attribute has the right QName") - harness.check(attr.type.name, "FooInterface", - "Previously unresolved type has the right name") - - harness.check(method.identifier.QName(), "::TestIncompleteTypes::method1", - "Attribute has the right QName") + harness.ok(isinstance(method, WebIDL.IDLMethod), "Should be an IDLMethod") + + harness.check( + attr.identifier.QName(), + "::TestIncompleteTypes::attr1", + "Attribute has the right QName", + ) + harness.check( + attr.type.name, "FooInterface", "Previously unresolved type has the right name" + ) + + harness.check( + method.identifier.QName(), + "::TestIncompleteTypes::method1", + "Attribute has the right QName", + ) (returnType, args) = method.signatures()[0] - harness.check(returnType.name, "FooInterface", - "Previously unresolved type has the right name") - harness.check(args[0].type.name, "FooInterface", - "Previously unresolved type has the right name") + harness.check( + returnType.name, "FooInterface", "Previously unresolved type has the right name" + ) + harness.check( + args[0].type.name, + "FooInterface", + "Previously unresolved type has the right name", + ) diff --git a/dom/bindings/parser/tests/test_interface.py b/dom/bindings/parser/tests/test_interface.py index 47db3ae4cc9f45..e9168770a85d7b 100644 --- a/dom/bindings/parser/tests/test_interface.py +++ b/dom/bindings/parser/tests/test_interface.py @@ -1,12 +1,12 @@ import WebIDL + def WebIDLTest(parser, harness): parser.parse("interface Foo { };") results = parser.finish() harness.ok(True, "Empty interface parsed without error.") harness.check(len(results), 1, "Should be one production") - harness.ok(isinstance(results[0], WebIDL.IDLInterface), - "Should be an IDLInterface") + harness.ok(isinstance(results[0], WebIDL.IDLInterface), "Should be an IDLInterface") iface = results[0] harness.check(iface.identifier.QName(), "::Foo", "Interface has the right QName") harness.check(iface.identifier.name, "Foo", "Interface has the right name") @@ -16,16 +16,15 @@ def WebIDLTest(parser, harness): results = parser.finish() harness.ok(True, "Empty interface parsed without error.") harness.check(len(results), 2, "Should be two productions") - harness.ok(isinstance(results[1], WebIDL.IDLInterface), - "Should be an IDLInterface") + harness.ok(isinstance(results[1], WebIDL.IDLInterface), "Should be an IDLInterface") iface = results[1] harness.check(iface.identifier.QName(), "::Bar", 
"Interface has the right QName") harness.check(iface.identifier.name, "Bar", "Interface has the right name") - harness.ok(isinstance(iface.parent, WebIDL.IDLInterface), - "Interface has a parent") + harness.ok(isinstance(iface.parent, WebIDL.IDLInterface), "Interface has a parent") parser = parser.reset() - parser.parse(""" + parser.parse( + """ interface QNameBase { attribute long foo; }; @@ -34,32 +33,42 @@ def WebIDLTest(parser, harness): attribute long long foo; attribute byte bar; }; - """) + """ + ) results = parser.finish() harness.check(len(results), 2, "Should be two productions") - harness.ok(isinstance(results[0], WebIDL.IDLInterface), - "Should be an IDLInterface") - harness.ok(isinstance(results[1], WebIDL.IDLInterface), - "Should be an IDLInterface") + harness.ok(isinstance(results[0], WebIDL.IDLInterface), "Should be an IDLInterface") + harness.ok(isinstance(results[1], WebIDL.IDLInterface), "Should be an IDLInterface") harness.check(results[1].parent, results[0], "Inheritance chain is right") harness.check(len(results[0].members), 1, "Expect 1 productions") harness.check(len(results[1].members), 2, "Expect 2 productions") base = results[0] derived = results[1] - harness.check(base.members[0].identifier.QName(), "::QNameBase::foo", - "Member has the right QName") - harness.check(derived.members[0].identifier.QName(), "::QNameDerived::foo", - "Member has the right QName") - harness.check(derived.members[1].identifier.QName(), "::QNameDerived::bar", - "Member has the right QName") + harness.check( + base.members[0].identifier.QName(), + "::QNameBase::foo", + "Member has the right QName", + ) + harness.check( + derived.members[0].identifier.QName(), + "::QNameDerived::foo", + "Member has the right QName", + ) + harness.check( + derived.members[1].identifier.QName(), + "::QNameDerived::bar", + "Member has the right QName", + ) parser = parser.reset() threw = False try: - parser.parse(""" + parser.parse( + """ interface A : B {}; interface B : A {}; - """) + """ + ) results = parser.finish() except: threw = True @@ -69,32 +78,42 @@ def WebIDLTest(parser, harness): parser = parser.reset() threw = False try: - parser.parse(""" + parser.parse( + """ interface A : C {}; interface C : B {}; interface B : A {}; - """) + """ + ) results = parser.finish() except: threw = True - harness.ok(threw, "Should not allow indirect cycles in interface inheritance chains") + harness.ok( + threw, "Should not allow indirect cycles in interface inheritance chains" + ) parser = parser.reset() threw = False try: - parser.parse(""" + parser.parse( + """ interface A; interface B : A {}; - """) + """ + ) results = parser.finish() except: threw = True - harness.ok(threw, "Should not allow inheriting from an interface that is only forward declared") + harness.ok( + threw, + "Should not allow inheriting from an interface that is only forward declared", + ) parser = parser.reset() - parser.parse(""" + parser.parse( + """ interface A { constructor(); constructor(long arg); @@ -105,26 +124,43 @@ def WebIDLTest(parser, harness): readonly attribute boolean y; void foo(long arg); }; - """); - results = parser.finish(); - harness.check(len(results), 2, - "Should have two results with partial interface") + """ + ) + results = parser.finish() + harness.check(len(results), 2, "Should have two results with partial interface") iface = results[0] - harness.check(len(iface.members), 3, - "Should have three members with partial interface") - harness.check(iface.members[0].identifier.name, "x", - "First member should be x 
with partial interface") - harness.check(iface.members[1].identifier.name, "foo", - "Second member should be foo with partial interface") - harness.check(len(iface.members[1].signatures()), 2, - "Should have two foo signatures with partial interface") - harness.check(iface.members[2].identifier.name, "y", - "Third member should be y with partial interface") - harness.check(len(iface.ctor().signatures()), 2, - "Should have two constructors with partial interface") + harness.check( + len(iface.members), 3, "Should have three members with partial interface" + ) + harness.check( + iface.members[0].identifier.name, + "x", + "First member should be x with partial interface", + ) + harness.check( + iface.members[1].identifier.name, + "foo", + "Second member should be foo with partial interface", + ) + harness.check( + len(iface.members[1].signatures()), + 2, + "Should have two foo signatures with partial interface", + ) + harness.check( + iface.members[2].identifier.name, + "y", + "Third member should be y with partial interface", + ) + harness.check( + len(iface.ctor().signatures()), + 2, + "Should have two constructors with partial interface", + ) parser = parser.reset() - parser.parse(""" + parser.parse( + """ partial interface A { readonly attribute boolean y; void foo(long arg); @@ -135,236 +171,289 @@ def WebIDLTest(parser, harness): readonly attribute boolean x; void foo(); }; - """); - results = parser.finish(); - harness.check(len(results), 2, - "Should have two results with reversed partial interface") + """ + ) + results = parser.finish() + harness.check( + len(results), 2, "Should have two results with reversed partial interface" + ) iface = results[1] - harness.check(len(iface.members), 3, - "Should have three members with reversed partial interface") - harness.check(iface.members[0].identifier.name, "x", - "First member should be x with reversed partial interface") - harness.check(iface.members[1].identifier.name, "foo", - "Second member should be foo with reversed partial interface") - harness.check(len(iface.members[1].signatures()), 2, - "Should have two foo signatures with reversed partial interface") - harness.check(iface.members[2].identifier.name, "y", - "Third member should be y with reversed partial interface") - harness.check(len(iface.ctor().signatures()), 2, - "Should have two constructors with reversed partial interface") + harness.check( + len(iface.members), + 3, + "Should have three members with reversed partial interface", + ) + harness.check( + iface.members[0].identifier.name, + "x", + "First member should be x with reversed partial interface", + ) + harness.check( + iface.members[1].identifier.name, + "foo", + "Second member should be foo with reversed partial interface", + ) + harness.check( + len(iface.members[1].signatures()), + 2, + "Should have two foo signatures with reversed partial interface", + ) + harness.check( + iface.members[2].identifier.name, + "y", + "Third member should be y with reversed partial interface", + ) + harness.check( + len(iface.ctor().signatures()), + 2, + "Should have two constructors with reversed partial interface", + ) parser = parser.reset() threw = False try: - parser.parse(""" + parser.parse( + """ interface A { readonly attribute boolean x; }; interface A { readonly attribute boolean y; }; - """) + """ + ) results = parser.finish() except: threw = True - harness.ok(threw, - "Should not allow two non-partial interfaces with the same name") + harness.ok(threw, "Should not allow two non-partial interfaces with the same name") 
parser = parser.reset() threw = False try: - parser.parse(""" + parser.parse( + """ partial interface A { readonly attribute boolean x; }; partial interface A { readonly attribute boolean y; }; - """) + """ + ) results = parser.finish() except: threw = True - harness.ok(threw, - "Must have a non-partial interface for a given name") + harness.ok(threw, "Must have a non-partial interface for a given name") parser = parser.reset() threw = False try: - parser.parse(""" + parser.parse( + """ dictionary A { boolean x; }; partial interface A { readonly attribute boolean y; }; - """) + """ + ) results = parser.finish() except: threw = True - harness.ok(threw, - "Should not allow a name collision between partial interface " - "and other object") + harness.ok( + threw, + "Should not allow a name collision between partial interface " + "and other object", + ) parser = parser.reset() threw = False try: - parser.parse(""" + parser.parse( + """ dictionary A { boolean x; }; interface A { readonly attribute boolean y; }; - """) + """ + ) results = parser.finish() except: threw = True - harness.ok(threw, - "Should not allow a name collision between interface " - "and other object") + harness.ok( + threw, "Should not allow a name collision between interface " "and other object" + ) parser = parser.reset() threw = False try: - parser.parse(""" + parser.parse( + """ dictionary A { boolean x; }; interface A; - """) + """ + ) results = parser.finish() except: threw = True - harness.ok(threw, - "Should not allow a name collision between external interface " - "and other object") + harness.ok( + threw, + "Should not allow a name collision between external interface " + "and other object", + ) parser = parser.reset() threw = False try: - parser.parse(""" + parser.parse( + """ interface A { readonly attribute boolean x; }; interface A; - """) + """ + ) results = parser.finish() except: threw = True - harness.ok(threw, - "Should not allow a name collision between external interface " - "and interface") + harness.ok( + threw, + "Should not allow a name collision between external interface " "and interface", + ) parser = parser.reset() - parser.parse(""" + parser.parse( + """ interface A; interface A; - """) + """ + ) results = parser.finish() - harness.ok(len(results) == 1 and - isinstance(results[0], WebIDL.IDLExternalInterface), - "Should allow name collisions between external interface " - "declarations") + harness.ok( + len(results) == 1 and isinstance(results[0], WebIDL.IDLExternalInterface), + "Should allow name collisions between external interface " "declarations", + ) parser = parser.reset() threw = False try: - parser.parse(""" + parser.parse( + """ [SomeRandomAnnotation] interface A { readonly attribute boolean y; }; - """) + """ + ) results = parser.finish() except: threw = True - harness.ok(threw, - "Should not allow unknown extended attributes on interfaces") + harness.ok(threw, "Should not allow unknown extended attributes on interfaces") parser = parser.reset() - parser.parse(""" + parser.parse( + """ [Global, Exposed=Window] interface Window {}; [Exposed=Window, LegacyWindowAlias=A] interface B {}; [Exposed=Window, LegacyWindowAlias=(C, D)] interface E {}; - """); - results = parser.finish(); - harness.check(results[1].legacyWindowAliases, ["A"], - "Should support a single identifier") - harness.check(results[2].legacyWindowAliases, ["C", "D"], - "Should support an identifier list") + """ + ) + results = parser.finish() + harness.check( + results[1].legacyWindowAliases, ["A"], "Should support a 
single identifier" + ) + harness.check( + results[2].legacyWindowAliases, ["C", "D"], "Should support an identifier list" + ) parser = parser.reset() threw = False try: - parser.parse(""" + parser.parse( + """ [LegacyWindowAlias] interface A {}; - """) + """ + ) results = parser.finish() except: threw = True - harness.ok(threw, - "Should not allow [LegacyWindowAlias] with no value") + harness.ok(threw, "Should not allow [LegacyWindowAlias] with no value") parser = parser.reset() threw = False try: - parser.parse(""" + parser.parse( + """ [Exposed=Worker, LegacyWindowAlias=B] interface A {}; - """) + """ + ) results = parser.finish() except: threw = True - harness.ok(threw, - "Should not allow [LegacyWindowAlias] without Window exposure") + harness.ok(threw, "Should not allow [LegacyWindowAlias] without Window exposure") parser = parser.reset() threw = False try: - parser.parse(""" + parser.parse( + """ [Global, Exposed=Window] interface Window {}; [Exposed=Window] interface A {}; [Exposed=Window, LegacyWindowAlias=A] interface B {}; - """) + """ + ) results = parser.finish() except: threw = True - harness.ok(threw, - "Should not allow [LegacyWindowAlias] to conflict with other identifiers") + harness.ok( + threw, "Should not allow [LegacyWindowAlias] to conflict with other identifiers" + ) parser = parser.reset() threw = False try: - parser.parse(""" + parser.parse( + """ [Global, Exposed=Window] interface Window {}; [Exposed=Window, LegacyWindowAlias=A] interface B {}; [Exposed=Window] interface A {}; - """) + """ + ) results = parser.finish() except: threw = True - harness.ok(threw, - "Should not allow [LegacyWindowAlias] to conflict with other identifiers") + harness.ok( + threw, "Should not allow [LegacyWindowAlias] to conflict with other identifiers" + ) parser = parser.reset() threw = False try: - parser.parse(""" + parser.parse( + """ [Global, Exposed=Window] interface Window {}; [Exposed=Window, LegacyWindowAlias=A] interface B {}; [Exposed=Window, LegacyWindowAlias=A] interface C {}; - """) + """ + ) results = parser.finish() except: threw = True - harness.ok(threw, - "Should not allow [LegacyWindowAlias] to conflict with other identifiers") + harness.ok( + threw, "Should not allow [LegacyWindowAlias] to conflict with other identifiers" + ) diff --git a/dom/bindings/parser/tests/test_interface_const_identifier_conflicts.py b/dom/bindings/parser/tests/test_interface_const_identifier_conflicts.py index db944e7aaf7637..5750f87a6fcf9a 100644 --- a/dom/bindings/parser/tests/test_interface_const_identifier_conflicts.py +++ b/dom/bindings/parser/tests/test_interface_const_identifier_conflicts.py @@ -1,12 +1,14 @@ def WebIDLTest(parser, harness): threw = False try: - parser.parse(""" + parser.parse( + """ interface IdentifierConflict { const byte thing1 = 1; const unsigned long thing1 = 1; }; - """) + """ + ) results = parser.finish() except: diff --git a/dom/bindings/parser/tests/test_interface_identifier_conflicts_across_members.py b/dom/bindings/parser/tests/test_interface_identifier_conflicts_across_members.py index 1a73fb917ed4d5..c1a544ce718006 100644 --- a/dom/bindings/parser/tests/test_interface_identifier_conflicts_across_members.py +++ b/dom/bindings/parser/tests/test_interface_identifier_conflicts_across_members.py @@ -1,12 +1,14 @@ def WebIDLTest(parser, harness): threw = False try: - parser.parse(""" + parser.parse( + """ interface IdentifierConflictAcrossMembers1 { const byte thing1 = 1; readonly attribute long thing1; }; - """) + """ + ) results = parser.finish() except: @@ 
-16,12 +18,14 @@ def WebIDLTest(parser, harness): threw = False try: - parser.parse(""" + parser.parse( + """ interface IdentifierConflictAcrossMembers2 { readonly attribute long thing1; const byte thing1 = 1; }; - """) + """ + ) results = parser.finish() except: @@ -31,12 +35,14 @@ def WebIDLTest(parser, harness): threw = False try: - parser.parse(""" + parser.parse( + """ interface IdentifierConflictAcrossMembers3 { getter boolean thing1(DOMString name); readonly attribute long thing1; }; - """) + """ + ) results = parser.finish() except: @@ -46,12 +52,14 @@ def WebIDLTest(parser, harness): threw = False try: - parser.parse(""" + parser.parse( + """ interface IdentifierConflictAcrossMembers1 { const byte thing1 = 1; long thing1(); }; - """) + """ + ) results = parser.finish() except: diff --git a/dom/bindings/parser/tests/test_interface_maplikesetlikeiterable.py b/dom/bindings/parser/tests/test_interface_maplikesetlikeiterable.py index e070adee7e62df..577336a66fa25c 100644 --- a/dom/bindings/parser/tests/test_interface_maplikesetlikeiterable.py +++ b/dom/bindings/parser/tests/test_interface_maplikesetlikeiterable.py @@ -1,33 +1,42 @@ import WebIDL import traceback -def WebIDLTest(parser, harness): + +def WebIDLTest(parser, harness): def shouldPass(prefix, iface, expectedMembers, numProductions=1): p = parser.reset() p.parse(iface) results = p.finish() - harness.check(len(results), numProductions, - "%s - Should have production count %d" % (prefix, numProductions)) - harness.ok(isinstance(results[0], WebIDL.IDLInterface), - "%s - Should be an IDLInterface" % (prefix)) + harness.check( + len(results), + numProductions, + "%s - Should have production count %d" % (prefix, numProductions), + ) + harness.ok( + isinstance(results[0], WebIDL.IDLInterface), + "%s - Should be an IDLInterface" % (prefix), + ) # Make a copy, since we plan to modify it expectedMembers = list(expectedMembers) for m in results[0].members: name = m.identifier.name if (name, type(m)) in expectedMembers: - harness.ok(True, "%s - %s - Should be a %s" % (prefix, name, - type(m))) + harness.ok(True, "%s - %s - Should be a %s" % (prefix, name, type(m))) expectedMembers.remove((name, type(m))) else: - harness.ok(False, "%s - %s - Unknown symbol of type %s" % - (prefix, name, type(m))) + harness.ok( + False, + "%s - %s - Unknown symbol of type %s" % (prefix, name, type(m)), + ) # A bit of a hoop because we can't generate the error string if we pass if len(expectedMembers) == 0: harness.ok(True, "Found all the members") else: - harness.ok(False, - "Expected member not found: %s of type %s" % - (expectedMembers[0][0], expectedMembers[0][1])) + harness.ok( + False, + "Expected member not found: %s of type %s" + % (expectedMembers[0][0], expectedMembers[0][1]), + ) return results def shouldFail(prefix, iface): @@ -35,44 +44,46 @@ def shouldFail(prefix, iface): p = parser.reset() p.parse(iface) p.finish() - harness.ok(False, - prefix + " - Interface passed when should've failed") + harness.ok(False, prefix + " - Interface passed when should've failed") except WebIDL.WebIDLError as e: - harness.ok(True, - prefix + " - Interface failed as expected") + harness.ok(True, prefix + " - Interface failed as expected") except Exception as e: - harness.ok(False, - prefix + " - Interface failed but not as a WebIDLError exception: %s" % e) - - iterableMembers = [(x, WebIDL.IDLMethod) for x in ["entries", "keys", - "values", "forEach"]] - setROMembers = ([(x, WebIDL.IDLMethod) for x in ["has"]] + - [("__setlike", WebIDL.IDLMaplikeOrSetlike)] + - 
iterableMembers) + harness.ok( + False, + prefix + + " - Interface failed but not as a WebIDLError exception: %s" % e, + ) + + iterableMembers = [ + (x, WebIDL.IDLMethod) for x in ["entries", "keys", "values", "forEach"] + ] + setROMembers = ( + [(x, WebIDL.IDLMethod) for x in ["has"]] + + [("__setlike", WebIDL.IDLMaplikeOrSetlike)] + + iterableMembers + ) setROMembers.extend([("size", WebIDL.IDLAttribute)]) - setRWMembers = ([(x, WebIDL.IDLMethod) for x in ["add", - "clear", - "delete"]] + - setROMembers) - setROChromeMembers = ([(x, WebIDL.IDLMethod) for x in ["__add", - "__clear", - "__delete"]] + - setROMembers) - setRWChromeMembers = ([(x, WebIDL.IDLMethod) for x in ["__add", - "__clear", - "__delete"]] + - setRWMembers) - mapROMembers = ([(x, WebIDL.IDLMethod) for x in ["get", "has"]] + - [("__maplike", WebIDL.IDLMaplikeOrSetlike)] + - iterableMembers) + setRWMembers = [ + (x, WebIDL.IDLMethod) for x in ["add", "clear", "delete"] + ] + setROMembers + setROChromeMembers = [ + (x, WebIDL.IDLMethod) for x in ["__add", "__clear", "__delete"] + ] + setROMembers + setRWChromeMembers = [ + (x, WebIDL.IDLMethod) for x in ["__add", "__clear", "__delete"] + ] + setRWMembers + mapROMembers = ( + [(x, WebIDL.IDLMethod) for x in ["get", "has"]] + + [("__maplike", WebIDL.IDLMaplikeOrSetlike)] + + iterableMembers + ) mapROMembers.extend([("size", WebIDL.IDLAttribute)]) - mapRWMembers = ([(x, WebIDL.IDLMethod) for x in ["set", - "clear", - "delete"]] + mapROMembers) - mapRWChromeMembers = ([(x, WebIDL.IDLMethod) for x in ["__set", - "__clear", - "__delete"]] + - mapRWMembers) + mapRWMembers = [ + (x, WebIDL.IDLMethod) for x in ["set", "clear", "delete"] + ] + mapROMembers + mapRWChromeMembers = [ + (x, WebIDL.IDLMethod) for x in ["__set", "__clear", "__delete"] + ] + mapRWMembers # OK, now that we've used iterableMembers to set up the above, append # __iterable to it for the iterable<> case. 
@@ -88,15 +99,18 @@ def shouldFail(prefix, iface): disallowedNonMethodNames = ["clear", "delete"] mapDisallowedNonMethodNames = ["set"] + disallowedNonMethodNames setDisallowedNonMethodNames = ["add"] + disallowedNonMethodNames - unrelatedMembers = [("unrelatedAttribute", WebIDL.IDLAttribute), - ("unrelatedMethod", WebIDL.IDLMethod)] + unrelatedMembers = [ + ("unrelatedAttribute", WebIDL.IDLAttribute), + ("unrelatedMethod", WebIDL.IDLMethod), + ] # # Simple Usage Tests # - shouldPass("Iterable (key only)", - """ + shouldPass( + "Iterable (key only)", + """ interface Foo1 { iterable; readonly attribute unsigned long length; @@ -104,10 +118,13 @@ def shouldFail(prefix, iface): attribute long unrelatedAttribute; long unrelatedMethod(); }; - """, valueIterableMembers + unrelatedMembers) + """, + valueIterableMembers + unrelatedMembers, + ) - shouldPass("Iterable (key only) inheriting from parent", - """ + shouldPass( + "Iterable (key only) inheriting from parent", + """ interface Foo1 : Foo2 { iterable; readonly attribute unsigned long length; @@ -117,21 +134,28 @@ def shouldFail(prefix, iface): attribute long unrelatedAttribute; long unrelatedMethod(); }; - """, valueIterableMembers, numProductions=2) + """, + valueIterableMembers, + numProductions=2, + ) - shouldPass("Iterable (key and value)", - """ + shouldPass( + "Iterable (key and value)", + """ interface Foo1 { iterable; attribute long unrelatedAttribute; long unrelatedMethod(); }; - """, iterableMembers + unrelatedMembers, - # numProductions == 2 because of the generated iterator iface, - numProductions=2) + """, + iterableMembers + unrelatedMembers, + # numProductions == 2 because of the generated iterator iface, + numProductions=2, + ) - shouldPass("Iterable (key and value) inheriting from parent", - """ + shouldPass( + "Iterable (key and value) inheriting from parent", + """ interface Foo1 : Foo2 { iterable; }; @@ -139,21 +163,27 @@ def shouldFail(prefix, iface): attribute long unrelatedAttribute; long unrelatedMethod(); }; - """, iterableMembers, - # numProductions == 3 because of the generated iterator iface, - numProductions=3) + """, + iterableMembers, + # numProductions == 3 because of the generated iterator iface, + numProductions=3, + ) - shouldPass("Maplike (readwrite)", - """ + shouldPass( + "Maplike (readwrite)", + """ interface Foo1 { maplike; attribute long unrelatedAttribute; long unrelatedMethod(); }; - """, mapRWMembers + unrelatedMembers) + """, + mapRWMembers + unrelatedMembers, + ) - shouldPass("Maplike (readwrite) inheriting from parent", - """ + shouldPass( + "Maplike (readwrite) inheriting from parent", + """ interface Foo1 : Foo2 { maplike; }; @@ -161,19 +191,26 @@ def shouldFail(prefix, iface): attribute long unrelatedAttribute; long unrelatedMethod(); }; - """, mapRWMembers, numProductions=2) + """, + mapRWMembers, + numProductions=2, + ) - shouldPass("Maplike (readwrite)", - """ + shouldPass( + "Maplike (readwrite)", + """ interface Foo1 { maplike; attribute long unrelatedAttribute; long unrelatedMethod(); }; - """, mapRWMembers + unrelatedMembers) + """, + mapRWMembers + unrelatedMembers, + ) - shouldPass("Maplike (readwrite) inheriting from parent", - """ + shouldPass( + "Maplike (readwrite) inheriting from parent", + """ interface Foo1 : Foo2 { maplike; }; @@ -181,19 +218,26 @@ def shouldFail(prefix, iface): attribute long unrelatedAttribute; long unrelatedMethod(); }; - """, mapRWMembers, numProductions=2) + """, + mapRWMembers, + numProductions=2, + ) - shouldPass("Maplike (readonly)", - """ + 
shouldPass( + "Maplike (readonly)", + """ interface Foo1 { readonly maplike; attribute long unrelatedAttribute; long unrelatedMethod(); }; - """, mapROMembers + unrelatedMembers) + """, + mapROMembers + unrelatedMembers, + ) - shouldPass("Maplike (readonly) inheriting from parent", - """ + shouldPass( + "Maplike (readonly) inheriting from parent", + """ interface Foo1 : Foo2 { readonly maplike; }; @@ -201,19 +245,26 @@ def shouldFail(prefix, iface): attribute long unrelatedAttribute; long unrelatedMethod(); }; - """, mapROMembers, numProductions=2) + """, + mapROMembers, + numProductions=2, + ) - shouldPass("Setlike (readwrite)", - """ + shouldPass( + "Setlike (readwrite)", + """ interface Foo1 { setlike; attribute long unrelatedAttribute; long unrelatedMethod(); }; - """, setRWMembers + unrelatedMembers) + """, + setRWMembers + unrelatedMembers, + ) - shouldPass("Setlike (readwrite) inheriting from parent", - """ + shouldPass( + "Setlike (readwrite) inheriting from parent", + """ interface Foo1 : Foo2 { setlike; }; @@ -221,19 +272,26 @@ def shouldFail(prefix, iface): attribute long unrelatedAttribute; long unrelatedMethod(); }; - """, setRWMembers, numProductions=2) + """, + setRWMembers, + numProductions=2, + ) - shouldPass("Setlike (readonly)", - """ + shouldPass( + "Setlike (readonly)", + """ interface Foo1 { readonly setlike; attribute long unrelatedAttribute; long unrelatedMethod(); }; - """, setROMembers + unrelatedMembers) + """, + setROMembers + unrelatedMembers, + ) - shouldPass("Setlike (readonly) inheriting from parent", - """ + shouldPass( + "Setlike (readonly) inheriting from parent", + """ interface Foo1 : Foo2 { readonly setlike; }; @@ -241,95 +299,121 @@ def shouldFail(prefix, iface): attribute long unrelatedAttribute; long unrelatedMethod(); }; - """, setROMembers, numProductions=2) + """, + setROMembers, + numProductions=2, + ) - shouldPass("Inheritance of maplike/setlike", - """ + shouldPass( + "Inheritance of maplike/setlike", + """ interface Foo1 { maplike; }; interface Foo2 : Foo1 { }; - """, mapRWMembers, numProductions=2) + """, + mapRWMembers, + numProductions=2, + ) - shouldPass("JS Implemented maplike interface", - """ + shouldPass( + "JS Implemented maplike interface", + """ [JSImplementation="@mozilla.org/dom/test-interface-js-maplike;1"] interface Foo1 { constructor(); setlike; }; - """, setRWChromeMembers) + """, + setRWChromeMembers, + ) - shouldPass("JS Implemented maplike interface", - """ + shouldPass( + "JS Implemented maplike interface", + """ [JSImplementation="@mozilla.org/dom/test-interface-js-maplike;1"] interface Foo1 { constructor(); maplike; }; - """, mapRWChromeMembers) + """, + mapRWChromeMembers, + ) # # Multiple maplike/setlike tests # - shouldFail("Two maplike/setlikes on same interface", - """ + shouldFail( + "Two maplike/setlikes on same interface", + """ interface Foo1 { setlike; maplike; }; - """) + """, + ) - shouldFail("Two iterable/setlikes on same interface", - """ + shouldFail( + "Two iterable/setlikes on same interface", + """ interface Foo1 { iterable; maplike; }; - """) + """, + ) - shouldFail("Two iterables on same interface", - """ + shouldFail( + "Two iterables on same interface", + """ interface Foo1 { iterable; iterable; }; - """) + """, + ) - shouldFail("Two maplike/setlikes in partials", - """ + shouldFail( + "Two maplike/setlikes in partials", + """ interface Foo1 { maplike; }; partial interface Foo1 { setlike; }; - """) + """, + ) - shouldFail("Conflicting maplike/setlikes across inheritance", - """ + shouldFail( + 
"Conflicting maplike/setlikes across inheritance", + """ interface Foo1 { maplike; }; interface Foo2 : Foo1 { setlike; }; - """) + """, + ) - shouldFail("Conflicting maplike/iterable across inheritance", - """ + shouldFail( + "Conflicting maplike/iterable across inheritance", + """ interface Foo1 { maplike; }; interface Foo2 : Foo1 { iterable; }; - """) + """, + ) - shouldFail("Conflicting maplike/setlikes across multistep inheritance", - """ + shouldFail( + "Conflicting maplike/setlikes across multistep inheritance", + """ interface Foo1 { maplike; }; @@ -338,7 +422,8 @@ def shouldFail(prefix, iface): interface Foo3 : Foo2 { setlike; }; - """) + """, + ) # # Member name collision tests @@ -353,61 +438,83 @@ def testConflictingMembers(likeMember, conflictName, expectedMembers, methodPass """ if methodPasses: - shouldPass("Conflicting method: %s and %s" % (likeMember, conflictName), - """ + shouldPass( + "Conflicting method: %s and %s" % (likeMember, conflictName), + """ interface Foo1 { %s; [Throws] void %s(long test1, double test2, double test3); }; - """ % (likeMember, conflictName), expectedMembers) - else: - shouldFail("Conflicting method: %s and %s" % (likeMember, conflictName), """ + % (likeMember, conflictName), + expectedMembers, + ) + else: + shouldFail( + "Conflicting method: %s and %s" % (likeMember, conflictName), + """ interface Foo1 { %s; [Throws] void %s(long test1, double test2, double test3); }; - """ % (likeMember, conflictName)) + """ + % (likeMember, conflictName), + ) # Inherited conflicting methods should ALWAYS fail - shouldFail("Conflicting inherited method: %s and %s" % (likeMember, conflictName), - """ + shouldFail( + "Conflicting inherited method: %s and %s" % (likeMember, conflictName), + """ interface Foo1 { void %s(long test1, double test2, double test3); }; interface Foo2 : Foo1 { %s; }; - """ % (conflictName, likeMember)) - shouldFail("Conflicting static method: %s and %s" % (likeMember, conflictName), """ + % (conflictName, likeMember), + ) + shouldFail( + "Conflicting static method: %s and %s" % (likeMember, conflictName), + """ interface Foo1 { %s; static void %s(long test1, double test2, double test3); }; - """ % (likeMember, conflictName)) - shouldFail("Conflicting attribute: %s and %s" % (likeMember, conflictName), """ + % (likeMember, conflictName), + ) + shouldFail( + "Conflicting attribute: %s and %s" % (likeMember, conflictName), + """ interface Foo1 { %s attribute double %s; }; - """ % (likeMember, conflictName)) - shouldFail("Conflicting const: %s and %s" % (likeMember, conflictName), """ + % (likeMember, conflictName), + ) + shouldFail( + "Conflicting const: %s and %s" % (likeMember, conflictName), + """ interface Foo1 { %s; const double %s = 0; }; - """ % (likeMember, conflictName)) - shouldFail("Conflicting static attribute: %s and %s" % (likeMember, conflictName), """ + % (likeMember, conflictName), + ) + shouldFail( + "Conflicting static attribute: %s and %s" % (likeMember, conflictName), + """ interface Foo1 { %s; static attribute long %s; }; - """ % (likeMember, conflictName)) + """ + % (likeMember, conflictName), + ) for member in disallowedIterableNames: testConflictingMembers("iterable", member, iterableMembers, False) @@ -420,18 +527,23 @@ def testConflictingMembers(likeMember, conflictName, expectedMembers, methodPass for member in setDisallowedNonMethodNames: testConflictingMembers("setlike", member, setRWMembers, True) - shouldPass("Inheritance of maplike/setlike with child member collision", - """ + shouldPass( + "Inheritance of 
maplike/setlike with child member collision", + """ interface Foo1 { maplike; }; interface Foo2 : Foo1 { void entries(); }; - """, mapRWMembers, numProductions=2) + """, + mapRWMembers, + numProductions=2, + ) - shouldPass("Inheritance of multi-level maplike/setlike with child member collision", - """ + shouldPass( + "Inheritance of multi-level maplike/setlike with child member collision", + """ interface Foo1 { maplike; }; @@ -440,10 +552,14 @@ def testConflictingMembers(likeMember, conflictName, expectedMembers, methodPass interface Foo3 : Foo2 { void entries(); }; - """, mapRWMembers, numProductions=3) + """, + mapRWMembers, + numProductions=3, + ) - shouldFail("Maplike interface with mixin member collision", - """ + shouldFail( + "Maplike interface with mixin member collision", + """ interface Foo1 { maplike; }; @@ -451,10 +567,12 @@ def testConflictingMembers(likeMember, conflictName, expectedMembers, methodPass void entries(); }; Foo1 includes Foo2; - """) + """, + ) - shouldPass("Inherited Maplike interface with consequential interface member collision", - """ + shouldPass( + "Inherited Maplike interface with consequential interface member collision", + """ interface Foo1 { maplike; }; @@ -464,20 +582,26 @@ def testConflictingMembers(likeMember, conflictName, expectedMembers, methodPass interface Foo3 : Foo1 { }; Foo3 includes Foo2; - """, mapRWMembers, numProductions=4) + """, + mapRWMembers, + numProductions=4, + ) - shouldFail("Inheritance of name collision with child maplike/setlike", - """ + shouldFail( + "Inheritance of name collision with child maplike/setlike", + """ interface Foo1 { void entries(); }; interface Foo2 : Foo1 { maplike; }; - """) + """, + ) - shouldFail("Inheritance of multi-level name collision with child maplike/setlike", - """ + shouldFail( + "Inheritance of multi-level name collision with child maplike/setlike", + """ interface Foo1 { void entries(); }; @@ -486,20 +610,26 @@ def testConflictingMembers(likeMember, conflictName, expectedMembers, methodPass interface Foo3 : Foo2 { maplike; }; - """) + """, + ) - shouldPass("Inheritance of attribute collision with parent maplike/setlike", - """ + shouldPass( + "Inheritance of attribute collision with parent maplike/setlike", + """ interface Foo1 { maplike; }; interface Foo2 : Foo1 { attribute double size; }; - """, mapRWMembers, numProductions=2) + """, + mapRWMembers, + numProductions=2, + ) - shouldPass("Inheritance of multi-level attribute collision with parent maplike/setlike", - """ + shouldPass( + "Inheritance of multi-level attribute collision with parent maplike/setlike", + """ interface Foo1 { maplike; }; @@ -508,20 +638,26 @@ def testConflictingMembers(likeMember, conflictName, expectedMembers, methodPass interface Foo3 : Foo2 { attribute double size; }; - """, mapRWMembers, numProductions=3) + """, + mapRWMembers, + numProductions=3, + ) - shouldFail("Inheritance of attribute collision with child maplike/setlike", - """ + shouldFail( + "Inheritance of attribute collision with child maplike/setlike", + """ interface Foo1 { attribute double size; }; interface Foo2 : Foo1 { maplike; }; - """) + """, + ) - shouldFail("Inheritance of multi-level attribute collision with child maplike/setlike", - """ + shouldFail( + "Inheritance of multi-level attribute collision with child maplike/setlike", + """ interface Foo1 { attribute double size; }; @@ -530,40 +666,50 @@ def testConflictingMembers(likeMember, conflictName, expectedMembers, methodPass interface Foo3 : Foo2 { maplike; }; - """) + """, + ) - 
shouldFail("Inheritance of attribute/rw function collision with child maplike/setlike", - """ + shouldFail( + "Inheritance of attribute/rw function collision with child maplike/setlike", + """ interface Foo1 { attribute double set; }; interface Foo2 : Foo1 { maplike; }; - """) + """, + ) - shouldFail("Inheritance of const/rw function collision with child maplike/setlike", - """ + shouldFail( + "Inheritance of const/rw function collision with child maplike/setlike", + """ interface Foo1 { const double set = 0; }; interface Foo2 : Foo1 { maplike; }; - """) + """, + ) - shouldPass("Inheritance of rw function with same name in child maplike/setlike", - """ + shouldPass( + "Inheritance of rw function with same name in child maplike/setlike", + """ interface Foo1 { maplike; }; interface Foo2 : Foo1 { void clear(); }; - """, mapRWMembers, numProductions=2) + """, + mapRWMembers, + numProductions=2, + ) - shouldFail("Inheritance of unforgeable attribute collision with child maplike/setlike", - """ + shouldFail( + "Inheritance of unforgeable attribute collision with child maplike/setlike", + """ interface Foo1 { [Unforgeable] attribute double size; @@ -571,10 +717,12 @@ def testConflictingMembers(likeMember, conflictName, expectedMembers, methodPass interface Foo2 : Foo1 { maplike; }; - """) + """, + ) - shouldFail("Inheritance of multi-level unforgeable attribute collision with child maplike/setlike", - """ + shouldFail( + "Inheritance of multi-level unforgeable attribute collision with child maplike/setlike", + """ interface Foo1 { [Unforgeable] attribute double size; @@ -584,49 +732,65 @@ def testConflictingMembers(likeMember, conflictName, expectedMembers, methodPass interface Foo3 : Foo2 { maplike; }; - """) + """, + ) - shouldPass("Interface with readonly allowable overrides", - """ + shouldPass( + "Interface with readonly allowable overrides", + """ interface Foo1 { readonly setlike; readonly attribute boolean clear; }; - """, setROMembers + [("clear", WebIDL.IDLAttribute)]) + """, + setROMembers + [("clear", WebIDL.IDLAttribute)], + ) - shouldPass("JS Implemented read-only interface with readonly allowable overrides", - """ + shouldPass( + "JS Implemented read-only interface with readonly allowable overrides", + """ [JSImplementation="@mozilla.org/dom/test-interface-js-maplike;1"] interface Foo1 { constructor(); readonly setlike; readonly attribute boolean clear; }; - """, setROChromeMembers + [("clear", WebIDL.IDLAttribute)]) + """, + setROChromeMembers + [("clear", WebIDL.IDLAttribute)], + ) - shouldFail("JS Implemented read-write interface with non-readwrite allowable overrides", - """ + shouldFail( + "JS Implemented read-write interface with non-readwrite allowable overrides", + """ [JSImplementation="@mozilla.org/dom/test-interface-js-maplike;1"] interface Foo1 { constructor(); setlike; readonly attribute boolean clear; }; - """) + """, + ) - r = shouldPass("Check proper override of clear/delete/set", - """ + r = shouldPass( + "Check proper override of clear/delete/set", + """ interface Foo1 { maplike; long clear(long a, long b, double c, double d); long set(long a, long b, double c, double d); long delete(long a, long b, double c, double d); }; - """, mapRWMembers) + """, + mapRWMembers, + ) for m in r[0].members: if m.identifier.name in ["clear", "set", "delete"]: harness.ok(m.isMethod(), "%s should be a method" % m.identifier.name) - harness.check(m.maxArgCount, 4, "%s should have 4 arguments" % m.identifier.name) - harness.ok(not m.isMaplikeOrSetlikeOrIterableMethod(), - "%s should 
not be a maplike/setlike function" % m.identifier.name) + harness.check( + m.maxArgCount, 4, "%s should have 4 arguments" % m.identifier.name + ) + harness.ok( + not m.isMaplikeOrSetlikeOrIterableMethod(), + "%s should not be a maplike/setlike function" % m.identifier.name, + ) diff --git a/dom/bindings/parser/tests/test_interfacemixin.py b/dom/bindings/parser/tests/test_interfacemixin.py index 477a9f377998dc..deda8642281fd8 100644 --- a/dom/bindings/parser/tests/test_interfacemixin.py +++ b/dom/bindings/parser/tests/test_interfacemixin.py @@ -1,33 +1,46 @@ import WebIDL + def WebIDLTest(parser, harness): parser.parse("interface mixin Foo { };") results = parser.finish() harness.ok(True, "Empty interface mixin parsed without error.") harness.check(len(results), 1, "Should be one production") - harness.ok(isinstance(results[0], WebIDL.IDLInterfaceMixin), - "Should be an IDLInterfaceMixin") + harness.ok( + isinstance(results[0], WebIDL.IDLInterfaceMixin), + "Should be an IDLInterfaceMixin", + ) mixin = results[0] - harness.check(mixin.identifier.QName(), "::Foo", "Interface mixin has the right QName") + harness.check( + mixin.identifier.QName(), "::Foo", "Interface mixin has the right QName" + ) harness.check(mixin.identifier.name, "Foo", "Interface mixin has the right name") parser = parser.reset() - parser.parse(""" + parser.parse( + """ interface mixin QNameBase { const long foo = 3; }; - """) + """ + ) results = parser.finish() harness.check(len(results), 1, "Should be one productions") - harness.ok(isinstance(results[0], WebIDL.IDLInterfaceMixin), - "Should be an IDLInterfaceMixin") + harness.ok( + isinstance(results[0], WebIDL.IDLInterfaceMixin), + "Should be an IDLInterfaceMixin", + ) harness.check(len(results[0].members), 1, "Expect 1 productions") mixin = results[0] - harness.check(mixin.members[0].identifier.QName(), "::QNameBase::foo", - "Member has the right QName") + harness.check( + mixin.members[0].identifier.QName(), + "::QNameBase::foo", + "Member has the right QName", + ) parser = parser.reset() - parser.parse(""" + parser.parse( + """ interface mixin A { readonly attribute boolean x; void foo(); @@ -36,24 +49,40 @@ def WebIDLTest(parser, harness): readonly attribute boolean y; void foo(long arg); }; - """) + """ + ) results = parser.finish() - harness.check(len(results), 2, - "Should have two results with partial interface mixin") + harness.check( + len(results), 2, "Should have two results with partial interface mixin" + ) mixin = results[0] - harness.check(len(mixin.members), 3, - "Should have three members with partial interface mixin") - harness.check(mixin.members[0].identifier.name, "x", - "First member should be x with partial interface mixin") - harness.check(mixin.members[1].identifier.name, "foo", - "Second member should be foo with partial interface mixin") - harness.check(len(mixin.members[1].signatures()), 2, - "Should have two foo signatures with partial interface mixin") - harness.check(mixin.members[2].identifier.name, "y", - "Third member should be y with partial interface mixin") + harness.check( + len(mixin.members), 3, "Should have three members with partial interface mixin" + ) + harness.check( + mixin.members[0].identifier.name, + "x", + "First member should be x with partial interface mixin", + ) + harness.check( + mixin.members[1].identifier.name, + "foo", + "Second member should be foo with partial interface mixin", + ) + harness.check( + len(mixin.members[1].signatures()), + 2, + "Should have two foo signatures with partial interface mixin", + ) + 
harness.check( + mixin.members[2].identifier.name, + "y", + "Third member should be y with partial interface mixin", + ) parser = parser.reset() - parser.parse(""" + parser.parse( + """ partial interface mixin A { readonly attribute boolean y; void foo(long arg); @@ -62,285 +91,334 @@ def WebIDLTest(parser, harness): readonly attribute boolean x; void foo(); }; - """) + """ + ) results = parser.finish() - harness.check(len(results), 2, - "Should have two results with reversed partial interface mixin") + harness.check( + len(results), 2, "Should have two results with reversed partial interface mixin" + ) mixin = results[1] - harness.check(len(mixin.members), 3, - "Should have three members with reversed partial interface mixin") - harness.check(mixin.members[0].identifier.name, "x", - "First member should be x with reversed partial interface mixin") - harness.check(mixin.members[1].identifier.name, "foo", - "Second member should be foo with reversed partial interface mixin") - harness.check(len(mixin.members[1].signatures()), 2, - "Should have two foo signatures with reversed partial interface mixin") - harness.check(mixin.members[2].identifier.name, "y", - "Third member should be y with reversed partial interface mixin") + harness.check( + len(mixin.members), + 3, + "Should have three members with reversed partial interface mixin", + ) + harness.check( + mixin.members[0].identifier.name, + "x", + "First member should be x with reversed partial interface mixin", + ) + harness.check( + mixin.members[1].identifier.name, + "foo", + "Second member should be foo with reversed partial interface mixin", + ) + harness.check( + len(mixin.members[1].signatures()), + 2, + "Should have two foo signatures with reversed partial interface mixin", + ) + harness.check( + mixin.members[2].identifier.name, + "y", + "Third member should be y with reversed partial interface mixin", + ) parser = parser.reset() - parser.parse(""" + parser.parse( + """ interface Interface {}; interface mixin Mixin { attribute short x; }; Interface includes Mixin; - """) + """ + ) results = parser.finish() iface = results[0] harness.check(len(iface.members), 1, "Should merge members from mixins") - harness.check(iface.members[0].identifier.name, "x", - "Should merge members from mixins") + harness.check( + iface.members[0].identifier.name, "x", "Should merge members from mixins" + ) parser = parser.reset() threw = False try: - parser.parse(""" + parser.parse( + """ interface mixin A { readonly attribute boolean x; }; interface mixin A { readonly attribute boolean y; }; - """) + """ + ) results = parser.finish() except: threw = True - harness.ok(threw, - "Should not allow two non-partial interface mixins with the same name") + harness.ok( + threw, "Should not allow two non-partial interface mixins with the same name" + ) parser = parser.reset() threw = False try: - parser.parse(""" + parser.parse( + """ partial interface mixin A { readonly attribute boolean x; }; partial interface mixin A { readonly attribute boolean y; }; - """) + """ + ) results = parser.finish() except: threw = True - harness.ok(threw, - "Must have a non-partial interface mixin for a given name") + harness.ok(threw, "Must have a non-partial interface mixin for a given name") parser = parser.reset() threw = False try: - parser.parse(""" + parser.parse( + """ dictionary A { boolean x; }; partial interface mixin A { readonly attribute boolean y; }; - """) + """ + ) results = parser.finish() except: threw = True - harness.ok(threw, - "Should not allow a name 
collision between partial interface " - "mixin and other object") + harness.ok( + threw, + "Should not allow a name collision between partial interface " + "mixin and other object", + ) parser = parser.reset() threw = False try: - parser.parse(""" + parser.parse( + """ dictionary A { boolean x; }; interface mixin A { readonly attribute boolean y; }; - """) + """ + ) results = parser.finish() except: threw = True - harness.ok(threw, - "Should not allow a name collision between interface mixin " - "and other object") + harness.ok( + threw, + "Should not allow a name collision between interface mixin " "and other object", + ) parser = parser.reset() threw = False try: - parser.parse(""" + parser.parse( + """ interface mixin A { readonly attribute boolean x; }; interface A; - """) + """ + ) results = parser.finish() except: threw = True - harness.ok(threw, - "Should not allow a name collision between external interface " - "and interface mixin") + harness.ok( + threw, + "Should not allow a name collision between external interface " + "and interface mixin", + ) parser = parser.reset() threw = False try: - parser.parse(""" + parser.parse( + """ [SomeRandomAnnotation] interface mixin A { readonly attribute boolean y; }; - """) + """ + ) results = parser.finish() except: threw = True - harness.ok(threw, - "Should not allow unknown extended attributes on interface mixins") + harness.ok( + threw, "Should not allow unknown extended attributes on interface mixins" + ) parser = parser.reset() threw = False try: - parser.parse(""" + parser.parse( + """ interface mixin A { getter double (DOMString propertyName); }; - """) + """ + ) results = parser.finish() except: threw = True - harness.ok(threw, - "Should not allow getters on interface mixins") + harness.ok(threw, "Should not allow getters on interface mixins") parser = parser.reset() threw = False try: - parser.parse(""" + parser.parse( + """ interface mixin A { setter void (DOMString propertyName, double propertyValue); }; - """) + """ + ) results = parser.finish() except: threw = True - harness.ok(threw, - "Should not allow setters on interface mixins") + harness.ok(threw, "Should not allow setters on interface mixins") parser = parser.reset() threw = False try: - parser.parse(""" + parser.parse( + """ interface mixin A { deleter void (DOMString propertyName); }; - """) + """ + ) results = parser.finish() except: threw = True - harness.ok(threw, - "Should not allow deleters on interface mixins") + harness.ok(threw, "Should not allow deleters on interface mixins") parser = parser.reset() threw = False try: - parser.parse(""" + parser.parse( + """ interface mixin A { legacycaller double compute(double x); }; - """) + """ + ) results = parser.finish() except: threw = True - harness.ok(threw, - "Should not allow legacycallers on interface mixins") + harness.ok(threw, "Should not allow legacycallers on interface mixins") parser = parser.reset() threw = False try: - parser.parse(""" + parser.parse( + """ interface mixin A { inherit attribute x; }; - """) + """ + ) results = parser.finish() except: threw = True - harness.ok(threw, - "Should not allow inherited attribute on interface mixins") + harness.ok(threw, "Should not allow inherited attribute on interface mixins") parser = parser.reset() threw = False try: - parser.parse(""" + parser.parse( + """ interface Interface {}; interface NotMixin { attribute short x; }; Interface includes NotMixin; - """) + """ + ) results = parser.finish() except: threw = True - harness.ok(threw, - "Should fail if the 
right side does not point an interface mixin") + harness.ok(threw, "Should fail if the right side does not point an interface mixin") parser = parser.reset() threw = False try: - parser.parse(""" + parser.parse( + """ interface mixin NotInterface {}; interface mixin Mixin { attribute short x; }; NotInterface includes Mixin; - """) + """ + ) results = parser.finish() except: threw = True - harness.ok(threw, - "Should fail if the left side does not point an interface") + harness.ok(threw, "Should fail if the left side does not point an interface") parser = parser.reset() threw = False try: - parser.parse(""" + parser.parse( + """ interface mixin Mixin { iterable; }; - """) + """ + ) results = parser.finish() except: threw = True - harness.ok(threw, - "Should fail if an interface mixin includes iterable") + harness.ok(threw, "Should fail if an interface mixin includes iterable") parser = parser.reset() threw = False try: - parser.parse(""" + parser.parse( + """ interface mixin Mixin { setlike; }; - """) + """ + ) results = parser.finish() except: threw = True - harness.ok(threw, - "Should fail if an interface mixin includes setlike") + harness.ok(threw, "Should fail if an interface mixin includes setlike") parser = parser.reset() threw = False try: - parser.parse(""" + parser.parse( + """ interface mixin Mixin { maplike; }; - """) + """ + ) results = parser.finish() except: threw = True - harness.ok(threw, - "Should fail if an interface mixin includes maplike") + harness.ok(threw, "Should fail if an interface mixin includes maplike") parser = parser.reset() threw = False try: - parser.parse(""" + parser.parse( + """ interface Interface { attribute short attr; }; @@ -348,17 +426,20 @@ def WebIDLTest(parser, harness): attribute short attr; }; Interface includes Mixin; - """) + """ + ) results = parser.finish() except: threw = True - harness.ok(threw, - "Should fail if the included mixin interface has duplicated member") + harness.ok( + threw, "Should fail if the included mixin interface has duplicated member" + ) parser = parser.reset() threw = False try: - parser.parse(""" + parser.parse( + """ interface Interface {}; interface mixin Mixin1 { attribute short attr; @@ -368,15 +449,18 @@ def WebIDLTest(parser, harness): }; Interface includes Mixin1; Interface includes Mixin2; - """) + """ + ) results = parser.finish() except: threw = True - harness.ok(threw, - "Should fail if the included mixin interfaces have duplicated member") + harness.ok( + threw, "Should fail if the included mixin interfaces have duplicated member" + ) parser = parser.reset() - parser.parse(""" + parser.parse( + """ [Global, Exposed=Window] interface Window {}; [Global, Exposed=Worker] interface Worker {}; [Exposed=Window] @@ -385,15 +469,20 @@ def WebIDLTest(parser, harness): Base returnSelf(); }; Base includes Mixin; - """) + """ + ) results = parser.finish() base = results[2] attr = base.members[0] - harness.check(attr.exposureSet, set(["Window"]), - "Should expose on globals where the base interfaces are exposed") + harness.check( + attr.exposureSet, + set(["Window"]), + "Should expose on globals where the base interfaces are exposed", + ) parser = parser.reset() - parser.parse(""" + parser.parse( + """ [Global, Exposed=Window] interface Window {}; [Global, Exposed=Worker] interface Worker {}; [Exposed=Window] @@ -403,15 +492,18 @@ def WebIDLTest(parser, harness): attribute short a; }; Base includes Mixin; - """) + """ + ) results = parser.finish() base = results[2] attr = base.members[0] - 
harness.check(attr.exposureSet, set(["Window"]), - "Should follow [Exposed] on interface mixin") + harness.check( + attr.exposureSet, set(["Window"]), "Should follow [Exposed] on interface mixin" + ) parser = parser.reset() - parser.parse(""" + parser.parse( + """ [Global, Exposed=Window] interface Window {}; [Global, Exposed=Worker] interface Worker {}; [Exposed=Window] @@ -423,15 +515,20 @@ def WebIDLTest(parser, harness): }; Base1 includes Mixin; Base2 includes Mixin; - """) + """ + ) results = parser.finish() base = results[2] attr = base.members[0] - harness.check(attr.exposureSet, set(["Window", "Worker"]), - "Should expose on all globals where including interfaces are " - "exposed") + harness.check( + attr.exposureSet, + set(["Window", "Worker"]), + "Should expose on all globals where including interfaces are " "exposed", + ) base = results[3] attr = base.members[0] - harness.check(attr.exposureSet, set(["Window", "Worker"]), - "Should expose on all globals where including interfaces are " - "exposed") + harness.check( + attr.exposureSet, + set(["Window", "Worker"]), + "Should expose on all globals where including interfaces are " "exposed", + ) diff --git a/dom/bindings/parser/tests/test_lenientSetter.py b/dom/bindings/parser/tests/test_lenientSetter.py index 78a9ffe9eaa6cb..e4584e704c197c 100644 --- a/dom/bindings/parser/tests/test_lenientSetter.py +++ b/dom/bindings/parser/tests/test_lenientSetter.py @@ -2,8 +2,9 @@ # License, v. 2.0. If a copy of the MPL was not distributed with this # file, You can obtain one at http://mozilla.org/MPL/2.0/. + def should_throw(parser, harness, message, code): - parser = parser.reset(); + parser = parser.reset() threw = False try: parser.parse(code) @@ -16,43 +17,68 @@ def should_throw(parser, harness, message, code): def WebIDLTest(parser, harness): # The [LenientSetter] extended attribute MUST take no arguments. - should_throw(parser, harness, "no arguments", """ + should_throw( + parser, + harness, + "no arguments", + """ interface I { [LenientSetter=X] readonly attribute long A; }; - """) + """, + ) # An attribute with the [LenientSetter] extended attribute MUST NOT # also be declared with the [PutForwards] extended attribute. - should_throw(parser, harness, "PutForwards", """ + should_throw( + parser, + harness, + "PutForwards", + """ interface I { [PutForwards=B, LenientSetter] readonly attribute J A; }; interface J { attribute long B; }; - """) + """, + ) # An attribute with the [LenientSetter] extended attribute MUST NOT # also be declared with the [Replaceable] extended attribute. - should_throw(parser, harness, "Replaceable", """ + should_throw( + parser, + harness, + "Replaceable", + """ interface I { [Replaceable, LenientSetter] readonly attribute J A; }; - """) + """, + ) # The [LenientSetter] extended attribute MUST NOT be used on an # attribute that is not read only. - should_throw(parser, harness, "writable attribute", """ + should_throw( + parser, + harness, + "writable attribute", + """ interface I { [LenientSetter] attribute long A; }; - """) + """, + ) # The [LenientSetter] extended attribute MUST NOT be used on a # static attribute. 
- should_throw(parser, harness, "static attribute", """ + should_throw( + parser, + harness, + "static attribute", + """ interface I { [LenientSetter] static readonly attribute long A; }; - """) + """, + ) diff --git a/dom/bindings/parser/tests/test_method.py b/dom/bindings/parser/tests/test_method.py index 88ee874386c34e..b7ee4cc5e752fb 100644 --- a/dom/bindings/parser/tests/test_method.py +++ b/dom/bindings/parser/tests/test_method.py @@ -1,7 +1,9 @@ import WebIDL + def WebIDLTest(parser, harness): - parser.parse(""" + parser.parse( + """ interface TestMethods { void basic(); static void basicStatic(); @@ -16,35 +18,50 @@ def WebIDLTest(parser, harness): void setAny(any arg1); float doFloats(float arg1); }; - """) + """ + ) results = parser.finish() harness.ok(True, "TestMethods interface parsed without error.") harness.check(len(results), 1, "Should be one production.") iface = results[0] - harness.ok(isinstance(iface, WebIDL.IDLInterface), - "Should be an IDLInterface") - harness.check(iface.identifier.QName(), "::TestMethods", "Interface has the right QName") + harness.ok(isinstance(iface, WebIDL.IDLInterface), "Should be an IDLInterface") + harness.check( + iface.identifier.QName(), "::TestMethods", "Interface has the right QName" + ) harness.check(iface.identifier.name, "TestMethods", "Interface has the right name") harness.check(len(iface.members), 12, "Expect 12 members") methods = iface.members def checkArgument(argument, QName, name, type, optional, variadic): - harness.ok(isinstance(argument, WebIDL.IDLArgument), - "Should be an IDLArgument") - harness.check(argument.identifier.QName(), QName, "Argument has the right QName") + harness.ok(isinstance(argument, WebIDL.IDLArgument), "Should be an IDLArgument") + harness.check( + argument.identifier.QName(), QName, "Argument has the right QName" + ) harness.check(argument.identifier.name, name, "Argument has the right name") harness.check(str(argument.type), type, "Argument has the right return type") - harness.check(argument.optional, optional, "Argument has the right optional value") - harness.check(argument.variadic, variadic, "Argument has the right variadic value") + harness.check( + argument.optional, optional, "Argument has the right optional value" + ) + harness.check( + argument.variadic, variadic, "Argument has the right variadic value" + ) - def checkMethod(method, QName, name, signatures, - static=False, getter=False, setter=False, - deleter=False, legacycaller=False, stringifier=False): - harness.ok(isinstance(method, WebIDL.IDLMethod), - "Should be an IDLMethod") + def checkMethod( + method, + QName, + name, + signatures, + static=False, + getter=False, + setter=False, + deleter=False, + legacycaller=False, + stringifier=False, + ): + harness.ok(isinstance(method, WebIDL.IDLMethod), "Should be an IDLMethod") harness.ok(method.isMethod(), "Method is a method") harness.ok(not method.isAttr(), "Method is not an attr") harness.ok(not method.isConst(), "Method is not a const") @@ -53,72 +70,202 @@ def checkMethod(method, QName, name, signatures, harness.check(method.isStatic(), static, "Method has the correct static value") harness.check(method.isGetter(), getter, "Method has the correct getter value") harness.check(method.isSetter(), setter, "Method has the correct setter value") - harness.check(method.isDeleter(), deleter, "Method has the correct deleter value") - harness.check(method.isLegacycaller(), legacycaller, "Method has the correct legacycaller value") - harness.check(method.isStringifier(), stringifier, "Method 
has the correct stringifier value") - harness.check(len(method.signatures()), len(signatures), "Method has the correct number of signatures") + harness.check( + method.isDeleter(), deleter, "Method has the correct deleter value" + ) + harness.check( + method.isLegacycaller(), + legacycaller, + "Method has the correct legacycaller value", + ) + harness.check( + method.isStringifier(), + stringifier, + "Method has the correct stringifier value", + ) + harness.check( + len(method.signatures()), + len(signatures), + "Method has the correct number of signatures", + ) sigpairs = zip(method.signatures(), signatures) for (gotSignature, expectedSignature) in sigpairs: (gotRetType, gotArgs) = gotSignature (expectedRetType, expectedArgs) = expectedSignature - harness.check(str(gotRetType), expectedRetType, - "Method has the expected return type.") + harness.check( + str(gotRetType), expectedRetType, "Method has the expected return type." + ) for i in range(0, len(gotArgs)): (QName, name, type, optional, variadic) = expectedArgs[i] checkArgument(gotArgs[i], QName, name, type, optional, variadic) checkMethod(methods[0], "::TestMethods::basic", "basic", [("Void", [])]) - checkMethod(methods[1], "::TestMethods::basicStatic", "basicStatic", - [("Void", [])], static=True) - checkMethod(methods[2], "::TestMethods::basicWithSimpleArgs", - "basicWithSimpleArgs", - [("Void", - [("::TestMethods::basicWithSimpleArgs::arg1", "arg1", "Boolean", False, False), - ("::TestMethods::basicWithSimpleArgs::arg2", "arg2", "Byte", False, False), - ("::TestMethods::basicWithSimpleArgs::arg3", "arg3", "UnsignedLong", False, False)])]) - checkMethod(methods[3], "::TestMethods::basicBoolean", "basicBoolean", [("Boolean", [])]) - checkMethod(methods[4], "::TestMethods::basicStaticBoolean", "basicStaticBoolean", [("Boolean", [])], static=True) - checkMethod(methods[5], "::TestMethods::basicBooleanWithSimpleArgs", - "basicBooleanWithSimpleArgs", - [("Boolean", - [("::TestMethods::basicBooleanWithSimpleArgs::arg1", "arg1", "Boolean", False, False), - ("::TestMethods::basicBooleanWithSimpleArgs::arg2", "arg2", "Byte", False, False), - ("::TestMethods::basicBooleanWithSimpleArgs::arg3", "arg3", "UnsignedLong", False, False)])]) - checkMethod(methods[6], "::TestMethods::optionalArg", - "optionalArg", - [("Void", - [("::TestMethods::optionalArg::arg1", "arg1", "ByteOrNull", True, False), - ("::TestMethods::optionalArg::arg2", "arg2", "ByteSequence", True, False)])]) - checkMethod(methods[7], "::TestMethods::variadicArg", - "variadicArg", - [("Void", - [("::TestMethods::variadicArg::arg1", "arg1", "ByteOrNull", True, True)])]) - checkMethod(methods[8], "::TestMethods::getObject", - "getObject", [("Object", [])]) - checkMethod(methods[9], "::TestMethods::setObject", - "setObject", - [("Void", - [("::TestMethods::setObject::arg1", "arg1", "Object", False, False)])]) - checkMethod(methods[10], "::TestMethods::setAny", - "setAny", - [("Void", - [("::TestMethods::setAny::arg1", "arg1", "Any", False, False)])]) - checkMethod(methods[11], "::TestMethods::doFloats", - "doFloats", - [("Float", - [("::TestMethods::doFloats::arg1", "arg1", "Float", False, False)])]) + checkMethod( + methods[1], + "::TestMethods::basicStatic", + "basicStatic", + [("Void", [])], + static=True, + ) + checkMethod( + methods[2], + "::TestMethods::basicWithSimpleArgs", + "basicWithSimpleArgs", + [ + ( + "Void", + [ + ( + "::TestMethods::basicWithSimpleArgs::arg1", + "arg1", + "Boolean", + False, + False, + ), + ( + "::TestMethods::basicWithSimpleArgs::arg2", + "arg2", + 
"Byte", + False, + False, + ), + ( + "::TestMethods::basicWithSimpleArgs::arg3", + "arg3", + "UnsignedLong", + False, + False, + ), + ], + ) + ], + ) + checkMethod( + methods[3], "::TestMethods::basicBoolean", "basicBoolean", [("Boolean", [])] + ) + checkMethod( + methods[4], + "::TestMethods::basicStaticBoolean", + "basicStaticBoolean", + [("Boolean", [])], + static=True, + ) + checkMethod( + methods[5], + "::TestMethods::basicBooleanWithSimpleArgs", + "basicBooleanWithSimpleArgs", + [ + ( + "Boolean", + [ + ( + "::TestMethods::basicBooleanWithSimpleArgs::arg1", + "arg1", + "Boolean", + False, + False, + ), + ( + "::TestMethods::basicBooleanWithSimpleArgs::arg2", + "arg2", + "Byte", + False, + False, + ), + ( + "::TestMethods::basicBooleanWithSimpleArgs::arg3", + "arg3", + "UnsignedLong", + False, + False, + ), + ], + ) + ], + ) + checkMethod( + methods[6], + "::TestMethods::optionalArg", + "optionalArg", + [ + ( + "Void", + [ + ( + "::TestMethods::optionalArg::arg1", + "arg1", + "ByteOrNull", + True, + False, + ), + ( + "::TestMethods::optionalArg::arg2", + "arg2", + "ByteSequence", + True, + False, + ), + ], + ) + ], + ) + checkMethod( + methods[7], + "::TestMethods::variadicArg", + "variadicArg", + [ + ( + "Void", + [ + ( + "::TestMethods::variadicArg::arg1", + "arg1", + "ByteOrNull", + True, + True, + ) + ], + ) + ], + ) + checkMethod(methods[8], "::TestMethods::getObject", "getObject", [("Object", [])]) + checkMethod( + methods[9], + "::TestMethods::setObject", + "setObject", + [ + ( + "Void", + [("::TestMethods::setObject::arg1", "arg1", "Object", False, False)], + ) + ], + ) + checkMethod( + methods[10], + "::TestMethods::setAny", + "setAny", + [("Void", [("::TestMethods::setAny::arg1", "arg1", "Any", False, False)])], + ) + checkMethod( + methods[11], + "::TestMethods::doFloats", + "doFloats", + [("Float", [("::TestMethods::doFloats::arg1", "arg1", "Float", False, False)])], + ) parser = parser.reset() threw = False try: - parser.parse(""" + parser.parse( + """ interface A { void foo(optional float bar = 1); }; - """) + """ + ) results = parser.finish() except Exception as x: threw = True @@ -127,11 +274,13 @@ def checkMethod(method, QName, name, signatures, parser = parser.reset() threw = False try: - parser.parse(""" + parser.parse( + """ interface A { [GetterThrows] void foo(); }; - """) + """ + ) results = parser.finish() except Exception as x: threw = True @@ -140,11 +289,13 @@ def checkMethod(method, QName, name, signatures, parser = parser.reset() threw = False try: - parser.parse(""" + parser.parse( + """ interface A { [SetterThrows] void foo(); }; - """) + """ + ) results = parser.finish() except Exception as x: threw = True @@ -153,11 +304,13 @@ def checkMethod(method, QName, name, signatures, parser = parser.reset() threw = False try: - parser.parse(""" + parser.parse( + """ interface A { [Throw] void foo(); }; - """) + """ + ) results = parser.finish() except Exception as x: threw = True @@ -166,11 +319,13 @@ def checkMethod(method, QName, name, signatures, parser = parser.reset() threw = False try: - parser.parse(""" + parser.parse( + """ interface A { void __noSuchMethod__(); }; - """) + """ + ) results = parser.finish() except Exception as x: threw = True @@ -179,77 +334,96 @@ def checkMethod(method, QName, name, signatures, parser = parser.reset() threw = False try: - parser.parse(""" + parser.parse( + """ interface A { [Throws, LenientFloat] void foo(float myFloat); [Throws] void foo(); }; - """) + """ + ) results = parser.finish() except Exception as x: threw = 
True harness.ok(not threw, "Should allow LenientFloat to be only in a specific overload") parser = parser.reset() - parser.parse(""" + parser.parse( + """ interface A { [Throws] void foo(); [Throws, LenientFloat] void foo(float myFloat); }; - """) + """ + ) results = parser.finish() iface = results[0] methods = iface.members lenientFloat = methods[0].getExtendedAttribute("LenientFloat") - harness.ok(lenientFloat is not None, "LenientFloat in overloads must be added to the method") + harness.ok( + lenientFloat is not None, + "LenientFloat in overloads must be added to the method", + ) parser = parser.reset() threw = False try: - parser.parse(""" + parser.parse( + """ interface A { [Throws, LenientFloat] void foo(float myFloat); [Throws] void foo(float myFloat, float yourFloat); }; - """) + """ + ) results = parser.finish() except Exception as x: threw = True - harness.ok(threw, "Should prevent overloads from getting different restricted float behavior") + harness.ok( + threw, + "Should prevent overloads from getting different restricted float behavior", + ) parser = parser.reset() threw = False try: - parser.parse(""" + parser.parse( + """ interface A { [Throws] void foo(float myFloat, float yourFloat); [Throws, LenientFloat] void foo(float myFloat); }; - """) + """ + ) results = parser.finish() except Exception as x: threw = True - harness.ok(threw, "Should prevent overloads from getting different restricted float behavior (2)") + harness.ok( + threw, + "Should prevent overloads from getting different restricted float behavior (2)", + ) parser = parser.reset() threw = False try: - parser.parse(""" + parser.parse( + """ interface A { [Throws, LenientFloat] void foo(float myFloat); [Throws, LenientFloat] void foo(short myShort); }; - """) + """ + ) results = parser.finish() except Exception as x: threw = True diff --git a/dom/bindings/parser/tests/test_namespace.py b/dom/bindings/parser/tests/test_namespace.py index 62edb270c638cf..247c5b22232311 100644 --- a/dom/bindings/parser/tests/test_namespace.py +++ b/dom/bindings/parser/tests/test_namespace.py @@ -5,13 +5,13 @@ def WebIDLTest(parser, harness): attribute any foo; any bar(); }; - """) + """ + ) results = parser.finish() harness.check(len(results), 1, "Should have a thing.") - harness.ok(results[0].isNamespace(), "Our thing should be a namespace"); - harness.check(len(results[0].members), 2, - "Should have two things in our namespace") + harness.ok(results[0].isNamespace(), "Our thing should be a namespace") + harness.check(len(results[0].members), 2, "Should have two things in our namespace") harness.ok(results[0].members[0].isAttr(), "First member is attribute") harness.ok(results[0].members[0].isStatic(), "Attribute should be static") harness.ok(results[0].members[1].isMethod(), "Second member is method") @@ -26,17 +26,17 @@ def WebIDLTest(parser, harness): partial namespace MyNamespace { any bar(); }; - """) + """ + ) results = parser.finish() harness.check(len(results), 2, "Should have things.") - harness.ok(results[0].isNamespace(), "Our thing should be a namespace"); - harness.check(len(results[0].members), 2, - "Should have two things in our namespace") + harness.ok(results[0].isNamespace(), "Our thing should be a namespace") + harness.check(len(results[0].members), 2, "Should have two things in our namespace") harness.ok(results[0].members[0].isAttr(), "First member is attribute") - harness.ok(results[0].members[0].isStatic(), "Attribute should be static"); + harness.ok(results[0].members[0].isStatic(), "Attribute should be 
static") harness.ok(results[0].members[1].isMethod(), "Second member is method") - harness.ok(results[0].members[1].isStatic(), "Operation should be static"); + harness.ok(results[0].members[1].isStatic(), "Operation should be static") parser = parser.reset() parser.parse( @@ -47,17 +47,17 @@ def WebIDLTest(parser, harness): namespace MyNamespace { attribute any foo; }; - """) + """ + ) results = parser.finish() harness.check(len(results), 2, "Should have things.") - harness.ok(results[1].isNamespace(), "Our thing should be a namespace"); - harness.check(len(results[1].members), 2, - "Should have two things in our namespace") + harness.ok(results[1].isNamespace(), "Our thing should be a namespace") + harness.check(len(results[1].members), 2, "Should have two things in our namespace") harness.ok(results[1].members[0].isAttr(), "First member is attribute") - harness.ok(results[1].members[0].isStatic(), "Attribute should be static"); + harness.ok(results[1].members[0].isStatic(), "Attribute should be static") harness.ok(results[1].members[1].isMethod(), "Second member is method") - harness.ok(results[1].members[1].isStatic(), "Operation should be static"); + harness.ok(results[1].members[1].isStatic(), "Operation should be static") parser = parser.reset() threw = False @@ -67,7 +67,8 @@ def WebIDLTest(parser, harness): namespace MyNamespace { static attribute any foo; }; - """) + """ + ) results = parser.finish() except Exception as x: @@ -82,7 +83,8 @@ def WebIDLTest(parser, harness): namespace MyNamespace { static any bar(); }; - """) + """ + ) results = parser.finish() except Exception as x: @@ -101,7 +103,8 @@ def WebIDLTest(parser, harness): interface MyNamespace { any baz(); }; - """) + """ + ) results = parser.finish() except Exception as x: @@ -120,7 +123,8 @@ def WebIDLTest(parser, harness): namespace MyNamespace { any bar(); }; - """) + """ + ) results = parser.finish() except Exception as x: @@ -139,7 +143,8 @@ def WebIDLTest(parser, harness): namespace MyNamespace { any bar(); }; - """) + """ + ) results = parser.finish() except Exception as x: @@ -158,7 +163,8 @@ def WebIDLTest(parser, harness): interface MyNamespace { any bar(); }; - """) + """ + ) results = parser.finish() except Exception as x: @@ -177,7 +183,8 @@ def WebIDLTest(parser, harness): partial interface MyNamespace { any baz(); }; - """) + """ + ) results = parser.finish() except Exception as x: @@ -196,7 +203,8 @@ def WebIDLTest(parser, harness): namespace MyNamespace { any bar(); }; - """) + """ + ) results = parser.finish() except Exception as x: @@ -215,7 +223,8 @@ def WebIDLTest(parser, harness): partial namespace MyNamespace { any baz(); }; - """) + """ + ) results = parser.finish() except Exception as x: diff --git a/dom/bindings/parser/tests/test_newobject.py b/dom/bindings/parser/tests/test_newobject.py index 26785c6a2700f9..c12995a0e86cf0 100644 --- a/dom/bindings/parser/tests/test_newobject.py +++ b/dom/bindings/parser/tests/test_newobject.py @@ -1,6 +1,7 @@ # Import the WebIDL module, so we can do isinstance checks and whatnot import WebIDL + def WebIDLTest(parser, harness): # Basic functionality parser.parse( @@ -9,7 +10,8 @@ def WebIDLTest(parser, harness): [NewObject] readonly attribute Iface attr; [NewObject] Iface method(); }; - """) + """ + ) results = parser.finish() harness.ok(results, "Should not have thrown on basic [NewObject] usage") @@ -21,7 +23,8 @@ def WebIDLTest(parser, harness): interface Iface { [Pure, NewObject] readonly attribute Iface attr; }; - """) + """ + ) results = 
parser.finish() except: threw = True @@ -35,7 +38,8 @@ def WebIDLTest(parser, harness): interface Iface { [Pure, NewObject] Iface method(); }; - """) + """ + ) results = parser.finish() except: threw = True @@ -49,7 +53,8 @@ def WebIDLTest(parser, harness): interface Iface { [Cached, NewObject, Affects=Nothing] readonly attribute Iface attr; }; - """) + """ + ) results = parser.finish() except: threw = True @@ -63,7 +68,8 @@ def WebIDLTest(parser, harness): interface Iface { [StoreInSlot, NewObject, Affects=Nothing] readonly attribute Iface attr; }; - """) + """ + ) results = parser.finish() except: threw = True diff --git a/dom/bindings/parser/tests/test_nullable_equivalency.py b/dom/bindings/parser/tests/test_nullable_equivalency.py index 8ba6771677a1a2..012c5fcff7c924 100644 --- a/dom/bindings/parser/tests/test_nullable_equivalency.py +++ b/dom/bindings/parser/tests/test_nullable_equivalency.py @@ -1,7 +1,9 @@ import WebIDL + def WebIDLTest(parser, harness): - parser.parse(""" + parser.parse( + """ interface TestNullableEquivalency1 { attribute long a; attribute long? b; @@ -53,22 +55,24 @@ def WebIDLTest(parser, harness): attribute object a; attribute object? b; }; - """) + """ + ) for decl in parser.finish(): if decl.isInterface(): checkEquivalent(decl, harness) + def checkEquivalent(iface, harness): type1 = iface.members[0].type type2 = iface.members[1].type - harness.check(type1.nullable(), False, 'attr1 should not be nullable') - harness.check(type2.nullable(), True, 'attr2 should be nullable') + harness.check(type1.nullable(), False, "attr1 should not be nullable") + harness.check(type2.nullable(), True, "attr2 should be nullable") # We don't know about type1, but type2, the nullable type, definitely # shouldn't be builtin. - harness.check(type2.builtin, False, 'attr2 should not be builtin') + harness.check(type2.builtin, False, "attr2 should not be builtin") # Ensure that all attributes of type2 match those in type1, except for: # - names on an ignore list, @@ -78,10 +82,22 @@ def checkEquivalent(iface, harness): # # Yes, this is an ugly, fragile hack. But it finds bugs... 
for attr in dir(type1): - if attr.startswith('_') or \ - attr in ['nullable', 'builtin', 'filename', 'location', - 'inner', 'QName', 'getDeps', 'name', 'prettyName'] or \ - (hasattr(type(type1), attr) and not callable(getattr(type1, attr))): + if ( + attr.startswith("_") + or attr + in [ + "nullable", + "builtin", + "filename", + "location", + "inner", + "QName", + "getDeps", + "name", + "prettyName", + ] + or (hasattr(type(type1), attr) and not callable(getattr(type1, attr))) + ): continue a1 = getattr(type1, attr) @@ -96,20 +112,30 @@ def checkEquivalent(iface, harness): try: a2 = getattr(type2, attr) except: - harness.ok(False, 'Missing %s attribute on type %s in %s' % (attr, type2, iface)) + harness.ok( + False, + "Missing %s attribute on type %s in %s" % (attr, type2, iface), + ) continue if not callable(a2): - harness.ok(False, "%s attribute on type %s in %s wasn't callable" % (attr, type2, iface)) + harness.ok( + False, + "%s attribute on type %s in %s wasn't callable" + % (attr, type2, iface), + ) continue v2 = a2() - harness.check(v2, v1, '%s method return value' % attr) + harness.check(v2, v1, "%s method return value" % attr) else: try: a2 = getattr(type2, attr) except: - harness.ok(False, 'Missing %s attribute on type %s in %s' % (attr, type2, iface)) + harness.ok( + False, + "Missing %s attribute on type %s in %s" % (attr, type2, iface), + ) continue - harness.check(a2, a1, '%s attribute should match' % attr) + harness.check(a2, a1, "%s attribute should match" % attr) diff --git a/dom/bindings/parser/tests/test_nullable_void.py b/dom/bindings/parser/tests/test_nullable_void.py index 961ff825e9f9d8..414fa559336d98 100644 --- a/dom/bindings/parser/tests/test_nullable_void.py +++ b/dom/bindings/parser/tests/test_nullable_void.py @@ -1,11 +1,13 @@ def WebIDLTest(parser, harness): threw = False try: - parser.parse(""" + parser.parse( + """ interface NullableVoid { void? 
foo(); }; - """) + """ + ) results = parser.finish() except: diff --git a/dom/bindings/parser/tests/test_optional_constraints.py b/dom/bindings/parser/tests/test_optional_constraints.py index 6217465ce7dfd0..bafa6269e41e2c 100644 --- a/dom/bindings/parser/tests/test_optional_constraints.py +++ b/dom/bindings/parser/tests/test_optional_constraints.py @@ -1,30 +1,35 @@ def WebIDLTest(parser, harness): threw = False try: - parser.parse(""" + parser.parse( + """ interface OptionalConstraints1 { void foo(optional byte arg1, byte arg2); }; - """) + """ + ) results = parser.finish() except: threw = True - harness.ok(not threw, - "Should not have thrown on non-optional argument following " - "optional argument.") + harness.ok( + not threw, + "Should not have thrown on non-optional argument following " + "optional argument.", + ) parser = parser.reset() - parser.parse(""" + parser.parse( + """ interface OptionalConstraints2 { void foo(optional byte arg1 = 1, optional byte arg2 = 2, optional byte arg3, optional byte arg4 = 4, optional byte arg5, optional byte arg6 = 9); }; - """) + """ + ) results = parser.finish() args = results[0].members[0].signatures()[0][1] harness.check(len(args), 6, "Should have 6 arguments") - harness.check(args[5].defaultValue.value, 9, - "Should have correct default value") + harness.check(args[5].defaultValue.value, 9, "Should have correct default value") diff --git a/dom/bindings/parser/tests/test_overload.py b/dom/bindings/parser/tests/test_overload.py index 3c680ad52333c9..b1b82b86e7a7da 100644 --- a/dom/bindings/parser/tests/test_overload.py +++ b/dom/bindings/parser/tests/test_overload.py @@ -1,7 +1,9 @@ import WebIDL + def WebIDLTest(parser, harness): - parser.parse(""" + parser.parse( + """ interface TestOverloads { void basic(); void basic(long arg1); @@ -14,21 +16,29 @@ def WebIDLTest(parser, harness): void optionalTest(); void optionalTest(optional long num1, long num2); }; - """) + """ + ) results = parser.finish() harness.ok(True, "TestOverloads interface parsed without error.") harness.check(len(results), 1, "Should be one production.") iface = results[0] - harness.ok(isinstance(iface, WebIDL.IDLInterface), - "Should be an IDLInterface") - harness.check(iface.identifier.QName(), "::TestOverloads", "Interface has the right QName") - harness.check(iface.identifier.name, "TestOverloads", "Interface has the right name") + harness.ok(isinstance(iface, WebIDL.IDLInterface), "Should be an IDLInterface") + harness.check( + iface.identifier.QName(), "::TestOverloads", "Interface has the right QName" + ) + harness.check( + iface.identifier.name, "TestOverloads", "Interface has the right name" + ) harness.check(len(iface.members), 4, "Expect %s members" % 4) member = iface.members[0] - harness.check(member.identifier.QName(), "::TestOverloads::basic", "Method has the right QName") + harness.check( + member.identifier.QName(), + "::TestOverloads::basic", + "Method has the right QName", + ) harness.check(member.identifier.name, "basic", "Method has the right name") harness.check(member.hasOverloads(), True, "Method has overloads") @@ -45,16 +55,20 @@ def WebIDLTest(parser, harness): harness.check(len(argumentSet), 1, "Expect an argument set with one argument") argument = argumentSet[0] - harness.ok(isinstance(argument, WebIDL.IDLArgument), - "Should be an IDLArgument") - harness.check(argument.identifier.QName(), "::TestOverloads::basic::arg1", "Argument has the right QName") + harness.ok(isinstance(argument, WebIDL.IDLArgument), "Should be an IDLArgument") + 
harness.check( + argument.identifier.QName(), + "::TestOverloads::basic::arg1", + "Argument has the right QName", + ) harness.check(argument.identifier.name, "arg1", "Argument has the right name") harness.check(str(argument.type), "Long", "Argument has the right type") member = iface.members[3] - harness.check(len(member.overloadsForArgCount(0)), 1, - "Only one overload for no args") - harness.check(len(member.overloadsForArgCount(1)), 0, - "No overloads for one arg") - harness.check(len(member.overloadsForArgCount(2)), 1, - "Only one overload for two args") + harness.check( + len(member.overloadsForArgCount(0)), 1, "Only one overload for no args" + ) + harness.check(len(member.overloadsForArgCount(1)), 0, "No overloads for one arg") + harness.check( + len(member.overloadsForArgCount(2)), 1, "Only one overload for two args" + ) diff --git a/dom/bindings/parser/tests/test_promise.py b/dom/bindings/parser/tests/test_promise.py index 43c74029dc525a..1dac59dfd112a1 100644 --- a/dom/bindings/parser/tests/test_promise.py +++ b/dom/bindings/parser/tests/test_promise.py @@ -1,157 +1,175 @@ def WebIDLTest(parser, harness): threw = False try: - parser.parse(""" + parser.parse( + """ interface A { legacycaller Promise foo(); }; - """) + """ + ) results = parser.finish() except: threw = True - harness.ok(threw, - "Should not allow Promise return values for legacycaller.") + harness.ok(threw, "Should not allow Promise return values for legacycaller.") parser = parser.reset() threw = False try: - parser.parse(""" + parser.parse( + """ interface A { Promise foo(); long foo(long arg); }; - """) - results = parser.finish(); + """ + ) + results = parser.finish() except: threw = True - harness.ok(threw, - "Should not allow overloads which have both Promise and " - "non-Promise return types.") + harness.ok( + threw, + "Should not allow overloads which have both Promise and " + "non-Promise return types.", + ) parser = parser.reset() threw = False try: - parser.parse(""" + parser.parse( + """ interface A { long foo(long arg); Promise foo(); }; - """) - results = parser.finish(); + """ + ) + results = parser.finish() except: threw = True - harness.ok(threw, - "Should not allow overloads which have both Promise and " - "non-Promise return types.") + harness.ok( + threw, + "Should not allow overloads which have both Promise and " + "non-Promise return types.", + ) parser = parser.reset() threw = False try: - parser.parse(""" + parser.parse( + """ interface A { Promise? foo(); }; - """) - results = parser.finish(); + """ + ) + results = parser.finish() except: threw = True - harness.ok(threw, - "Should not allow nullable Promise return values.") + harness.ok(threw, "Should not allow nullable Promise return values.") parser = parser.reset() threw = False try: - parser.parse(""" + parser.parse( + """ interface A { void foo(Promise? arg); }; - """) - results = parser.finish(); + """ + ) + results = parser.finish() except: threw = True - harness.ok(threw, - "Should not allow nullable Promise arguments.") + harness.ok(threw, "Should not allow nullable Promise arguments.") parser = parser.reset() - parser.parse(""" + parser.parse( + """ interface A { Promise foo(); Promise foo(long arg); }; - """) - results = parser.finish(); + """ + ) + results = parser.finish() - harness.ok(True, - "Should allow overloads which only have Promise and return " - "types.") + harness.ok( + True, "Should allow overloads which only have Promise and return " "types." 
+ ) parser = parser.reset() threw = False try: - parser.parse(""" + parser.parse( + """ interface A { attribute Promise attr; }; - """) - results = parser.finish(); + """ + ) + results = parser.finish() except: threw = True - harness.ok(threw, - "Should not allow writable Promise-typed attributes.") + harness.ok(threw, "Should not allow writable Promise-typed attributes.") parser = parser.reset() threw = False try: - parser.parse(""" + parser.parse( + """ interface A { [LenientSetter] readonly attribute Promise attr; }; - """) - results = parser.finish(); + """ + ) + results = parser.finish() except: threw = True - harness.ok(threw, - "Should not allow [LenientSetter] Promise-typed attributes.") + harness.ok(threw, "Should not allow [LenientSetter] Promise-typed attributes.") parser = parser.reset() threw = False try: - parser.parse(""" + parser.parse( + """ interface A { [PutForwards=bar] readonly attribute Promise attr; }; - """) - results = parser.finish(); + """ + ) + results = parser.finish() except: threw = True - harness.ok(threw, - "Should not allow [PutForwards] Promise-typed attributes.") + harness.ok(threw, "Should not allow [PutForwards] Promise-typed attributes.") parser = parser.reset() threw = False try: - parser.parse(""" + parser.parse( + """ interface A { [Replaceable] readonly attribute Promise attr; }; - """) - results = parser.finish(); + """ + ) + results = parser.finish() except: threw = True - harness.ok(threw, - "Should not allow [Replaceable] Promise-typed attributes.") + harness.ok(threw, "Should not allow [Replaceable] Promise-typed attributes.") parser = parser.reset() threw = False try: - parser.parse(""" + parser.parse( + """ interface A { [SameObject] readonly attribute Promise attr; }; - """) - results = parser.finish(); + """ + ) + results = parser.finish() except: threw = True - harness.ok(threw, - "Should not allow [SameObject] Promise-typed attributes.") + harness.ok(threw, "Should not allow [SameObject] Promise-typed attributes.") diff --git a/dom/bindings/parser/tests/test_prototype_ident.py b/dom/bindings/parser/tests/test_prototype_ident.py index d3932b54f8bcf8..5a806bf2a2dc55 100644 --- a/dom/bindings/parser/tests/test_prototype_ident.py +++ b/dom/bindings/parser/tests/test_prototype_ident.py @@ -1,11 +1,13 @@ def WebIDLTest(parser, harness): threw = False try: - parser.parse(""" + parser.parse( + """ interface TestIface { static attribute boolean prototype; }; - """) + """ + ) results = parser.finish() except: threw = True @@ -15,11 +17,13 @@ def WebIDLTest(parser, harness): parser = parser.reset() threw = False try: - parser.parse(""" + parser.parse( + """ interface TestIface { static boolean prototype(); }; - """) + """ + ) results = parser.finish() except: threw = True @@ -29,11 +33,13 @@ def WebIDLTest(parser, harness): parser = parser.reset() threw = False try: - parser.parse(""" + parser.parse( + """ interface TestIface { const boolean prototype = true; }; - """) + """ + ) results = parser.finish() except: threw = True @@ -42,39 +48,60 @@ def WebIDLTest(parser, harness): # Make sure that we can parse non-static attributes with 'prototype' as identifier. 
parser = parser.reset() - parser.parse(""" + parser.parse( + """ interface TestIface { attribute boolean prototype; }; - """) + """ + ) results = parser.finish() - testIface = results[0]; - harness.check(testIface.members[0].isStatic(), False, "Attribute should not be static") - harness.check(testIface.members[0].identifier.name, "prototype", "Attribute identifier should be 'prototype'") + testIface = results[0] + harness.check( + testIface.members[0].isStatic(), False, "Attribute should not be static" + ) + harness.check( + testIface.members[0].identifier.name, + "prototype", + "Attribute identifier should be 'prototype'", + ) # Make sure that we can parse non-static operations with 'prototype' as identifier. parser = parser.reset() - parser.parse(""" + parser.parse( + """ interface TestIface { boolean prototype(); }; - """) + """ + ) results = parser.finish() - testIface = results[0]; - harness.check(testIface.members[0].isStatic(), False, "Operation should not be static") - harness.check(testIface.members[0].identifier.name, "prototype", "Operation identifier should be 'prototype'") + testIface = results[0] + harness.check( + testIface.members[0].isStatic(), False, "Operation should not be static" + ) + harness.check( + testIface.members[0].identifier.name, + "prototype", + "Operation identifier should be 'prototype'", + ) # Make sure that we can parse dictionary members with 'prototype' as identifier. parser = parser.reset() - parser.parse(""" + parser.parse( + """ dictionary TestDict { boolean prototype; }; - """) + """ + ) results = parser.finish() - testDict = results[0]; - harness.check(testDict.members[0].identifier.name, "prototype", "Dictionary member should be 'prototype'") - + testDict = results[0] + harness.check( + testDict.members[0].identifier.name, + "prototype", + "Dictionary member should be 'prototype'", + ) diff --git a/dom/bindings/parser/tests/test_putForwards.py b/dom/bindings/parser/tests/test_putForwards.py index 86a1bf115b6cc1..5ec4dde280e4d8 100644 --- a/dom/bindings/parser/tests/test_putForwards.py +++ b/dom/bindings/parser/tests/test_putForwards.py @@ -1,11 +1,13 @@ def WebIDLTest(parser, harness): threw = False try: - parser.parse(""" + parser.parse( + """ interface I { [PutForwards=B] readonly attribute long A; }; - """) + """ + ) results = parser.finish() except: @@ -13,16 +15,18 @@ def WebIDLTest(parser, harness): harness.ok(threw, "Should have thrown.") - parser = parser.reset(); + parser = parser.reset() threw = False try: - parser.parse(""" + parser.parse( + """ interface I { [PutForwards=B] readonly attribute J A; }; interface J { }; - """) + """ + ) results = parser.finish() except: @@ -30,17 +34,19 @@ def WebIDLTest(parser, harness): harness.ok(threw, "Should have thrown.") - parser = parser.reset(); + parser = parser.reset() threw = False try: - parser.parse(""" + parser.parse( + """ interface I { [PutForwards=B] attribute J A; }; interface J { attribute long B; }; - """) + """ + ) results = parser.finish() except: @@ -48,17 +54,19 @@ def WebIDLTest(parser, harness): harness.ok(threw, "Should have thrown.") - parser = parser.reset(); + parser = parser.reset() threw = False try: - parser.parse(""" + parser.parse( + """ interface I { [PutForwards=B] static readonly attribute J A; }; interface J { attribute long B; }; - """) + """ + ) results = parser.finish() except: @@ -66,17 +74,19 @@ def WebIDLTest(parser, harness): harness.ok(threw, "Should have thrown.") - parser = parser.reset(); + parser = parser.reset() threw = False try: - parser.parse(""" + 
parser.parse( + """ callback interface I { [PutForwards=B] readonly attribute J A; }; interface J { attribute long B; }; - """) + """ + ) results = parser.finish() except: @@ -84,10 +94,11 @@ def WebIDLTest(parser, harness): harness.ok(threw, "Should have thrown.") - parser = parser.reset(); + parser = parser.reset() threw = False try: - parser.parse(""" + parser.parse( + """ interface I { [PutForwards=C] readonly attribute J A; [PutForwards=C] readonly attribute J B; @@ -98,7 +109,8 @@ def WebIDLTest(parser, harness): interface K { [PutForwards=A] readonly attribute I D; }; - """) + """ + ) results = parser.finish() except: diff --git a/dom/bindings/parser/tests/test_record.py b/dom/bindings/parser/tests/test_record.py index d50572caf0786e..b9e51f5027444a 100644 --- a/dom/bindings/parser/tests/test_record.py +++ b/dom/bindings/parser/tests/test_record.py @@ -1,18 +1,22 @@ import WebIDL + def WebIDLTest(parser, harness): - parser.parse(""" + parser.parse( + """ dictionary Dict {}; interface RecordArg { void foo(record<DOMString, Dict> arg); }; - """) + """ + ) results = parser.finish() - harness.check(len(results), 2, "Should know about two things"); - harness.ok(isinstance(results[1], WebIDL.IDLInterface), - "Should have an interface here"); + harness.check(len(results), 2, "Should know about two things") + harness.ok( + isinstance(results[1], WebIDL.IDLInterface), "Should have an interface here" + ) members = results[1].members harness.check(len(members), 1, "Should have one member") harness.ok(members[0].isMethod(), "Should have method") @@ -20,34 +24,38 @@ def WebIDLTest(parser, harness): args = signature[1] harness.check(len(args), 1, "Should have one arg") harness.ok(args[0].type.isRecord(), "Should have a record type here") - harness.ok(args[0].type.inner.isDictionary(), - "Should have a dictionary inner type") + harness.ok(args[0].type.inner.isDictionary(), "Should have a dictionary inner type") parser = parser.reset() threw = False try: - parser.parse(""" + parser.parse( + """ interface RecordVoidArg { void foo(record<DOMString, void> arg); }; - """) + """ + ) results = parser.finish() except Exception as x: threw = True - harness.ok(threw, "Should have thrown because record can't have void as value type.") - + harness.ok( + threw, "Should have thrown because record can't have void as value type." + ) + parser = parser.reset() threw = False try: - parser.parse(""" + parser.parse( + """ dictionary Dict { record<DOMString, Dict> val; }; - """) + """ + ) results = parser.finish() except Exception as x: threw = True - harness.ok(threw, - "Should have thrown on dictionary containing itself via record.") + harness.ok(threw, "Should have thrown on dictionary containing itself via record.") diff --git a/dom/bindings/parser/tests/test_replaceable.py b/dom/bindings/parser/tests/test_replaceable.py index 93ee42ed91985f..06ea6a47239d68 100644 --- a/dom/bindings/parser/tests/test_replaceable.py +++ b/dom/bindings/parser/tests/test_replaceable.py @@ -2,8 +2,9 @@ # License, v. 2.0. If a copy of the MPL was not distributed with this # file, You can obtain one at http://mozilla.org/MPL/2.0/. + def should_throw(parser, harness, message, code): - parser = parser.reset(); + parser = parser.reset() threw = False try: parser.parse(code) @@ -16,43 +17,68 @@ def WebIDLTest(parser, harness): # The [Replaceable] extended attribute MUST take no arguments. 
- should_throw(parser, harness, "no arguments", """ + should_throw( + parser, + harness, + "no arguments", + """ interface I { [Replaceable=X] readonly attribute long A; }; - """) + """, + ) # An attribute with the [Replaceable] extended attribute MUST NOT also be # declared with the [PutForwards] extended attribute. - should_throw(parser, harness, "PutForwards", """ + should_throw( + parser, + harness, + "PutForwards", + """ interface I { [PutForwards=B, Replaceable] readonly attribute J A; }; interface J { attribute long B; }; - """) + """, + ) # The [Replaceable] extended attribute MUST NOT be used on an attribute # that is not read only. - should_throw(parser, harness, "writable attribute", """ + should_throw( + parser, + harness, + "writable attribute", + """ interface I { [Replaceable] attribute long A; }; - """) + """, + ) # The [Replaceable] extended attribute MUST NOT be used on a static # attribute. - should_throw(parser, harness, "static attribute", """ + should_throw( + parser, + harness, + "static attribute", + """ interface I { [Replaceable] static readonly attribute long A; }; - """) + """, + ) # The [Replaceable] extended attribute MUST NOT be used on an attribute # declared on a callback interface. - should_throw(parser, harness, "callback interface", """ + should_throw( + parser, + harness, + "callback interface", + """ callback interface I { [Replaceable] readonly attribute long A; }; - """) + """, + ) diff --git a/dom/bindings/parser/tests/test_securecontext_extended_attribute.py b/dom/bindings/parser/tests/test_securecontext_extended_attribute.py index 442dba45d760c9..71a2deff59395a 100644 --- a/dom/bindings/parser/tests/test_securecontext_extended_attribute.py +++ b/dom/bindings/parser/tests/test_securecontext_extended_attribute.py @@ -1,7 +1,9 @@ import WebIDL + def WebIDLTest(parser, harness): - parser.parse(""" + parser.parse( + """ [SecureContext] interface TestSecureContextOnInterface { const octet TEST_CONSTANT = 0; @@ -13,27 +15,47 @@ def WebIDLTest(parser, harness): readonly attribute byte testAttribute2; void testMethod2(byte foo); }; - """) + """ + ) results = parser.finish() - harness.check(len(results[0].members), 6, "TestSecureContextOnInterface should have six members") - harness.ok(results[0].getExtendedAttribute("SecureContext"), - "Interface should have [SecureContext] extended attribute") - harness.ok(results[0].members[0].getExtendedAttribute("SecureContext"), - "[SecureContext] should propagate from interface to constant members") - harness.ok(results[0].members[1].getExtendedAttribute("SecureContext"), - "[SecureContext] should propagate from interface to attribute members") - harness.ok(results[0].members[2].getExtendedAttribute("SecureContext"), - "[SecureContext] should propagate from interface to method members") - harness.ok(results[0].members[3].getExtendedAttribute("SecureContext"), - "[SecureContext] should propagate from interface to constant members from partial interface") - harness.ok(results[0].members[4].getExtendedAttribute("SecureContext"), - "[SecureContext] should propagate from interface to attribute members from partial interface") - harness.ok(results[0].members[5].getExtendedAttribute("SecureContext"), - "[SecureContext] should propagate from interface to method members from partial interface") + harness.check( + len(results[0].members), + 6, + "TestSecureContextOnInterface should have six members", + ) + harness.ok( + results[0].getExtendedAttribute("SecureContext"), + "Interface should have [SecureContext] extended 
attribute", + ) + harness.ok( + results[0].members[0].getExtendedAttribute("SecureContext"), + "[SecureContext] should propagate from interface to constant members", + ) + harness.ok( + results[0].members[1].getExtendedAttribute("SecureContext"), + "[SecureContext] should propagate from interface to attribute members", + ) + harness.ok( + results[0].members[2].getExtendedAttribute("SecureContext"), + "[SecureContext] should propagate from interface to method members", + ) + harness.ok( + results[0].members[3].getExtendedAttribute("SecureContext"), + "[SecureContext] should propagate from interface to constant members from partial interface", + ) + harness.ok( + results[0].members[4].getExtendedAttribute("SecureContext"), + "[SecureContext] should propagate from interface to attribute members from partial interface", + ) + harness.ok( + results[0].members[5].getExtendedAttribute("SecureContext"), + "[SecureContext] should propagate from interface to method members from partial interface", + ) # Same thing, but with the partial interface specified first: parser = parser.reset() - parser.parse(""" + parser.parse( + """ partial interface TestSecureContextOnInterfaceAfterPartialInterface { const octet TEST_CONSTANT_2 = 0; readonly attribute byte testAttribute2; @@ -45,26 +67,46 @@ def WebIDLTest(parser, harness): readonly attribute byte testAttribute; void testMethod(byte foo); }; - """) + """ + ) results = parser.finish() - harness.check(len(results[1].members), 6, "TestSecureContextOnInterfaceAfterPartialInterface should have six members") - harness.ok(results[1].getExtendedAttribute("SecureContext"), - "Interface should have [SecureContext] extended attribute") - harness.ok(results[1].members[0].getExtendedAttribute("SecureContext"), - "[SecureContext] should propagate from interface to constant members") - harness.ok(results[1].members[1].getExtendedAttribute("SecureContext"), - "[SecureContext] should propagate from interface to attribute members") - harness.ok(results[1].members[2].getExtendedAttribute("SecureContext"), - "[SecureContext] should propagate from interface to method members") - harness.ok(results[1].members[3].getExtendedAttribute("SecureContext"), - "[SecureContext] should propagate from interface to constant members from partial interface") - harness.ok(results[1].members[4].getExtendedAttribute("SecureContext"), - "[SecureContext] should propagate from interface to attribute members from partial interface") - harness.ok(results[1].members[5].getExtendedAttribute("SecureContext"), - "[SecureContext] should propagate from interface to method members from partial interface") + harness.check( + len(results[1].members), + 6, + "TestSecureContextOnInterfaceAfterPartialInterface should have six members", + ) + harness.ok( + results[1].getExtendedAttribute("SecureContext"), + "Interface should have [SecureContext] extended attribute", + ) + harness.ok( + results[1].members[0].getExtendedAttribute("SecureContext"), + "[SecureContext] should propagate from interface to constant members", + ) + harness.ok( + results[1].members[1].getExtendedAttribute("SecureContext"), + "[SecureContext] should propagate from interface to attribute members", + ) + harness.ok( + results[1].members[2].getExtendedAttribute("SecureContext"), + "[SecureContext] should propagate from interface to method members", + ) + harness.ok( + results[1].members[3].getExtendedAttribute("SecureContext"), + "[SecureContext] should propagate from interface to constant members from partial interface", + ) + harness.ok( + 
results[1].members[4].getExtendedAttribute("SecureContext"), + "[SecureContext] should propagate from interface to attribute members from partial interface", + ) + harness.ok( + results[1].members[5].getExtendedAttribute("SecureContext"), + "[SecureContext] should propagate from interface to method members from partial interface", + ) parser = parser.reset() - parser.parse(""" + parser.parse( + """ interface TestSecureContextOnPartialInterface { const octet TEST_CONSTANT = 0; readonly attribute byte testAttribute; @@ -76,26 +118,46 @@ def WebIDLTest(parser, harness): readonly attribute byte testAttribute2; void testMethod2(byte foo); }; - """) + """ + ) results = parser.finish() - harness.check(len(results[0].members), 6, "TestSecureContextOnPartialInterface should have six members") - harness.ok(results[0].getExtendedAttribute("SecureContext") is None, - "[SecureContext] should not propagate from a partial interface to the interface") - harness.ok(results[0].members[0].getExtendedAttribute("SecureContext") is None, - "[SecureContext] should not propagate from a partial interface to the interface's constant members") - harness.ok(results[0].members[1].getExtendedAttribute("SecureContext") is None, - "[SecureContext] should not propagate from a partial interface to the interface's attribute members") - harness.ok(results[0].members[2].getExtendedAttribute("SecureContext") is None, - "[SecureContext] should not propagate from a partial interface to the interface's method members") - harness.ok(results[0].members[3].getExtendedAttribute("SecureContext"), - "Constant members from [SecureContext] partial interface should be [SecureContext]") - harness.ok(results[0].members[4].getExtendedAttribute("SecureContext"), - "Attribute members from [SecureContext] partial interface should be [SecureContext]") - harness.ok(results[0].members[5].getExtendedAttribute("SecureContext"), - "Method members from [SecureContext] partial interface should be [SecureContext]") + harness.check( + len(results[0].members), + 6, + "TestSecureContextOnPartialInterface should have six members", + ) + harness.ok( + results[0].getExtendedAttribute("SecureContext") is None, + "[SecureContext] should not propagate from a partial interface to the interface", + ) + harness.ok( + results[0].members[0].getExtendedAttribute("SecureContext") is None, + "[SecureContext] should not propagate from a partial interface to the interface's constant members", + ) + harness.ok( + results[0].members[1].getExtendedAttribute("SecureContext") is None, + "[SecureContext] should not propagate from a partial interface to the interface's attribute members", + ) + harness.ok( + results[0].members[2].getExtendedAttribute("SecureContext") is None, + "[SecureContext] should not propagate from a partial interface to the interface's method members", + ) + harness.ok( + results[0].members[3].getExtendedAttribute("SecureContext"), + "Constant members from [SecureContext] partial interface should be [SecureContext]", + ) + harness.ok( + results[0].members[4].getExtendedAttribute("SecureContext"), + "Attribute members from [SecureContext] partial interface should be [SecureContext]", + ) + harness.ok( + results[0].members[5].getExtendedAttribute("SecureContext"), + "Method members from [SecureContext] partial interface should be [SecureContext]", + ) parser = parser.reset() - parser.parse(""" + parser.parse( + """ interface TestSecureContextOnInterfaceMembers { const octet TEST_NON_SECURE_CONSTANT_1 = 0; [SecureContext] @@ -110,32 +172,58 @@ def 
WebIDLTest(parser, harness): void testSecureMethod(byte foo); void testNonSecureMethod2(byte foo); }; - """) + """ + ) results = parser.finish() - harness.check(len(results[0].members), 9, "TestSecureContextOnInterfaceMembers should have nine members") - harness.ok(results[0].getExtendedAttribute("SecureContext") is None, - "[SecureContext] on members should not propagate up to the interface") - harness.ok(results[0].members[0].getExtendedAttribute("SecureContext") is None, - "Constant should not have [SecureContext] extended attribute") - harness.ok(results[0].members[1].getExtendedAttribute("SecureContext"), - "Constant should have [SecureContext] extended attribute") - harness.ok(results[0].members[2].getExtendedAttribute("SecureContext") is None, - "Constant should not have [SecureContext] extended attribute") - harness.ok(results[0].members[3].getExtendedAttribute("SecureContext") is None, - "Attribute should not have [SecureContext] extended attribute") - harness.ok(results[0].members[4].getExtendedAttribute("SecureContext"), - "Attribute should have [SecureContext] extended attribute") - harness.ok(results[0].members[5].getExtendedAttribute("SecureContext") is None, - "Attribute should not have [SecureContext] extended attribute") - harness.ok(results[0].members[6].getExtendedAttribute("SecureContext") is None, - "Method should not have [SecureContext] extended attribute") - harness.ok(results[0].members[7].getExtendedAttribute("SecureContext"), - "Method should have [SecureContext] extended attribute") - harness.ok(results[0].members[8].getExtendedAttribute("SecureContext") is None, - "Method should not have [SecureContext] extended attribute") + harness.check( + len(results[0].members), + 9, + "TestSecureContextOnInterfaceMembers should have nine members", + ) + harness.ok( + results[0].getExtendedAttribute("SecureContext") is None, + "[SecureContext] on members should not propagate up to the interface", + ) + harness.ok( + results[0].members[0].getExtendedAttribute("SecureContext") is None, + "Constant should not have [SecureContext] extended attribute", + ) + harness.ok( + results[0].members[1].getExtendedAttribute("SecureContext"), + "Constant should have [SecureContext] extended attribute", + ) + harness.ok( + results[0].members[2].getExtendedAttribute("SecureContext") is None, + "Constant should not have [SecureContext] extended attribute", + ) + harness.ok( + results[0].members[3].getExtendedAttribute("SecureContext") is None, + "Attribute should not have [SecureContext] extended attribute", + ) + harness.ok( + results[0].members[4].getExtendedAttribute("SecureContext"), + "Attribute should have [SecureContext] extended attribute", + ) + harness.ok( + results[0].members[5].getExtendedAttribute("SecureContext") is None, + "Attribute should not have [SecureContext] extended attribute", + ) + harness.ok( + results[0].members[6].getExtendedAttribute("SecureContext") is None, + "Method should not have [SecureContext] extended attribute", + ) + harness.ok( + results[0].members[7].getExtendedAttribute("SecureContext"), + "Method should have [SecureContext] extended attribute", + ) + harness.ok( + results[0].members[8].getExtendedAttribute("SecureContext") is None, + "Method should not have [SecureContext] extended attribute", + ) parser = parser.reset() - parser.parse(""" + parser.parse( + """ interface TestSecureContextOnPartialInterfaceMembers { }; partial interface TestSecureContextOnPartialInterfaceMembers { @@ -152,37 +240,62 @@ def WebIDLTest(parser, harness): void 
testSecureMethod(byte foo); void testNonSecureMethod2(byte foo); }; - """) + """ + ) results = parser.finish() - harness.check(len(results[0].members), 9, "TestSecureContextOnPartialInterfaceMembers should have nine members") - harness.ok(results[0].members[0].getExtendedAttribute("SecureContext") is None, - "Constant from partial interface should not have [SecureContext] extended attribute") - harness.ok(results[0].members[1].getExtendedAttribute("SecureContext"), - "Constant from partial interface should have [SecureContext] extended attribute") - harness.ok(results[0].members[2].getExtendedAttribute("SecureContext") is None, - "Constant from partial interface should not have [SecureContext] extended attribute") - harness.ok(results[0].members[3].getExtendedAttribute("SecureContext") is None, - "Attribute from partial interface should not have [SecureContext] extended attribute") - harness.ok(results[0].members[4].getExtendedAttribute("SecureContext"), - "Attribute from partial interface should have [SecureContext] extended attribute") - harness.ok(results[0].members[5].getExtendedAttribute("SecureContext") is None, - "Attribute from partial interface should not have [SecureContext] extended attribute") - harness.ok(results[0].members[6].getExtendedAttribute("SecureContext") is None, - "Method from partial interface should not have [SecureContext] extended attribute") - harness.ok(results[0].members[7].getExtendedAttribute("SecureContext"), - "Method from partial interface should have [SecureContext] extended attribute") - harness.ok(results[0].members[8].getExtendedAttribute("SecureContext") is None, - "Method from partial interface should not have [SecureContext] extended attribute") + harness.check( + len(results[0].members), + 9, + "TestSecureContextOnPartialInterfaceMembers should have nine members", + ) + harness.ok( + results[0].members[0].getExtendedAttribute("SecureContext") is None, + "Constant from partial interface should not have [SecureContext] extended attribute", + ) + harness.ok( + results[0].members[1].getExtendedAttribute("SecureContext"), + "Constant from partial interface should have [SecureContext] extended attribute", + ) + harness.ok( + results[0].members[2].getExtendedAttribute("SecureContext") is None, + "Constant from partial interface should not have [SecureContext] extended attribute", + ) + harness.ok( + results[0].members[3].getExtendedAttribute("SecureContext") is None, + "Attribute from partial interface should not have [SecureContext] extended attribute", + ) + harness.ok( + results[0].members[4].getExtendedAttribute("SecureContext"), + "Attribute from partial interface should have [SecureContext] extended attribute", + ) + harness.ok( + results[0].members[5].getExtendedAttribute("SecureContext") is None, + "Attribute from partial interface should not have [SecureContext] extended attribute", + ) + harness.ok( + results[0].members[6].getExtendedAttribute("SecureContext") is None, + "Method from partial interface should not have [SecureContext] extended attribute", + ) + harness.ok( + results[0].members[7].getExtendedAttribute("SecureContext"), + "Method from partial interface should have [SecureContext] extended attribute", + ) + harness.ok( + results[0].members[8].getExtendedAttribute("SecureContext") is None, + "Method from partial interface should not have [SecureContext] extended attribute", + ) parser = parser.reset() threw = False try: - parser.parse(""" + parser.parse( + """ [SecureContext=something] interface TestSecureContextTakesNoValue1 { 
const octet TEST_SECURE_CONSTANT = 0; }; - """) + """ + ) results = parser.finish() except: threw = True @@ -191,7 +304,8 @@ def WebIDLTest(parser, harness): parser = parser.reset() threw = False try: - parser.parse(""" + parser.parse( + """ interface TestSecureContextForOverloads1 { [SecureContext] void testSecureMethod(byte foo); @@ -199,16 +313,21 @@ def WebIDLTest(parser, harness): partial interface TestSecureContextForOverloads1 { void testSecureMethod(byte foo, byte bar); }; - """) + """ + ) results = parser.finish() except: threw = True - harness.ok(threw, "If [SecureContext] appears on an overloaded operation, then it MUST appear on all overloads") + harness.ok( + threw, + "If [SecureContext] appears on an overloaded operation, then it MUST appear on all overloads", + ) parser = parser.reset() threw = False try: - parser.parse(""" + parser.parse( + """ interface TestSecureContextForOverloads2 { [SecureContext] void testSecureMethod(byte foo); @@ -217,31 +336,40 @@ def WebIDLTest(parser, harness): [SecureContext] void testSecureMethod(byte foo, byte bar); }; - """) + """ + ) results = parser.finish() except: threw = True - harness.ok(not threw, "[SecureContext] can appear on an overloaded operation if it appears on all overloads") + harness.ok( + not threw, + "[SecureContext] can appear on an overloaded operation if it appears on all overloads", + ) parser = parser.reset() threw = False try: - parser.parse(""" + parser.parse( + """ [SecureContext] interface TestSecureContextOnInterfaceAndMember { [SecureContext] void testSecureMethod(byte foo); }; - """) + """ + ) results = parser.finish() except: threw = True - harness.ok(threw, "[SecureContext] must not appear on an interface and interface member") + harness.ok( + threw, "[SecureContext] must not appear on an interface and interface member" + ) parser = parser.reset() threw = False try: - parser.parse(""" + parser.parse( + """ interface TestSecureContextOnPartialInterfaceAndMember { }; [SecureContext] @@ -249,16 +377,21 @@ def WebIDLTest(parser, harness): [SecureContext] void testSecureMethod(byte foo); }; - """) + """ + ) results = parser.finish() except: threw = True - harness.ok(threw, "[SecureContext] must not appear on a partial interface and one of the partial interface's member's") + harness.ok( + threw, + "[SecureContext] must not appear on a partial interface and one of the partial interface's member's", + ) parser = parser.reset() threw = False try: - parser.parse(""" + parser.parse( + """ [SecureContext] interface TestSecureContextOnInterfaceAndPartialInterfaceMember { }; @@ -266,31 +399,41 @@ def WebIDLTest(parser, harness): [SecureContext] void testSecureMethod(byte foo); }; - """) + """ + ) results = parser.finish() except: threw = True - harness.ok(threw, "[SecureContext] must not appear on an interface and one of its partial interface's member's") + harness.ok( + threw, + "[SecureContext] must not appear on an interface and one of its partial interface's member's", + ) parser = parser.reset() threw = False try: - parser.parse(""" + parser.parse( + """ [SecureContext] interface TestSecureContextOnInheritedInterface { }; interface TestSecureContextNotOnInheritingInterface : TestSecureContextOnInheritedInterface { void testSecureMethod(byte foo); }; - """) + """ + ) results = parser.finish() except: threw = True - harness.ok(threw, "[SecureContext] must appear on interfaces that inherit from another [SecureContext] interface") + harness.ok( + threw, + "[SecureContext] must appear on interfaces that inherit from 
another [SecureContext] interface", + ) # Test 'includes'. parser = parser.reset() - parser.parse(""" + parser.parse( + """ [SecureContext] interface TestSecureContextInterfaceThatIncludesNonSecureContextMixin { const octet TEST_CONSTANT = 0; @@ -301,31 +444,56 @@ def WebIDLTest(parser, harness): void testMethod2(byte foo); }; TestSecureContextInterfaceThatIncludesNonSecureContextMixin includes TestNonSecureContextMixin; - """) + """ + ) results = parser.finish() - harness.check(len(results[0].members), 4, "TestSecureContextInterfaceThatImplementsNonSecureContextInterface should have four members") - harness.ok(results[0].getExtendedAttribute("SecureContext"), - "Interface should have [SecureContext] extended attribute") - harness.ok(results[0].members[0].getExtendedAttribute("SecureContext"), - "[SecureContext] should propagate from interface to constant members even when other members are copied from a non-[SecureContext] interface") - harness.ok(results[0].members[1].getExtendedAttribute("SecureContext") is None, - "Constants copied from non-[SecureContext] mixin should not be [SecureContext]") - harness.ok(results[0].members[2].getExtendedAttribute("SecureContext") is None, - "Attributes copied from non-[SecureContext] mixin should not be [SecureContext]") - harness.ok(results[0].members[3].getExtendedAttribute("SecureContext") is None, - "Methods copied from non-[SecureContext] mixin should not be [SecureContext]") - + harness.check( + len(results[0].members), + 4, + "TestSecureContextInterfaceThatImplementsNonSecureContextInterface should have four members", + ) + harness.ok( + results[0].getExtendedAttribute("SecureContext"), + "Interface should have [SecureContext] extended attribute", + ) + harness.ok( + results[0].members[0].getExtendedAttribute("SecureContext"), + "[SecureContext] should propagate from interface to constant members even when other members are copied from a non-[SecureContext] interface", + ) + harness.ok( + results[0].members[1].getExtendedAttribute("SecureContext") is None, + "Constants copied from non-[SecureContext] mixin should not be [SecureContext]", + ) + harness.ok( + results[0].members[2].getExtendedAttribute("SecureContext") is None, + "Attributes copied from non-[SecureContext] mixin should not be [SecureContext]", + ) + harness.ok( + results[0].members[3].getExtendedAttribute("SecureContext") is None, + "Methods copied from non-[SecureContext] mixin should not be [SecureContext]", + ) + # Test SecureContext and NoInterfaceObject parser = parser.reset() - parser.parse(""" + parser.parse( + """ [NoInterfaceObject, SecureContext] interface TestSecureContextNoInterfaceObject { void testSecureMethod(byte foo); }; - """) + """ + ) results = parser.finish() - harness.check(len(results[0].members), 1, "TestSecureContextNoInterfaceObject should have only one member") - harness.ok(results[0].getExtendedAttribute("SecureContext"), - "Interface should have [SecureContext] extended attribute") - harness.ok(results[0].members[0].getExtendedAttribute("SecureContext"), - "Interface member should have [SecureContext] extended attribute") + harness.check( + len(results[0].members), + 1, + "TestSecureContextNoInterfaceObject should have only one member", + ) + harness.ok( + results[0].getExtendedAttribute("SecureContext"), + "Interface should have [SecureContext] extended attribute", + ) + harness.ok( + results[0].members[0].getExtendedAttribute("SecureContext"), + "Interface member should have [SecureContext] extended attribute", + ) diff --git 
a/dom/bindings/parser/tests/test_special_method_signature_mismatch.py b/dom/bindings/parser/tests/test_special_method_signature_mismatch.py index 52cfcb96817377..91cf48a25d43c9 100644 --- a/dom/bindings/parser/tests/test_special_method_signature_mismatch.py +++ b/dom/bindings/parser/tests/test_special_method_signature_mismatch.py @@ -1,11 +1,13 @@ def WebIDLTest(parser, harness): threw = False try: - parser.parse(""" + parser.parse( + """ interface SpecialMethodSignatureMismatch1 { getter long long foo(long index); }; - """) + """ + ) results = parser.finish() except: @@ -15,11 +17,13 @@ def WebIDLTest(parser, harness): threw = False try: - parser.parse(""" + parser.parse( + """ interface SpecialMethodSignatureMismatch2 { getter void foo(unsigned long index); }; - """) + """ + ) results = parser.finish() except: @@ -29,11 +33,13 @@ def WebIDLTest(parser, harness): threw = False try: - parser.parse(""" + parser.parse( + """ interface SpecialMethodSignatureMismatch3 { getter boolean foo(unsigned long index, boolean extraArg); }; - """) + """ + ) results = parser.finish() except: @@ -43,11 +49,13 @@ def WebIDLTest(parser, harness): threw = False try: - parser.parse(""" + parser.parse( + """ interface SpecialMethodSignatureMismatch4 { getter boolean foo(unsigned long... index); }; - """) + """ + ) results = parser.finish() except: @@ -57,11 +65,13 @@ def WebIDLTest(parser, harness): threw = False try: - parser.parse(""" + parser.parse( + """ interface SpecialMethodSignatureMismatch5 { getter boolean foo(optional unsigned long index); }; - """) + """ + ) results = parser.finish() except: @@ -71,11 +81,13 @@ def WebIDLTest(parser, harness): threw = False try: - parser.parse(""" + parser.parse( + """ interface SpecialMethodSignatureMismatch6 { getter boolean foo(); }; - """) + """ + ) results = parser.finish() except: @@ -85,11 +97,13 @@ def WebIDLTest(parser, harness): threw = False try: - parser.parse(""" + parser.parse( + """ interface SpecialMethodSignatureMismatch7 { deleter long long foo(long index); }; - """) + """ + ) results = parser.finish() except: @@ -99,11 +113,13 @@ def WebIDLTest(parser, harness): threw = False try: - parser.parse(""" + parser.parse( + """ interface SpecialMethodSignatureMismatch9 { deleter boolean foo(unsigned long index, boolean extraArg); }; - """) + """ + ) results = parser.finish() except: @@ -113,11 +129,13 @@ def WebIDLTest(parser, harness): threw = False try: - parser.parse(""" + parser.parse( + """ interface SpecialMethodSignatureMismatch10 { deleter boolean foo(unsigned long... 
index); }; - """) + """ + ) results = parser.finish() except: @@ -127,11 +145,13 @@ def WebIDLTest(parser, harness): threw = False try: - parser.parse(""" + parser.parse( + """ interface SpecialMethodSignatureMismatch11 { deleter boolean foo(optional unsigned long index); }; - """) + """ + ) results = parser.finish() except: @@ -141,11 +161,13 @@ def WebIDLTest(parser, harness): threw = False try: - parser.parse(""" + parser.parse( + """ interface SpecialMethodSignatureMismatch12 { deleter boolean foo(); }; - """) + """ + ) results = parser.finish() except: @@ -155,11 +177,13 @@ def WebIDLTest(parser, harness): threw = False try: - parser.parse(""" + parser.parse( + """ interface SpecialMethodSignatureMismatch13 { setter long long foo(long index, long long value); }; - """) + """ + ) results = parser.finish() except: @@ -169,11 +193,13 @@ def WebIDLTest(parser, harness): threw = False try: - parser.parse(""" + parser.parse( + """ interface SpecialMethodSignatureMismatch15 { setter boolean foo(unsigned long index, boolean value, long long extraArg); }; - """) + """ + ) results = parser.finish() except: @@ -183,11 +209,13 @@ def WebIDLTest(parser, harness): threw = False try: - parser.parse(""" + parser.parse( + """ interface SpecialMethodSignatureMismatch16 { setter boolean foo(unsigned long index, boolean... value); }; - """) + """ + ) results = parser.finish() except: @@ -197,11 +225,13 @@ def WebIDLTest(parser, harness): threw = False try: - parser.parse(""" + parser.parse( + """ interface SpecialMethodSignatureMismatch17 { setter boolean foo(unsigned long index, optional boolean value); }; - """) + """ + ) results = parser.finish() except: @@ -211,11 +241,13 @@ def WebIDLTest(parser, harness): threw = False try: - parser.parse(""" + parser.parse( + """ interface SpecialMethodSignatureMismatch18 { setter boolean foo(); }; - """) + """ + ) results = parser.finish() except: diff --git a/dom/bindings/parser/tests/test_special_methods.py b/dom/bindings/parser/tests/test_special_methods.py index 7f911733b62b01..936557a06cc24e 100644 --- a/dom/bindings/parser/tests/test_special_methods.py +++ b/dom/bindings/parser/tests/test_special_methods.py @@ -1,7 +1,9 @@ import WebIDL + def WebIDLTest(parser, harness): - parser.parse(""" + parser.parse( + """ interface SpecialMethods { getter long long (unsigned long index); setter long long (unsigned long index, long long value); @@ -14,47 +16,90 @@ def WebIDLTest(parser, harness): interface SpecialMethodsCombination { getter deleter boolean (DOMString name); }; - """) + """ + ) results = parser.finish() - def checkMethod(method, QName, name, - static=False, getter=False, setter=False, - deleter=False, legacycaller=False, stringifier=False): - harness.ok(isinstance(method, WebIDL.IDLMethod), - "Should be an IDLMethod") + def checkMethod( + method, + QName, + name, + static=False, + getter=False, + setter=False, + deleter=False, + legacycaller=False, + stringifier=False, + ): + harness.ok(isinstance(method, WebIDL.IDLMethod), "Should be an IDLMethod") harness.check(method.identifier.QName(), QName, "Method has the right QName") harness.check(method.identifier.name, name, "Method has the right name") harness.check(method.isStatic(), static, "Method has the correct static value") harness.check(method.isGetter(), getter, "Method has the correct getter value") harness.check(method.isSetter(), setter, "Method has the correct setter value") - harness.check(method.isDeleter(), deleter, "Method has the correct deleter value") - 
harness.check(method.isLegacycaller(), legacycaller, "Method has the correct legacycaller value") - harness.check(method.isStringifier(), stringifier, "Method has the correct stringifier value") + harness.check( + method.isDeleter(), deleter, "Method has the correct deleter value" + ) + harness.check( + method.isLegacycaller(), + legacycaller, + "Method has the correct legacycaller value", + ) + harness.check( + method.isStringifier(), + stringifier, + "Method has the correct stringifier value", + ) harness.check(len(results), 2, "Expect 2 interfaces") iface = results[0] harness.check(len(iface.members), 6, "Expect 6 members") - checkMethod(iface.members[0], "::SpecialMethods::__indexedgetter", "__indexedgetter", - getter=True) - checkMethod(iface.members[1], "::SpecialMethods::__indexedsetter", "__indexedsetter", - setter=True) - checkMethod(iface.members[2], "::SpecialMethods::__namedgetter", "__namedgetter", - getter=True) - checkMethod(iface.members[3], "::SpecialMethods::__namedsetter", "__namedsetter", - setter=True) - checkMethod(iface.members[4], "::SpecialMethods::__nameddeleter", "__nameddeleter", - deleter=True) + checkMethod( + iface.members[0], + "::SpecialMethods::__indexedgetter", + "__indexedgetter", + getter=True, + ) + checkMethod( + iface.members[1], + "::SpecialMethods::__indexedsetter", + "__indexedsetter", + setter=True, + ) + checkMethod( + iface.members[2], + "::SpecialMethods::__namedgetter", + "__namedgetter", + getter=True, + ) + checkMethod( + iface.members[3], + "::SpecialMethods::__namedsetter", + "__namedsetter", + setter=True, + ) + checkMethod( + iface.members[4], + "::SpecialMethods::__nameddeleter", + "__nameddeleter", + deleter=True, + ) iface = results[1] harness.check(len(iface.members), 1, "Expect 1 member") - checkMethod(iface.members[0], "::SpecialMethodsCombination::__namedgetterdeleter", - "__namedgetterdeleter", getter=True, deleter=True) + checkMethod( + iface.members[0], + "::SpecialMethodsCombination::__namedgetterdeleter", + "__namedgetterdeleter", + getter=True, + deleter=True, + ) - parser = parser.reset(); + parser = parser.reset() threw = False try: @@ -63,11 +108,10 @@ def checkMethod(method, QName, name, interface IndexedDeleter { deleter void(unsigned long index); }; - """) + """ + ) parser.finish() except: threw = True harness.ok(threw, "There are no indexed deleters") - - diff --git a/dom/bindings/parser/tests/test_special_methods_uniqueness.py b/dom/bindings/parser/tests/test_special_methods_uniqueness.py index 9bf3d903463b79..014737e8168999 100644 --- a/dom/bindings/parser/tests/test_special_methods_uniqueness.py +++ b/dom/bindings/parser/tests/test_special_methods_uniqueness.py @@ -1,14 +1,17 @@ import WebIDL + def WebIDLTest(parser, harness): threw = False try: - parser.parse(""" + parser.parse( + """ interface SpecialMethodUniqueness1 { getter deleter boolean (DOMString name); getter boolean (DOMString name); }; - """) + """ + ) results = parser.finish() except: @@ -18,12 +21,14 @@ def WebIDLTest(parser, harness): threw = False try: - parser.parse(""" + parser.parse( + """ interface SpecialMethodUniqueness1 { deleter boolean (DOMString name); getter deleter boolean (DOMString name); }; - """) + """ + ) results = parser.finish() except: @@ -33,12 +38,14 @@ def WebIDLTest(parser, harness): threw = False try: - parser.parse(""" + parser.parse( + """ interface SpecialMethodUniqueness1 { setter boolean (DOMString name); setter boolean (DOMString name); }; - """) + """ + ) results = parser.finish() except: diff --git 
a/dom/bindings/parser/tests/test_stringifier.py b/dom/bindings/parser/tests/test_stringifier.py index deabdc5ec81855..f650b528abd8e3 100644 --- a/dom/bindings/parser/tests/test_stringifier.py +++ b/dom/bindings/parser/tests/test_stringifier.py @@ -1,27 +1,34 @@ import WebIDL + def WebIDLTest(parser, harness): - parser.parse(""" + parser.parse( + """ interface TestStringifier { stringifier; }; - """) + """ + ) results = parser.finish() - harness.ok(isinstance(results[0].members[0], WebIDL.IDLMethod), - "Stringifer should be method") + harness.ok( + isinstance(results[0].members[0], WebIDL.IDLMethod), + "Stringifer should be method", + ) parser = parser.reset() threw = False try: - parser.parse(""" + parser.parse( + """ interface TestStringifier { stringifier; stringifier; }; - """) + """ + ) results = parser.finish() except: threw = True @@ -32,12 +39,14 @@ def WebIDLTest(parser, harness): threw = False try: - parser.parse(""" + parser.parse( + """ interface TestStringifier { stringifier; stringifier DOMString foo(); }; - """) + """ + ) results = parser.finish() except: threw = True @@ -45,70 +54,92 @@ def WebIDLTest(parser, harness): harness.ok(threw, "Should not allow a 'stringifier;' and a 'stringifier()'") parser = parser.reset() - parser.parse(""" + parser.parse( + """ interface TestStringifier { stringifier attribute DOMString foo; }; - """) + """ + ) results = parser.finish() - harness.ok(isinstance(results[0].members[0], WebIDL.IDLAttribute), - "Stringifier attribute should be an attribute") + harness.ok( + isinstance(results[0].members[0], WebIDL.IDLAttribute), + "Stringifier attribute should be an attribute", + ) stringifier = results[0].members[1] - harness.ok(isinstance(stringifier, WebIDL.IDLMethod), - "Stringifier attribute should insert a method") - harness.ok(stringifier.isStringifier(), - "Inserted method should be a stringifier") + harness.ok( + isinstance(stringifier, WebIDL.IDLMethod), + "Stringifier attribute should insert a method", + ) + harness.ok(stringifier.isStringifier(), "Inserted method should be a stringifier") parser = parser.reset() - parser.parse(""" + parser.parse( + """ interface TestStringifier {}; interface mixin TestStringifierMixin { stringifier attribute DOMString foo; }; TestStringifier includes TestStringifierMixin; - """) + """ + ) results = parser.finish() - harness.ok(isinstance(results[0].members[0], WebIDL.IDLAttribute), - "Stringifier attribute should be an attribute") + harness.ok( + isinstance(results[0].members[0], WebIDL.IDLAttribute), + "Stringifier attribute should be an attribute", + ) stringifier = results[0].members[1] - harness.ok(isinstance(stringifier, WebIDL.IDLMethod), - "Stringifier attribute should insert a method") - harness.ok(stringifier.isStringifier(), - "Inserted method should be a stringifier") + harness.ok( + isinstance(stringifier, WebIDL.IDLMethod), + "Stringifier attribute should insert a method", + ) + harness.ok(stringifier.isStringifier(), "Inserted method should be a stringifier") parser = parser.reset() - parser.parse(""" + parser.parse( + """ interface TestStringifier { stringifier attribute USVString foo; }; - """) + """ + ) results = parser.finish() stringifier = results[0].members[1] - harness.ok(stringifier.signatures()[0][0].isUSVString(), - "Stringifier attributes should allow USVString") + harness.ok( + stringifier.signatures()[0][0].isUSVString(), + "Stringifier attributes should allow USVString", + ) parser = parser.reset() - parser.parse(""" + parser.parse( + """ interface TestStringifier { [Throws, 
NeedsSubjectPrincipal] stringifier attribute USVString foo; }; - """) + """ + ) results = parser.finish() stringifier = results[0].members[1] - harness.ok(stringifier.getExtendedAttribute("Throws"), - "Stringifier attributes should support [Throws]") - harness.ok(stringifier.getExtendedAttribute("NeedsSubjectPrincipal"), - "Stringifier attributes should support [NeedsSubjectPrincipal]") + harness.ok( + stringifier.getExtendedAttribute("Throws"), + "Stringifier attributes should support [Throws]", + ) + harness.ok( + stringifier.getExtendedAttribute("NeedsSubjectPrincipal"), + "Stringifier attributes should support [NeedsSubjectPrincipal]", + ) parser = parser.reset() threw = False try: - parser.parse(""" + parser.parse( + """ interface TestStringifier { stringifier attribute ByteString foo; }; - """) + """ + ) results = parser.finish() except: threw = True @@ -118,12 +149,14 @@ def WebIDLTest(parser, harness): parser = parser.reset() threw = False try: - parser.parse(""" + parser.parse( + """ interface TestStringifier { stringifier; stringifier attribute DOMString foo; }; - """) + """ + ) results = parser.finish() except: threw = True @@ -133,12 +166,14 @@ def WebIDLTest(parser, harness): parser = parser.reset() threw = False try: - parser.parse(""" + parser.parse( + """ interface TestStringifier { stringifier attribute DOMString foo; stringifier attribute DOMString bar; }; - """) + """ + ) results = parser.finish() except: threw = True diff --git a/dom/bindings/parser/tests/test_toJSON.py b/dom/bindings/parser/tests/test_toJSON.py index ad01330e65a049..f312667ec4daa0 100644 --- a/dom/bindings/parser/tests/test_toJSON.py +++ b/dom/bindings/parser/tests/test_toJSON.py @@ -6,7 +6,8 @@ def WebIDLTest(parser, harness): interface Test { object toJSON(); }; - """) + """ + ) results = parser.finish() except: threw = True @@ -21,7 +22,8 @@ def WebIDLTest(parser, harness): object toJSON(object arg); object toJSON(long arg); }; - """) + """ + ) results = parser.finish() except: threw = True @@ -35,7 +37,8 @@ def WebIDLTest(parser, harness): interface Test { object toJSON(object arg); }; - """) + """ + ) results = parser.finish() except: threw = True @@ -49,7 +52,8 @@ def WebIDLTest(parser, harness): interface Test { long toJSON(); }; - """) + """ + ) results = parser.finish() except: threw = True @@ -63,11 +67,14 @@ def WebIDLTest(parser, harness): interface Test { [Default] object toJSON(); }; - """) + """ + ) results = parser.finish() except: threw = True - harness.ok(not threw, "Should allow a default toJSON method with 'object' as return type.") + harness.ok( + not threw, "Should allow a default toJSON method with 'object' as return type." 
+ ) parser = parser.reset() threw = False @@ -77,119 +84,226 @@ def WebIDLTest(parser, harness): interface Test { [Default] long toJSON(); }; - """) + """ + ) results = parser.finish() except: threw = True - harness.ok(threw, "Should not allow a default toJSON method with non-'object' as return type.") - - JsonTypes = [ "byte", "octet", "short", "unsigned short", "long", "unsigned long", "long long", - "unsigned long long", "float", "unrestricted float", "double", "unrestricted double", "boolean", - "DOMString", "ByteString", "UTF8String", "USVString", "Enum", "InterfaceWithToJSON", "object" ] - - nonJsonTypes = [ "InterfaceWithoutToJSON", "any", "Int8Array", "Int16Array", "Int32Array","Uint8Array", - "Uint16Array", "Uint32Array", "Uint8ClampedArray", "Float32Array", "Float64Array", "ArrayBuffer" ] + harness.ok( + threw, + "Should not allow a default toJSON method with non-'object' as return type.", + ) + + JsonTypes = [ + "byte", + "octet", + "short", + "unsigned short", + "long", + "unsigned long", + "long long", + "unsigned long long", + "float", + "unrestricted float", + "double", + "unrestricted double", + "boolean", + "DOMString", + "ByteString", + "UTF8String", + "USVString", + "Enum", + "InterfaceWithToJSON", + "object", + ] + + nonJsonTypes = [ + "InterfaceWithoutToJSON", + "any", + "Int8Array", + "Int16Array", + "Int32Array", + "Uint8Array", + "Uint16Array", + "Uint32Array", + "Uint8ClampedArray", + "Float32Array", + "Float64Array", + "ArrayBuffer", + ] def doTest(testIDL, shouldThrow, description): p = parser.reset() threw = False try: - p.parse(testIDL + - """ + p.parse( + testIDL + + """ enum Enum { "a", "b", "c" }; interface InterfaceWithToJSON { long toJSON(); }; interface InterfaceWithoutToJSON {}; - """); - p.finish(); + """ + ) + p.finish() except Exception as x: threw = True harness.ok(x.message == "toJSON method has non-JSON return type", x) harness.check(threw, shouldThrow, description) - for type in JsonTypes: - doTest("interface Test { %s toJSON(); };" % type, False, - "%s should be a JSON type" % type) - - doTest("interface Test { sequence<%s> toJSON(); };" % type, False, - "sequence<%s> should be a JSON type" % type) - - doTest("dictionary Foo { %s foo; }; " - "interface Test { Foo toJSON(); }; " % type, False, - "dictionary containing only JSON type (%s) should be a JSON type" % type) - - doTest("dictionary Foo { %s foo; }; dictionary Bar : Foo { }; " - "interface Test { Bar toJSON(); }; " % type, False, - "dictionary whose ancestors only contain JSON types should be a JSON type") - - doTest("dictionary Foo { any foo; }; dictionary Bar : Foo { %s bar; };" - "interface Test { Bar toJSON(); };" % type, True, - "dictionary whose ancestors contain non-JSON types should not be a JSON type") - - doTest("interface Test { record toJSON(); };" % type, False, - "record should be a JSON type" % type) - - doTest("interface Test { record toJSON(); };" % type, False, - "record should be a JSON type" % type) - - doTest("interface Test { record toJSON(); };" % type, False, - "record should be a JSON type" % type) - - doTest("interface Test { record toJSON(); };" % type, False, - "record should be a JSON type" % type) + doTest( + "interface Test { %s toJSON(); };" % type, + False, + "%s should be a JSON type" % type, + ) + + doTest( + "interface Test { sequence<%s> toJSON(); };" % type, + False, + "sequence<%s> should be a JSON type" % type, + ) + + doTest( + "dictionary Foo { %s foo; }; " "interface Test { Foo toJSON(); }; " % type, + False, + "dictionary containing only JSON 
type (%s) should be a JSON type" % type, + ) + + doTest( + "dictionary Foo { %s foo; }; dictionary Bar : Foo { }; " + "interface Test { Bar toJSON(); }; " % type, + False, + "dictionary whose ancestors only contain JSON types should be a JSON type", + ) + + doTest( + "dictionary Foo { any foo; }; dictionary Bar : Foo { %s bar; };" + "interface Test { Bar toJSON(); };" % type, + True, + "dictionary whose ancestors contain non-JSON types should not be a JSON type", + ) + + doTest( + "interface Test { record toJSON(); };" % type, + False, + "record should be a JSON type" % type, + ) + + doTest( + "interface Test { record toJSON(); };" % type, + False, + "record should be a JSON type" % type, + ) + + doTest( + "interface Test { record toJSON(); };" % type, + False, + "record should be a JSON type" % type, + ) + + doTest( + "interface Test { record toJSON(); };" % type, + False, + "record should be a JSON type" % type, + ) otherUnionType = "Foo" if type != "object" else "long" - doTest("interface Foo { object toJSON(); };" - "interface Test { (%s or %s) toJSON(); };" % (otherUnionType, type), False, - "union containing only JSON types (%s or %s) should be a JSON type" %(otherUnionType, type)) - - doTest("interface test { %s? toJSON(); };" % type, False, - "Nullable type (%s) should be a JSON type" % type) - - doTest("interface Foo : InterfaceWithoutToJSON { %s toJSON(); };" - "interface Test { Foo toJSON(); };" % type, False, - "interface with toJSON should be a JSON type") - - doTest("interface Foo : InterfaceWithToJSON { };" - "interface Test { Foo toJSON(); };", False, - "inherited interface with toJSON should be a JSON type") + doTest( + "interface Foo { object toJSON(); };" + "interface Test { (%s or %s) toJSON(); };" % (otherUnionType, type), + False, + "union containing only JSON types (%s or %s) should be a JSON type" + % (otherUnionType, type), + ) + + doTest( + "interface test { %s? 
toJSON(); };" % type, + False, + "Nullable type (%s) should be a JSON type" % type, + ) + + doTest( + "interface Foo : InterfaceWithoutToJSON { %s toJSON(); };" + "interface Test { Foo toJSON(); };" % type, + False, + "interface with toJSON should be a JSON type", + ) + + doTest( + "interface Foo : InterfaceWithToJSON { };" "interface Test { Foo toJSON(); };", + False, + "inherited interface with toJSON should be a JSON type", + ) for type in nonJsonTypes: - doTest("interface Test { %s toJSON(); };" % type, True, - "%s should not be a JSON type" % type) - - doTest("interface Test { sequence<%s> toJSON(); };" % type, True, - "sequence<%s> should not be a JSON type" % type) - - doTest("dictionary Foo { %s foo; }; " - "interface Test { Foo toJSON(); }; " % type, True, - "Dictionary containing a non-JSON type (%s) should not be a JSON type" % type) - - doTest("dictionary Foo { %s foo; }; dictionary Bar : Foo { }; " - "interface Test { Bar toJSON(); }; " % type, True, - "dictionary whose ancestors only contain non-JSON types should not be a JSON type") - - doTest("interface Test { record toJSON(); };" % type, True, - "record should not be a JSON type" % type) - - doTest("interface Test { record toJSON(); };" % type, True, - "record should not be a JSON type" % type) - - doTest("interface Test { record toJSON(); };" % type, True, - "record should not be a JSON type" % type) + doTest( + "interface Test { %s toJSON(); };" % type, + True, + "%s should not be a JSON type" % type, + ) + + doTest( + "interface Test { sequence<%s> toJSON(); };" % type, + True, + "sequence<%s> should not be a JSON type" % type, + ) + + doTest( + "dictionary Foo { %s foo; }; " "interface Test { Foo toJSON(); }; " % type, + True, + "Dictionary containing a non-JSON type (%s) should not be a JSON type" + % type, + ) + + doTest( + "dictionary Foo { %s foo; }; dictionary Bar : Foo { }; " + "interface Test { Bar toJSON(); }; " % type, + True, + "dictionary whose ancestors only contain non-JSON types should not be a JSON type", + ) + + doTest( + "interface Test { record toJSON(); };" % type, + True, + "record should not be a JSON type" % type, + ) + + doTest( + "interface Test { record toJSON(); };" % type, + True, + "record should not be a JSON type" % type, + ) + + doTest( + "interface Test { record toJSON(); };" % type, + True, + "record should not be a JSON type" % type, + ) if type != "any": - doTest("interface Foo { object toJSON(); }; " - "interface Test { (Foo or %s) toJSON(); };" % type, True, - "union containing a non-JSON type (%s) should not be a JSON type" % type) - - doTest("interface test { %s? toJSON(); };" % type, True, - "Nullable type (%s) should not be a JSON type" % type) - - doTest("dictionary Foo { long foo; any bar; };" - "interface Test { Foo toJSON(); };", True, - "dictionary containing a non-JSON type should not be a JSON type") - - doTest("interface Foo : InterfaceWithoutToJSON { }; " - "interface Test { Foo toJSON(); };", True, - "interface without toJSON should not be a JSON type") + doTest( + "interface Foo { object toJSON(); }; " + "interface Test { (Foo or %s) toJSON(); };" % type, + True, + "union containing a non-JSON type (%s) should not be a JSON type" + % type, + ) + + doTest( + "interface test { %s? 
toJSON(); };" % type, + True, + "Nullable type (%s) should not be a JSON type" % type, + ) + + doTest( + "dictionary Foo { long foo; any bar; };" "interface Test { Foo toJSON(); };", + True, + "dictionary containing a non-JSON type should not be a JSON type", + ) + + doTest( + "interface Foo : InterfaceWithoutToJSON { }; " + "interface Test { Foo toJSON(); };", + True, + "interface without toJSON should not be a JSON type", + ) diff --git a/dom/bindings/parser/tests/test_treatNonCallableAsNull.py b/dom/bindings/parser/tests/test_treatNonCallableAsNull.py index 7a0bde8a6dcd43..aec6f69bd9cba5 100644 --- a/dom/bindings/parser/tests/test_treatNonCallableAsNull.py +++ b/dom/bindings/parser/tests/test_treatNonCallableAsNull.py @@ -1,14 +1,17 @@ import WebIDL + def WebIDLTest(parser, harness): - parser.parse(""" + parser.parse( + """ [TreatNonCallableAsNull] callback Function = any(any... arguments); interface TestTreatNonCallableAsNull1 { attribute Function? onfoo; attribute Function onbar; }; - """) + """ + ) results = parser.finish() @@ -22,13 +25,15 @@ def WebIDLTest(parser, harness): threw = False try: - parser.parse(""" + parser.parse( + """ callback Function = any(any... arguments); interface TestTreatNonCallableAsNull2 { [TreatNonCallableAsNull] attribute Function onfoo; }; - """) + """ + ) results = parser.finish() except: @@ -40,14 +45,16 @@ def WebIDLTest(parser, harness): threw = False try: - parser.parse(""" + parser.parse( + """ callback Function = any(any... arguments); [TreatNonCallableAsNull] interface TestTreatNonCallableAsNull3 { attribute Function onfoo; }; - """) + """ + ) results = parser.finish() except: @@ -59,10 +66,12 @@ def WebIDLTest(parser, harness): threw = False try: - parser.parse(""" + parser.parse( + """ [TreatNonCallableAsNull, TreatNonObjectAsNull] callback Function = any(any... arguments); - """) + """ + ) results = parser.finish() except: diff --git a/dom/bindings/parser/tests/test_typedef.py b/dom/bindings/parser/tests/test_typedef.py index b5fc1c68890888..1edc2b9065e866 100644 --- a/dom/bindings/parser/tests/test_typedef.py +++ b/dom/bindings/parser/tests/test_typedef.py @@ -1,5 +1,6 @@ def WebIDLTest(parser, harness): - parser.parse(""" + parser.parse( + """ typedef long mylong; typedef long? mynullablelong; interface Foo { @@ -8,22 +9,28 @@ def WebIDLTest(parser, harness): void bar(optional mynullablelong arg = null); void baz(mylong arg); }; - """) + """ + ) results = parser.finish() - harness.check(results[2].members[1].signatures()[0][1][0].type.name, "LongOrNull", - "Should expand typedefs") + harness.check( + results[2].members[1].signatures()[0][1][0].type.name, + "LongOrNull", + "Should expand typedefs", + ) parser = parser.reset() threw = False try: - parser.parse(""" + parser.parse( + """ typedef long? mynullablelong; interface Foo { void foo(mynullablelong? Y); }; - """) + """ + ) results = parser.finish() except: threw = True @@ -33,44 +40,55 @@ def WebIDLTest(parser, harness): parser = parser.reset() threw = False try: - parser.parse(""" + parser.parse( + """ typedef long? mynullablelong; interface Foo { const mynullablelong? X = 5; }; - """) + """ + ) results = parser.finish() except: threw = True harness.ok(threw, "Should have thrown on nullable inside nullable const.") - + parser = parser.reset() threw = False try: - parser.parse(""" + parser.parse( + """ interface Foo { const mynullablelong? X = 5; }; typedef long? 
mynullablelong; - """) + """ + ) results = parser.finish() except: threw = True - harness.ok(threw, - "Should have thrown on nullable inside nullable const typedef " - "after interface.") + harness.ok( + threw, + "Should have thrown on nullable inside nullable const typedef " + "after interface.", + ) parser = parser.reset() - parser.parse(""" + parser.parse( + """ interface Foo { const mylong X = 5; }; typedef long mylong; - """) + """ + ) results = parser.finish() - harness.check(results[0].members[0].type.name, "Long", - "Should expand typedefs that come before interface") + harness.check( + results[0].members[0].type.name, + "Long", + "Should expand typedefs that come before interface", + ) diff --git a/dom/bindings/parser/tests/test_typedef_identifier_conflict.py b/dom/bindings/parser/tests/test_typedef_identifier_conflict.py index 0ea38ce437bf41..2aab3a8a91fd6d 100644 --- a/dom/bindings/parser/tests/test_typedef_identifier_conflict.py +++ b/dom/bindings/parser/tests/test_typedef_identifier_conflict.py @@ -5,12 +5,15 @@ def WebIDLTest(parser, harness): """ typedef long foo; typedef long foo; - """) + """ + ) results = parser.finish() except Exception as e: exception = e harness.ok(exception, "Should have thrown.") - harness.ok("Multiple unresolvable definitions of identifier 'foo'" in str(exception), - "Should have a sane exception message") + harness.ok( + "Multiple unresolvable definitions of identifier 'foo'" in str(exception), + "Should have a sane exception message", + ) diff --git a/dom/bindings/parser/tests/test_unenumerable_own_properties.py b/dom/bindings/parser/tests/test_unenumerable_own_properties.py index d28cc1ec052e22..b024d317492cf2 100644 --- a/dom/bindings/parser/tests/test_unenumerable_own_properties.py +++ b/dom/bindings/parser/tests/test_unenumerable_own_properties.py @@ -10,18 +10,21 @@ def WebIDLTest(parser, harness): interface Baz : Bar { getter long(DOMString name); }; - """); - results = parser.finish(); + """ + ) + results = parser.finish() harness.check(len(results), 3, "Should have three interfaces") parser = parser.reset() threw = False try: - parser.parse(""" + parser.parse( + """ [LegacyUnenumerableNamedProperties] interface NoNamedGetter { }; - """) + """ + ) results = parser.finish() except Exception as x: @@ -31,12 +34,14 @@ def WebIDLTest(parser, harness): parser = parser.reset() threw = False try: - parser.parse(""" + parser.parse( + """ [LegacyUnenumerableNamedProperties=Foo] interface ShouldNotHaveArg { getter long(DOMString name); }; - """) + """ + ) results = parser.finish() except Exception as x: @@ -46,7 +51,8 @@ def WebIDLTest(parser, harness): parser = parser.reset() threw = False try: - parser.parse(""" + parser.parse( + """ [LegacyUnenumerableNamedProperties] interface Foo { getter long(DOMString name); @@ -56,7 +62,8 @@ def WebIDLTest(parser, harness): interface Baz : Bar { getter long(DOMString name); }; - """) + """ + ) results = parser.finish() except Exception as x: diff --git a/dom/bindings/parser/tests/test_unforgeable.py b/dom/bindings/parser/tests/test_unforgeable.py index 770a9d3736f7d0..cb5f71fe2141f1 100644 --- a/dom/bindings/parser/tests/test_unforgeable.py +++ b/dom/bindings/parser/tests/test_unforgeable.py @@ -1,143 +1,179 @@ def WebIDLTest(parser, harness): - parser.parse(""" + parser.parse( + """ interface Child : Parent { }; interface Parent { [Unforgeable] readonly attribute long foo; }; - """) + """ + ) results = parser.finish() - harness.check(len(results), 2, - "Should be able to inherit from an interface with " - 
"[Unforgeable] properties.") + harness.check( + len(results), + 2, + "Should be able to inherit from an interface with " "[Unforgeable] properties.", + ) - parser = parser.reset(); - parser.parse(""" + parser = parser.reset() + parser.parse( + """ interface Child : Parent { const short foo = 10; }; interface Parent { [Unforgeable] readonly attribute long foo; }; - """) + """ + ) results = parser.finish() - harness.check(len(results), 2, - "Should be able to inherit from an interface with " - "[Unforgeable] properties even if we have a constant with " - "the same name.") + harness.check( + len(results), + 2, + "Should be able to inherit from an interface with " + "[Unforgeable] properties even if we have a constant with " + "the same name.", + ) - parser = parser.reset(); - parser.parse(""" + parser = parser.reset() + parser.parse( + """ interface Child : Parent { static attribute short foo; }; interface Parent { [Unforgeable] readonly attribute long foo; }; - """) + """ + ) results = parser.finish() - harness.check(len(results), 2, - "Should be able to inherit from an interface with " - "[Unforgeable] properties even if we have a static attribute " - "with the same name.") + harness.check( + len(results), + 2, + "Should be able to inherit from an interface with " + "[Unforgeable] properties even if we have a static attribute " + "with the same name.", + ) - parser = parser.reset(); - parser.parse(""" + parser = parser.reset() + parser.parse( + """ interface Child : Parent { static void foo(); }; interface Parent { [Unforgeable] readonly attribute long foo; }; - """) + """ + ) results = parser.finish() - harness.check(len(results), 2, - "Should be able to inherit from an interface with " - "[Unforgeable] properties even if we have a static operation " - "with the same name.") + harness.check( + len(results), + 2, + "Should be able to inherit from an interface with " + "[Unforgeable] properties even if we have a static operation " + "with the same name.", + ) - parser = parser.reset(); + parser = parser.reset() threw = False try: - parser.parse(""" + parser.parse( + """ interface Child : Parent { void foo(); }; interface Parent { [Unforgeable] readonly attribute long foo; }; - """) + """ + ) results = parser.finish() except: threw = True - harness.ok(threw, - "Should have thrown when shadowing unforgeable attribute on " - "parent with operation.") + harness.ok( + threw, + "Should have thrown when shadowing unforgeable attribute on " + "parent with operation.", + ) - parser = parser.reset(); + parser = parser.reset() threw = False try: - parser.parse(""" + parser.parse( + """ interface Child : Parent { void foo(); }; interface Parent { [Unforgeable] void foo(); }; - """) + """ + ) results = parser.finish() except: threw = True - harness.ok(threw, - "Should have thrown when shadowing unforgeable operation on " - "parent with operation.") + harness.ok( + threw, + "Should have thrown when shadowing unforgeable operation on " + "parent with operation.", + ) - parser = parser.reset(); + parser = parser.reset() threw = False try: - parser.parse(""" + parser.parse( + """ interface Child : Parent { attribute short foo; }; interface Parent { [Unforgeable] readonly attribute long foo; }; - """) + """ + ) results = parser.finish() except Exception as x: threw = True - harness.ok(threw, - "Should have thrown when shadowing unforgeable attribute on " - "parent with attribute.") + harness.ok( + threw, + "Should have thrown when shadowing unforgeable attribute on " + "parent with attribute.", + ) - parser = 
parser.reset(); + parser = parser.reset() threw = False try: - parser.parse(""" + parser.parse( + """ interface Child : Parent { attribute short foo; }; interface Parent { [Unforgeable] void foo(); }; - """) + """ + ) results = parser.finish() except Exception as x: threw = True - harness.ok(threw, - "Should have thrown when shadowing unforgeable operation on " - "parent with attribute.") + harness.ok( + threw, + "Should have thrown when shadowing unforgeable operation on " + "parent with attribute.", + ) - parser = parser.reset(); - parser.parse(""" + parser = parser.reset() + parser.parse( + """ interface Child : Parent { }; interface Parent {}; @@ -145,17 +181,22 @@ def WebIDLTest(parser, harness): [Unforgeable] readonly attribute long foo; }; Parent includes Mixin; - """) + """ + ) results = parser.finish() - harness.check(len(results), 4, - "Should be able to inherit from an interface with a " - "mixin with [Unforgeable] properties.") + harness.check( + len(results), + 4, + "Should be able to inherit from an interface with a " + "mixin with [Unforgeable] properties.", + ) - parser = parser.reset(); + parser = parser.reset() threw = False try: - parser.parse(""" + parser.parse( + """ interface Child : Parent { void foo(); }; @@ -164,20 +205,24 @@ def WebIDLTest(parser, harness): [Unforgeable] readonly attribute long foo; }; Parent includes Mixin; - """) + """ + ) results = parser.finish() except: threw = True - harness.ok(threw, - "Should have thrown when shadowing unforgeable attribute " - "of parent's consequential interface.") + harness.ok( + threw, + "Should have thrown when shadowing unforgeable attribute " + "of parent's consequential interface.", + ) - parser = parser.reset(); + parser = parser.reset() threw = False try: - parser.parse(""" + parser.parse( + """ interface Child : Parent { }; interface Parent : GrandParent {}; @@ -190,20 +235,24 @@ def WebIDLTest(parser, harness): void foo(); }; Child includes ChildMixin; - """) + """ + ) results = parser.finish() except: threw = True - harness.ok(threw, - "Should have thrown when our consequential interface shadows unforgeable attribute " - "of ancestor's consequential interface.") + harness.ok( + threw, + "Should have thrown when our consequential interface shadows unforgeable attribute " + "of ancestor's consequential interface.", + ) - parser = parser.reset(); + parser = parser.reset() threw = False try: - parser.parse(""" + parser.parse( + """ interface Child : Parent { }; interface Parent : GrandParent {}; @@ -216,35 +265,41 @@ def WebIDLTest(parser, harness): void foo(); }; Child includes ChildMixin; - """) + """ + ) results = parser.finish() except: threw = True - harness.ok(threw, - "Should have thrown when our consequential interface shadows unforgeable operation " - "of ancestor's consequential interface.") + harness.ok( + threw, + "Should have thrown when our consequential interface shadows unforgeable operation " + "of ancestor's consequential interface.", + ) - parser = parser.reset(); - parser.parse(""" + parser = parser.reset() + parser.parse( + """ interface iface { [Unforgeable] attribute long foo; }; - """) + """ + ) results = parser.finish() - harness.check(len(results), 1, - "Should allow writable [Unforgeable] attribute.") + harness.check(len(results), 1, "Should allow writable [Unforgeable] attribute.") - parser = parser.reset(); + parser = parser.reset() threw = False try: - parser.parse(""" + parser.parse( + """ interface iface { [Unforgeable] static readonly attribute long foo; }; - """) + """ + ) results 
= parser.finish() except: diff --git a/dom/bindings/parser/tests/test_union.py b/dom/bindings/parser/tests/test_union.py index 801314fd0bd86c..5f948607275659 100644 --- a/dom/bindings/parser/tests/test_union.py +++ b/dom/bindings/parser/tests/test_union.py @@ -9,6 +9,7 @@ def chain(*iterables): for element in it: yield element + # We'd like to use itertools.combinations but it's 2.6 or higher. def combinations(iterable, r): # combinations('ABCD', 2) --> AB AC AD BC BD CD @@ -26,10 +27,11 @@ def combinations(iterable, r): else: return indices[i] += 1 - for j in range(i+1, r): - indices[j] = indices[j-1] + 1 + for j in range(i + 1, r): + indices[j] = indices[j - 1] + 1 yield tuple(pool[i] for i in indices) + # We'd like to use itertools.combinations_with_replacement but it's 2.7 or # higher. def combinations_with_replacement(iterable, r): @@ -49,27 +51,30 @@ def combinations_with_replacement(iterable, r): indices[i:] = [indices[i] + 1] * (r - i) yield tuple(pool[i] for i in indices) + def WebIDLTest(parser, harness): - types = ["float", - "double", - "short", - "unsigned short", - "long", - "unsigned long", - "long long", - "unsigned long long", - "boolean", - "byte", - "octet", - "DOMString", - "ByteString", - "USVString", - #"sequence", - "object", - "ArrayBuffer", - #"Date", - "TestInterface1", - "TestInterface2"] + types = [ + "float", + "double", + "short", + "unsigned short", + "long", + "unsigned long", + "long long", + "unsigned long long", + "boolean", + "byte", + "octet", + "DOMString", + "ByteString", + "USVString", + # "sequence", + "object", + "ArrayBuffer", + # "Date", + "TestInterface1", + "TestInterface2", + ] testPre = """ interface TestInterface1 { @@ -78,13 +83,18 @@ def WebIDLTest(parser, harness): }; """ - interface = testPre + """ + interface = ( + testPre + + """ interface PrepareForTest { """ + ) for (i, type) in enumerate(types): - interface += string.Template(""" + interface += string.Template( + """ readonly attribute ${type} attr${i}; - """).substitute(i=i, type=type) + """ + ).substitute(i=i, type=type) interface += """ }; """ @@ -98,8 +108,10 @@ def WebIDLTest(parser, harness): def typesAreDistinguishable(t): return all(u[0].isDistinguishableFrom(u[1]) for u in combinations(t, 2)) + def typesAreNotDistinguishable(t): return any(not u[0].isDistinguishableFrom(u[1]) for u in combinations(t, 2)) + def unionTypeName(t): if len(t) > 2: t[0:2] = [unionTypeName(t[0:2])] @@ -118,29 +130,40 @@ def unionTypes(typeCombinations, predicate): # as a string and the parsed IDL type. def invalidUnionWithUnion(typeCombinations): for c in typeCombinations: - if (typesAreNotDistinguishable((c[0][1], c[1][1])) and - typesAreDistinguishable((c[1][1], c[2][1])) and - typesAreDistinguishable((c[0][1], c[2][1]))): + if ( + typesAreNotDistinguishable((c[0][1], c[1][1])) + and typesAreDistinguishable((c[1][1], c[2][1])) + and typesAreDistinguishable((c[0][1], c[2][1])) + ): yield unionTypeName([t[0] for t in c]) # Create a list of tuples containing the name of the type as a string and # the parsed IDL type. 
types = zip(types, (a.type for a in iface.members)) - validUnionTypes = chain(unionTypes(combinations(types, 2), typesAreDistinguishable), - unionTypes(combinations(types, 3), typesAreDistinguishable)) - invalidUnionTypes = chain(unionTypes(combinations_with_replacement(types, 2), typesAreNotDistinguishable), - invalidUnionWithUnion(combinations(types, 3))) - interface = testPre + """ + validUnionTypes = chain( + unionTypes(combinations(types, 2), typesAreDistinguishable), + unionTypes(combinations(types, 3), typesAreDistinguishable), + ) + invalidUnionTypes = chain( + unionTypes(combinations_with_replacement(types, 2), typesAreNotDistinguishable), + invalidUnionWithUnion(combinations(types, 3)), + ) + interface = ( + testPre + + """ interface TestUnion { """ + ) for (i, type) in enumerate(validUnionTypes): - interface += string.Template(""" + interface += string.Template( + """ void method${i}(${type} arg); ${type} returnMethod${i}(); attribute ${type} attr${i}; void optionalMethod${i}(${type}? arg); - """).substitute(i=i, type=type) + """ + ).substitute(i=i, type=type) interface += """ }; """ @@ -150,11 +173,16 @@ def invalidUnionWithUnion(typeCombinations): parser = parser.reset() for invalid in invalidUnionTypes: - interface = testPre + string.Template(""" + interface = ( + testPre + + string.Template( + """ interface TestUnion { void method(${type} arg); }; - """).substitute(type=invalid) + """ + ).substitute(type=invalid) + ) threw = False try: diff --git a/dom/bindings/parser/tests/test_union_any.py b/dom/bindings/parser/tests/test_union_any.py index e34cadab470ef1..32c8154feb6c5f 100644 --- a/dom/bindings/parser/tests/test_union_any.py +++ b/dom/bindings/parser/tests/test_union_any.py @@ -1,11 +1,13 @@ def WebIDLTest(parser, harness): threw = False try: - parser.parse(""" + parser.parse( + """ interface AnyNotInUnion { void foo((any or DOMString) arg); }; - """) + """ + ) results = parser.finish() except: diff --git a/dom/bindings/parser/tests/test_union_nullable.py b/dom/bindings/parser/tests/test_union_nullable.py index 08430a94a2ef4a..31f004bdffc946 100644 --- a/dom/bindings/parser/tests/test_union_nullable.py +++ b/dom/bindings/parser/tests/test_union_nullable.py @@ -1,53 +1,60 @@ def WebIDLTest(parser, harness): threw = False try: - parser.parse(""" + parser.parse( + """ interface OneNullableInUnion { void foo((object? or DOMString?) arg); }; - """) + """ + ) results = parser.finish() except: threw = True - harness.ok(threw, - "Two nullable member types of a union should have thrown.") + harness.ok(threw, "Two nullable member types of a union should have thrown.") parser.reset() threw = False try: - parser.parse(""" + parser.parse( + """ interface NullableInNullableUnion { void foo((object? or DOMString)? arg); }; - """) + """ + ) results = parser.finish() except: threw = True - harness.ok(threw, - "A nullable union type with a nullable member type should have " - "thrown.") + harness.ok( + threw, + "A nullable union type with a nullable member type should have " "thrown.", + ) parser.reset() threw = False try: - parser.parse(""" + parser.parse( + """ interface NullableInUnionNullableUnionHelper { }; interface NullableInUnionNullableUnion { void foo(((object? or DOMString) or NullableInUnionNullableUnionHelper)? 
arg); }; - """) + """ + ) results = parser.finish() except: threw = True - harness.ok(threw, - "A nullable union type with a nullable member type should have " - "thrown.") + harness.ok( + threw, + "A nullable union type with a nullable member type should have " "thrown.", + ) diff --git a/dom/bindings/parser/tests/test_usvstring.py b/dom/bindings/parser/tests/test_usvstring.py index 3a1369abd02c98..effede391cb480 100644 --- a/dom/bindings/parser/tests/test_usvstring.py +++ b/dom/bindings/parser/tests/test_usvstring.py @@ -2,23 +2,27 @@ import WebIDL + def WebIDLTest(parser, harness): - parser.parse(""" + parser.parse( + """ interface TestUSVString { attribute USVString svs; }; - """) + """ + ) - results = parser.finish(); + results = parser.finish() harness.check(len(results), 1, "Should be one production") - harness.ok(isinstance(results[0], WebIDL.IDLInterface), - "Should be an IDLInterface") + harness.ok(isinstance(results[0], WebIDL.IDLInterface), "Should be an IDLInterface") iface = results[0] - harness.check(iface.identifier.QName(), "::TestUSVString", - "Interface has the right QName") - harness.check(iface.identifier.name, "TestUSVString", - "Interface has the right name") + harness.check( + iface.identifier.QName(), "::TestUSVString", "Interface has the right QName" + ) + harness.check( + iface.identifier.name, "TestUSVString", "Interface has the right name" + ) harness.check(iface.parent, None, "Interface has no parent") members = iface.members @@ -26,11 +30,11 @@ def WebIDLTest(parser, harness): attr = members[0] harness.ok(isinstance(attr, WebIDL.IDLAttribute), "Should be an IDLAttribute") - harness.check(attr.identifier.QName(), "::TestUSVString::svs", - "Attr has correct QName") + harness.check( + attr.identifier.QName(), "::TestUSVString::svs", "Attr has correct QName" + ) harness.check(attr.identifier.name, "svs", "Attr has correct name") - harness.check(str(attr.type), "USVString", - "Attr type is the correct name") + harness.check(str(attr.type), "USVString", "Attr type is the correct name") harness.ok(attr.type.isUSVString(), "Should be USVString type") harness.ok(attr.type.isString(), "Should be String collective type") harness.ok(not attr.type.isDOMString(), "Should be not be DOMString type") diff --git a/dom/bindings/parser/tests/test_variadic_callback.py b/dom/bindings/parser/tests/test_variadic_callback.py index d9a78db2043e76..3fd3dccd37ad8d 100644 --- a/dom/bindings/parser/tests/test_variadic_callback.py +++ b/dom/bindings/parser/tests/test_variadic_callback.py @@ -1,9 +1,12 @@ import WebIDL + def WebIDLTest(parser, harness): - parser.parse(""" + parser.parse( + """ callback TestVariadicCallback = any(any... arguments); - """) + """ + ) results = parser.finish() diff --git a/dom/bindings/parser/tests/test_variadic_constraints.py b/dom/bindings/parser/tests/test_variadic_constraints.py index 7448e40d5a97cb..1ada53334b75d1 100644 --- a/dom/bindings/parser/tests/test_variadic_constraints.py +++ b/dom/bindings/parser/tests/test_variadic_constraints.py @@ -1,61 +1,72 @@ def WebIDLTest(parser, harness): threw = False try: - parser.parse(""" + parser.parse( + """ interface VariadicConstraints1 { void foo(byte... 
arg1, byte arg2); }; - """) + """ + ) results = parser.finish() except: threw = True - harness.ok(threw, - "Should have thrown on variadic argument followed by required " - "argument.") + harness.ok( + threw, + "Should have thrown on variadic argument followed by required " "argument.", + ) parser = parser.reset() threw = False try: - parser.parse(""" + parser.parse( + """ interface VariadicConstraints2 { void foo(byte... arg1, optional byte arg2); }; - """) - results = parser.finish(); + """ + ) + results = parser.finish() except: threw = True - harness.ok(threw, - "Should have thrown on variadic argument followed by optional " - "argument.") + harness.ok( + threw, + "Should have thrown on variadic argument followed by optional " "argument.", + ) parser = parser.reset() threw = False try: - parser.parse(""" + parser.parse( + """ interface VariadicConstraints3 { void foo(optional byte... arg1); }; - """) + """ + ) results = parser.finish() except: threw = True - harness.ok(threw, - "Should have thrown on variadic argument explicitly flagged as " - "optional.") + harness.ok( + threw, + "Should have thrown on variadic argument explicitly flagged as " "optional.", + ) parser = parser.reset() threw = False try: - parser.parse(""" + parser.parse( + """ interface VariadicConstraints4 { void foo(byte... arg1 = 0); }; - """) + """ + ) results = parser.finish() except: threw = True diff --git a/dom/bindings/test/moz.build b/dom/bindings/test/moz.build index 9bf000f8e96bc1..ad11af45c02c73 100644 --- a/dom/bindings/test/moz.build +++ b/dom/bindings/test/moz.build @@ -4,70 +4,70 @@ # License, v. 2.0. If a copy of the MPL was not distributed with this # file, You can obtain one at http://mozilla.org/MPL/2.0/. -DEFINES['IMPL_LIBXUL'] = True -DEFINES['MOZILLA_INTERNAL_API'] = True +DEFINES["IMPL_LIBXUL"] = True +DEFINES["MOZILLA_INTERNAL_API"] = True # Do NOT export this library. We don't actually want our test code # being added to libxul or anything. -Library('dombindings_test_s') +Library("dombindings_test_s") EXTRA_COMPONENTS += [ - 'TestInterfaceJS.js', - 'TestInterfaceJS.manifest', - 'TestInterfaceJSMaplike.js' + "TestInterfaceJS.js", + "TestInterfaceJS.manifest", + "TestInterfaceJSMaplike.js", ] -MOCHITEST_MANIFESTS += ['mochitest.ini'] +MOCHITEST_MANIFESTS += ["mochitest.ini"] -MOCHITEST_CHROME_MANIFESTS += ['chrome.ini'] +MOCHITEST_CHROME_MANIFESTS += ["chrome.ini"] TEST_WEBIDL_FILES += [ - 'TestDictionary.webidl', - 'TestJSImplInheritanceGen.webidl', - 'TestTypedef.webidl', + "TestDictionary.webidl", + "TestJSImplInheritanceGen.webidl", + "TestTypedef.webidl", ] PREPROCESSED_TEST_WEBIDL_FILES += [ - 'TestCodeGen.webidl', - 'TestExampleGen.webidl', - 'TestJSImplGen.webidl', + "TestCodeGen.webidl", + "TestExampleGen.webidl", + "TestJSImplGen.webidl", ] WEBIDL_EXAMPLE_INTERFACES += [ - 'TestExampleInterface', - 'TestExampleProxyInterface', - 'TestExampleThrowingConstructorInterface', - 'TestExampleWorkerInterface', + "TestExampleInterface", + "TestExampleProxyInterface", + "TestExampleThrowingConstructorInterface", + "TestExampleWorkerInterface", ] # Bug 932082 tracks having bindings use namespaced includes. 
LOCAL_INCLUDES += [ - '!/dist/include/mozilla/dom', + "!/dist/include/mozilla/dom", ] LOCAL_INCLUDES += [ - '!..', - '/dom/bindings', - '/js/xpconnect/src', - '/js/xpconnect/wrappers', + "!..", + "/dom/bindings", + "/js/xpconnect/src", + "/js/xpconnect/wrappers", ] -include('/ipc/chromium/chromium-config.mozbuild') +include("/ipc/chromium/chromium-config.mozbuild") -if CONFIG['CC_TYPE'] in ('clang', 'gcc'): - CXXFLAGS += ['-Wno-error=shadow'] +if CONFIG["CC_TYPE"] in ("clang", "gcc"): + CXXFLAGS += ["-Wno-error=shadow"] -if CONFIG['MOZ_DEBUG']: +if CONFIG["MOZ_DEBUG"]: XPIDL_SOURCES += [ - 'mozITestInterfaceJS.idl', + "mozITestInterfaceJS.idl", ] - XPIDL_MODULE = 'dom_bindings_test' + XPIDL_MODULE = "dom_bindings_test" # Because we don't actually link this code anywhere, we don't care about # their optimization level, so don't waste time on optimization. -if CONFIG['CXX_TYPE'] == 'clang-cl': - CXXFLAGS += ['-Od'] +if CONFIG["CXX_TYPE"] == "clang-cl": + CXXFLAGS += ["-Od"] else: - CXXFLAGS += ['-O0'] + CXXFLAGS += ["-O0"] diff --git a/dom/broadcastchannel/moz.build b/dom/broadcastchannel/moz.build index d7ce7d4833cc01..a36176af8a0adb 100644 --- a/dom/broadcastchannel/moz.build +++ b/dom/broadcastchannel/moz.build @@ -8,23 +8,23 @@ with Files("**"): BUG_COMPONENT = ("Core", "DOM: postMessage") EXPORTS.mozilla.dom += [ - 'BroadcastChannel.h', + "BroadcastChannel.h", ] UNIFIED_SOURCES += [ - 'BroadcastChannel.cpp', - 'BroadcastChannelChild.cpp', - 'BroadcastChannelParent.cpp', - 'BroadcastChannelService.cpp', + "BroadcastChannel.cpp", + "BroadcastChannelChild.cpp", + "BroadcastChannelParent.cpp", + "BroadcastChannelService.cpp", ] IPDL_SOURCES += [ - 'PBroadcastChannel.ipdl', + "PBroadcastChannel.ipdl", ] -MOCHITEST_MANIFESTS += ['tests/mochitest.ini'] -BROWSER_CHROME_MANIFESTS += ['tests/browser.ini'] +MOCHITEST_MANIFESTS += ["tests/mochitest.ini"] +BROWSER_CHROME_MANIFESTS += ["tests/browser.ini"] -include('/ipc/chromium/chromium-config.mozbuild') +include("/ipc/chromium/chromium-config.mozbuild") -FINAL_LIBRARY = 'xul' +FINAL_LIBRARY = "xul" diff --git a/dom/browser-element/moz.build b/dom/browser-element/moz.build index e1095697682e46..1f83fbd436dae3 100644 --- a/dom/browser-element/moz.build +++ b/dom/browser-element/moz.build @@ -8,30 +8,30 @@ with Files("**"): BUG_COMPONENT = ("Core", "DOM: Core & HTML") XPIDL_SOURCES += [ - 'nsIBrowserElementAPI.idl', + "nsIBrowserElementAPI.idl", ] -XPIDL_MODULE = 'browser-element' +XPIDL_MODULE = "browser-element" EXTRA_JS_MODULES += [ - 'BrowserElementParent.jsm', - 'BrowserElementPromptService.jsm', + "BrowserElementParent.jsm", + "BrowserElementPromptService.jsm", ] XPCOM_MANIFESTS += [ - 'components.conf', + "components.conf", ] LOCAL_INCLUDES += [ - '/dom/html', + "/dom/html", ] -include('/ipc/chromium/chromium-config.mozbuild') +include("/ipc/chromium/chromium-config.mozbuild") -FINAL_LIBRARY = 'xul' +FINAL_LIBRARY = "xul" LOCAL_INCLUDES += [ - '/dom/', - '/dom/base', - '/dom/ipc', + "/dom/", + "/dom/base", + "/dom/ipc", ] diff --git a/dom/cache/moz.build b/dom/cache/moz.build index cfbc982fd4ae66..73612f2fae8421 100644 --- a/dom/cache/moz.build +++ b/dom/cache/moz.build @@ -9,94 +9,94 @@ with Files("**"): BUG_COMPONENT = ("Core", "Storage: Cache API") EXPORTS.mozilla.dom.cache += [ - 'Action.h', - 'ActorChild.h', - 'ActorUtils.h', - 'AutoUtils.h', - 'Cache.h', - 'CacheChild.h', - 'CacheCommon.h', - 'CacheOpChild.h', - 'CacheOpParent.h', - 'CacheParent.h', - 'CacheStorage.h', - 'CacheStorageChild.h', - 'CacheStorageParent.h', - 
'CacheStreamControlChild.h', - 'CacheStreamControlParent.h', - 'CacheWorkerRef.h', - 'Connection.h', - 'Context.h', - 'DBAction.h', - 'DBSchema.h', - 'FileUtils.h', - 'IPCUtils.h', - 'Manager.h', - 'ManagerId.h', - 'PrincipalVerifier.h', - 'QuotaClient.h', - 'ReadStream.h', - 'SavedTypes.h', - 'StreamControl.h', - 'StreamList.h', - 'Types.h', - 'TypeUtils.h', + "Action.h", + "ActorChild.h", + "ActorUtils.h", + "AutoUtils.h", + "Cache.h", + "CacheChild.h", + "CacheCommon.h", + "CacheOpChild.h", + "CacheOpParent.h", + "CacheParent.h", + "CacheStorage.h", + "CacheStorageChild.h", + "CacheStorageParent.h", + "CacheStreamControlChild.h", + "CacheStreamControlParent.h", + "CacheWorkerRef.h", + "Connection.h", + "Context.h", + "DBAction.h", + "DBSchema.h", + "FileUtils.h", + "IPCUtils.h", + "Manager.h", + "ManagerId.h", + "PrincipalVerifier.h", + "QuotaClient.h", + "ReadStream.h", + "SavedTypes.h", + "StreamControl.h", + "StreamList.h", + "Types.h", + "TypeUtils.h", ] UNIFIED_SOURCES += [ - 'Action.cpp', - 'ActorChild.cpp', - 'AutoUtils.cpp', - 'Cache.cpp', - 'CacheChild.cpp', - 'CacheCommon.cpp', - 'CacheOpChild.cpp', - 'CacheOpParent.cpp', - 'CacheParent.cpp', - 'CacheStorage.cpp', - 'CacheStorageChild.cpp', - 'CacheStorageParent.cpp', - 'CacheStreamControlChild.cpp', - 'CacheStreamControlParent.cpp', - 'CacheWorkerRef.cpp', - 'Connection.cpp', - 'Context.cpp', - 'DBAction.cpp', - 'DBSchema.cpp', - 'FileUtils.cpp', - 'Manager.cpp', - 'ManagerId.cpp', - 'PrincipalVerifier.cpp', - 'QuotaClient.cpp', - 'ReadStream.cpp', - 'StreamControl.cpp', - 'StreamList.cpp', - 'TypeUtils.cpp', + "Action.cpp", + "ActorChild.cpp", + "AutoUtils.cpp", + "Cache.cpp", + "CacheChild.cpp", + "CacheCommon.cpp", + "CacheOpChild.cpp", + "CacheOpParent.cpp", + "CacheParent.cpp", + "CacheStorage.cpp", + "CacheStorageChild.cpp", + "CacheStorageParent.cpp", + "CacheStreamControlChild.cpp", + "CacheStreamControlParent.cpp", + "CacheWorkerRef.cpp", + "Connection.cpp", + "Context.cpp", + "DBAction.cpp", + "DBSchema.cpp", + "FileUtils.cpp", + "Manager.cpp", + "ManagerId.cpp", + "PrincipalVerifier.cpp", + "QuotaClient.cpp", + "ReadStream.cpp", + "StreamControl.cpp", + "StreamList.cpp", + "TypeUtils.cpp", ] IPDL_SOURCES += [ - 'CacheTypes.ipdlh', - 'PCache.ipdl', - 'PCacheOp.ipdl', - 'PCacheStorage.ipdl', - 'PCacheStreamControl.ipdl', + "CacheTypes.ipdlh", + "PCache.ipdl", + "PCacheOp.ipdl", + "PCacheStorage.ipdl", + "PCacheStreamControl.ipdl", ] -include('/ipc/chromium/chromium-config.mozbuild') +include("/ipc/chromium/chromium-config.mozbuild") -FINAL_LIBRARY = 'xul' +FINAL_LIBRARY = "xul" MOCHITEST_MANIFESTS += [ - 'test/mochitest/mochitest.ini', + "test/mochitest/mochitest.ini", ] BROWSER_CHROME_MANIFESTS += [ - 'test/browser/browser.ini', + "test/browser/browser.ini", ] XPCSHELL_TESTS_MANIFESTS += [ - 'test/xpcshell/xpcshell.ini', + "test/xpcshell/xpcshell.ini", ] -if CONFIG['CC_TYPE'] in ('clang', 'gcc'): - CXXFLAGS += ['-Wno-error=shadow'] +if CONFIG["CC_TYPE"] in ("clang", "gcc"): + CXXFLAGS += ["-Wno-error=shadow"] diff --git a/dom/canvas/moz.build b/dom/canvas/moz.build index 94d61acefcf886..dfb86caef5cf8c 100644 --- a/dom/canvas/moz.build +++ b/dom/canvas/moz.build @@ -4,223 +4,223 @@ # License, v. 2.0. If a copy of the MPL was not distributed with this # file, You can obtain one at http://mozilla.org/MPL/2.0/. 
-with Files('**'): - BUG_COMPONENT = ('Core', 'Canvas: 2D') +with Files("**"): + BUG_COMPONENT = ("Core", "Canvas: 2D") -with Files('TexUnpackBlob.cpp'): - BUG_COMPONENT = ('Core', 'Canvas: WebGL') +with Files("TexUnpackBlob.cpp"): + BUG_COMPONENT = ("Core", "Canvas: WebGL") -with Files('WebGL*'): - BUG_COMPONENT = ('Core', 'Canvas: WebGL') +with Files("WebGL*"): + BUG_COMPONENT = ("Core", "Canvas: WebGL") -with Files('test/webgl-conf/**'): - BUG_COMPONENT = ('Core', 'Canvas: WebGL') +with Files("test/webgl-conf/**"): + BUG_COMPONENT = ("Core", "Canvas: WebGL") -with Files('test/webgl-mochitest/**'): - BUG_COMPONENT = ('Core', 'Canvas: WebGL') +with Files("test/webgl-mochitest/**"): + BUG_COMPONENT = ("Core", "Canvas: WebGL") -with Files('test/reftest/webgl*'): - BUG_COMPONENT = ('Core', 'Canvas: WebGL') +with Files("test/reftest/webgl*"): + BUG_COMPONENT = ("Core", "Canvas: WebGL") -with Files('test/chrome/*webgl*'): - BUG_COMPONENT = ('Core', 'Canvas: WebGL') +with Files("test/chrome/*webgl*"): + BUG_COMPONENT = ("Core", "Canvas: WebGL") -with Files('test/crossorigin/*webgl*'): - BUG_COMPONENT = ('Core', 'Canvas: WebGL') +with Files("test/crossorigin/*webgl*"): + BUG_COMPONENT = ("Core", "Canvas: WebGL") # Change the following line(s) to avoid bug 1081323 (clobber after changing a manifest): # * Adjust failure errata for webgl-conf. MOCHITEST_MANIFESTS += [ - 'test/crash/mochitest.ini', - 'test/crossorigin/mochitest.ini', - 'test/mochitest.ini', - 'test/webgl-conf/generated-mochitest.ini', - 'test/webgl-mochitest/mochitest.ini', + "test/crash/mochitest.ini", + "test/crossorigin/mochitest.ini", + "test/mochitest.ini", + "test/webgl-conf/generated-mochitest.ini", + "test/webgl-mochitest/mochitest.ini", ] -MOCHITEST_CHROME_MANIFESTS += ['test/chrome/chrome.ini'] +MOCHITEST_CHROME_MANIFESTS += ["test/chrome/chrome.ini"] EXPORTS += [ - 'nsICanvasRenderingContextInternal.h', + "nsICanvasRenderingContextInternal.h", ] EXPORTS.mozilla.dom += [ - 'BasicRenderingContext2D.h', - 'CanvasGradient.h', - 'CanvasPath.h', - 'CanvasPattern.h', - 'CanvasRenderingContext2D.h', - 'CanvasRenderingContextHelper.h', - 'CanvasUtils.h', - 'GeneratePlaceholderCanvasData.h', - 'ImageBitmap.h', - 'ImageBitmapRenderingContext.h', - 'ImageBitmapSource.h', - 'ImageData.h', - 'ImageUtils.h', - 'IpdlQueue.h', - 'OffscreenCanvas.h', - 'ProducerConsumerQueue.h', - 'QueueParamTraits.h', - 'TextMetrics.h', - 'WebGLChild.h', - 'WebGLCommandQueue.h', - 'WebGLCrossProcessCommandQueue.h', - 'WebGLIpdl.h', - 'WebGLParent.h', - 'WebGLTypes.h', - 'XRWebGLLayer.h', + "BasicRenderingContext2D.h", + "CanvasGradient.h", + "CanvasPath.h", + "CanvasPattern.h", + "CanvasRenderingContext2D.h", + "CanvasRenderingContextHelper.h", + "CanvasUtils.h", + "GeneratePlaceholderCanvasData.h", + "ImageBitmap.h", + "ImageBitmapRenderingContext.h", + "ImageBitmapSource.h", + "ImageData.h", + "ImageUtils.h", + "IpdlQueue.h", + "OffscreenCanvas.h", + "ProducerConsumerQueue.h", + "QueueParamTraits.h", + "TextMetrics.h", + "WebGLChild.h", + "WebGLCommandQueue.h", + "WebGLCrossProcessCommandQueue.h", + "WebGLIpdl.h", + "WebGLParent.h", + "WebGLTypes.h", + "XRWebGLLayer.h", ] # XRWebGLLayer.h must be exported for use by the generated WebXRBinding.h # Canvas 2D and common sources UNIFIED_SOURCES += [ - 'CanvasGradient.cpp', - 'CanvasImageCache.cpp', - 'CanvasPattern.cpp', - 'CanvasRenderingContext2D.cpp', - 'CanvasRenderingContextHelper.cpp', - 'CanvasUtils.cpp', - 'ImageBitmap.cpp', - 'ImageBitmapRenderingContext.cpp', - 'ImageData.cpp', - 
'nsICanvasRenderingContextInternal.cpp', - 'OffscreenCanvas.cpp', - 'XRWebGLLayer.cpp', + "CanvasGradient.cpp", + "CanvasImageCache.cpp", + "CanvasPattern.cpp", + "CanvasRenderingContext2D.cpp", + "CanvasRenderingContextHelper.cpp", + "CanvasUtils.cpp", + "ImageBitmap.cpp", + "ImageBitmapRenderingContext.cpp", + "ImageData.cpp", + "nsICanvasRenderingContextInternal.cpp", + "OffscreenCanvas.cpp", + "XRWebGLLayer.cpp", ] SOURCES += [ - 'ImageUtils.cpp', + "ImageUtils.cpp", ] # WebGL Sources UNIFIED_SOURCES += [ - 'CacheInvalidator.cpp', - 'ClientWebGLContext.cpp', - 'ClientWebGLExtensions.cpp', - 'HostWebGLContext.cpp', - 'Queue.cpp', - 'TexUnpackBlob.cpp', - 'WebGL2Context.cpp', - 'WebGL2ContextBuffers.cpp', - 'WebGL2ContextFramebuffers.cpp', - 'WebGL2ContextMRTs.cpp', - 'WebGL2ContextQueries.cpp', - 'WebGL2ContextRenderbuffers.cpp', - 'WebGL2ContextSamplers.cpp', - 'WebGL2ContextState.cpp', - 'WebGL2ContextSync.cpp', - 'WebGL2ContextTransformFeedback.cpp', - 'WebGL2ContextUniforms.cpp', - 'WebGLBuffer.cpp', - 'WebGLChild.cpp', - 'WebGLContext.cpp', - 'WebGLContextBuffers.cpp', - 'WebGLContextDraw.cpp', - 'WebGLContextExtensions.cpp', - 'WebGLContextFramebufferOperations.cpp', - 'WebGLContextGL.cpp', - 'WebGLContextLossHandler.cpp', - 'WebGLContextState.cpp', - 'WebGLContextTextures.cpp', - 'WebGLContextUtils.cpp', - 'WebGLContextValidate.cpp', - 'WebGLContextVertexArray.cpp', - 'WebGLContextVertices.cpp', - 'WebGLCrossProcessCommandQueue.cpp', - 'WebGLExtensionColorBufferFloat.cpp', - 'WebGLExtensionColorBufferHalfFloat.cpp', - 'WebGLExtensionCompressedTextureASTC.cpp', - 'WebGLExtensionCompressedTextureBPTC.cpp', - 'WebGLExtensionCompressedTextureES3.cpp', - 'WebGLExtensionCompressedTextureETC1.cpp', - 'WebGLExtensionCompressedTexturePVRTC.cpp', - 'WebGLExtensionCompressedTextureRGTC.cpp', - 'WebGLExtensionCompressedTextureS3TC.cpp', - 'WebGLExtensionCompressedTextureS3TC_SRGB.cpp', - 'WebGLExtensionDepthTexture.cpp', - 'WebGLExtensionDisjointTimerQuery.cpp', - 'WebGLExtensionDrawBuffers.cpp', - 'WebGLExtensionEXTColorBufferFloat.cpp', - 'WebGLExtensionFragDepth.cpp', - 'WebGLExtensionInstancedArrays.cpp', - 'WebGLExtensions.cpp', - 'WebGLExtensionShaderTextureLod.cpp', - 'WebGLExtensionSRGB.cpp', - 'WebGLExtensionTextureFloat.cpp', - 'WebGLExtensionTextureFloatLinear.cpp', - 'WebGLExtensionTextureHalfFloat.cpp', - 'WebGLExtensionTextureHalfFloatLinear.cpp', - 'WebGLFormats.cpp', - 'WebGLFramebuffer.cpp', - 'WebGLMemoryTracker.cpp', - 'WebGLParent.cpp', - 'WebGLProgram.cpp', - 'WebGLQuery.cpp', - 'WebGLRenderbuffer.cpp', - 'WebGLSampler.cpp', - 'WebGLShader.cpp', - 'WebGLShaderValidator.cpp', - 'WebGLSync.cpp', - 'WebGLTexelConversions.cpp', - 'WebGLTexture.cpp', - 'WebGLTextureUpload.cpp', - 'WebGLTransformFeedback.cpp', - 'WebGLValidateStrings.cpp', - 'WebGLVertexArray.cpp', - 'WebGLVertexArrayFake.cpp', - 'WebGLVertexArrayGL.cpp', + "CacheInvalidator.cpp", + "ClientWebGLContext.cpp", + "ClientWebGLExtensions.cpp", + "HostWebGLContext.cpp", + "Queue.cpp", + "TexUnpackBlob.cpp", + "WebGL2Context.cpp", + "WebGL2ContextBuffers.cpp", + "WebGL2ContextFramebuffers.cpp", + "WebGL2ContextMRTs.cpp", + "WebGL2ContextQueries.cpp", + "WebGL2ContextRenderbuffers.cpp", + "WebGL2ContextSamplers.cpp", + "WebGL2ContextState.cpp", + "WebGL2ContextSync.cpp", + "WebGL2ContextTransformFeedback.cpp", + "WebGL2ContextUniforms.cpp", + "WebGLBuffer.cpp", + "WebGLChild.cpp", + "WebGLContext.cpp", + "WebGLContextBuffers.cpp", + "WebGLContextDraw.cpp", + "WebGLContextExtensions.cpp", + 
"WebGLContextFramebufferOperations.cpp", + "WebGLContextGL.cpp", + "WebGLContextLossHandler.cpp", + "WebGLContextState.cpp", + "WebGLContextTextures.cpp", + "WebGLContextUtils.cpp", + "WebGLContextValidate.cpp", + "WebGLContextVertexArray.cpp", + "WebGLContextVertices.cpp", + "WebGLCrossProcessCommandQueue.cpp", + "WebGLExtensionColorBufferFloat.cpp", + "WebGLExtensionColorBufferHalfFloat.cpp", + "WebGLExtensionCompressedTextureASTC.cpp", + "WebGLExtensionCompressedTextureBPTC.cpp", + "WebGLExtensionCompressedTextureES3.cpp", + "WebGLExtensionCompressedTextureETC1.cpp", + "WebGLExtensionCompressedTexturePVRTC.cpp", + "WebGLExtensionCompressedTextureRGTC.cpp", + "WebGLExtensionCompressedTextureS3TC.cpp", + "WebGLExtensionCompressedTextureS3TC_SRGB.cpp", + "WebGLExtensionDepthTexture.cpp", + "WebGLExtensionDisjointTimerQuery.cpp", + "WebGLExtensionDrawBuffers.cpp", + "WebGLExtensionEXTColorBufferFloat.cpp", + "WebGLExtensionFragDepth.cpp", + "WebGLExtensionInstancedArrays.cpp", + "WebGLExtensions.cpp", + "WebGLExtensionShaderTextureLod.cpp", + "WebGLExtensionSRGB.cpp", + "WebGLExtensionTextureFloat.cpp", + "WebGLExtensionTextureFloatLinear.cpp", + "WebGLExtensionTextureHalfFloat.cpp", + "WebGLExtensionTextureHalfFloatLinear.cpp", + "WebGLFormats.cpp", + "WebGLFramebuffer.cpp", + "WebGLMemoryTracker.cpp", + "WebGLParent.cpp", + "WebGLProgram.cpp", + "WebGLQuery.cpp", + "WebGLRenderbuffer.cpp", + "WebGLSampler.cpp", + "WebGLShader.cpp", + "WebGLShaderValidator.cpp", + "WebGLSync.cpp", + "WebGLTexelConversions.cpp", + "WebGLTexture.cpp", + "WebGLTextureUpload.cpp", + "WebGLTransformFeedback.cpp", + "WebGLValidateStrings.cpp", + "WebGLVertexArray.cpp", + "WebGLVertexArrayFake.cpp", + "WebGLVertexArrayGL.cpp", ] SOURCES += [ - 'MurmurHash3.cpp', + "MurmurHash3.cpp", ] IPDL_SOURCES += [ - 'PWebGL.ipdl', + "PWebGL.ipdl", ] # Suppress warnings from third-party code. -if CONFIG['CC_TYPE'] in ('clang', 'gcc'): - SOURCES['MurmurHash3.cpp'].flags += ['-Wno-implicit-fallthrough'] +if CONFIG["CC_TYPE"] in ("clang", "gcc"): + SOURCES["MurmurHash3.cpp"].flags += ["-Wno-implicit-fallthrough"] LOCAL_INCLUDES += [ - '/js/xpconnect/wrappers', + "/js/xpconnect/wrappers", ] -include('/ipc/chromium/chromium-config.mozbuild') +include("/ipc/chromium/chromium-config.mozbuild") -USE_LIBS += [ 'translator' ] # Grab the Angle shader translator. +USE_LIBS += ["translator"] # Grab the Angle shader translator. 
-FINAL_LIBRARY = 'xul' +FINAL_LIBRARY = "xul" LOCAL_INCLUDES += [ - '/dom/base', - '/dom/html', - '/dom/svg', - '/dom/workers', - '/dom/xul', - '/gfx/angle/checkout/include', - '/gfx/gl', - '/image', - '/js/xpconnect/src', - '/layout/generic', - '/layout/style', - '/layout/xul', - '/media/libyuv/libyuv/include', + "/dom/base", + "/dom/html", + "/dom/svg", + "/dom/workers", + "/dom/xul", + "/gfx/angle/checkout/include", + "/gfx/gl", + "/image", + "/js/xpconnect/src", + "/layout/generic", + "/layout/style", + "/layout/xul", + "/media/libyuv/libyuv/include", ] -CXXFLAGS += CONFIG['MOZ_CAIRO_CFLAGS'] -CXXFLAGS += CONFIG['TK_CFLAGS'] +CXXFLAGS += CONFIG["MOZ_CAIRO_CFLAGS"] +CXXFLAGS += CONFIG["TK_CFLAGS"] -LOCAL_INCLUDES += CONFIG['SKIA_INCLUDES'] +LOCAL_INCLUDES += CONFIG["SKIA_INCLUDES"] -if CONFIG['CC_TYPE'] in ('clang', 'gcc'): - CXXFLAGS += ['-Wno-error=shadow'] +if CONFIG["CC_TYPE"] in ("clang", "gcc"): + CXXFLAGS += ["-Wno-error=shadow"] -if CONFIG['CC_TYPE'] in ('clang', 'clang-cl'): - CXXFLAGS += ['-Werror=implicit-int-conversion'] +if CONFIG["CC_TYPE"] in ("clang", "clang-cl"): + CXXFLAGS += ["-Werror=implicit-int-conversion"] -if CONFIG['CC_TYPE'] == 'gcc': - CXXFLAGS += ['-Wno-error=unused-result'] # GCC doesn't ignore (void)MustUse(); +if CONFIG["CC_TYPE"] == "gcc": + CXXFLAGS += ["-Wno-error=unused-result"] # GCC doesn't ignore (void)MustUse(); diff --git a/dom/canvas/test/webgl-conf/generate-wrappers-and-manifest.py b/dom/canvas/test/webgl-conf/generate-wrappers-and-manifest.py index 9774831b048f79..adc81ec1eaaff4 100755 --- a/dom/canvas/test/webgl-conf/generate-wrappers-and-manifest.py +++ b/dom/canvas/test/webgl-conf/generate-wrappers-and-manifest.py @@ -12,88 +12,92 @@ import shutil # All paths in this file are based where this file is run. -WRAPPER_TEMPLATE_FILE = 'mochi-wrapper.html.template' -MANIFEST_TEMPLATE_FILE = 'mochitest.ini.template' -ERRATA_FILE = 'mochitest-errata.ini' -DEST_MANIFEST_PATHSTR = 'generated-mochitest.ini' +WRAPPER_TEMPLATE_FILE = "mochi-wrapper.html.template" +MANIFEST_TEMPLATE_FILE = "mochitest.ini.template" +ERRATA_FILE = "mochitest-errata.ini" +DEST_MANIFEST_PATHSTR = "generated-mochitest.ini" -BASE_TEST_LIST_PATHSTR = 'checkout/00_test_list.txt' -GENERATED_PATHSTR = 'generated' -WEBGL2_TEST_MANGLE = '2_' -PATH_SEP_MANGLING = '__' +BASE_TEST_LIST_PATHSTR = "checkout/00_test_list.txt" +GENERATED_PATHSTR = "generated" +WEBGL2_TEST_MANGLE = "2_" +PATH_SEP_MANGLING = "__" SUPPORT_DIRS = [ - 'checkout', + "checkout", ] EXTRA_SUPPORT_FILES = [ - 'always-fail.html', - 'iframe-passthrough.css', - 'mochi-single.html', + "always-fail.html", + "iframe-passthrough.css", + "mochi-single.html", ] -ACCEPTABLE_ERRATA_KEYS = set([ - 'fail-if', - 'skip-if', -]) +ACCEPTABLE_ERRATA_KEYS = set( + [ + "fail-if", + "skip-if", + ] +) def ChooseSubsuite(name): # name: generated/test_2_conformance2__vertex_arrays__vertex-array-object.html - split = name.split('__') + split = name.split("__") - version = '1' - if '/test_2_' in split[0]: - version = '2' + version = "1" + if "/test_2_" in split[0]: + version = "2" - category = 'core' + category = "core" - split[0] = split[0].split('/')[1] - if 'deqp' in split[0]: - if version == '1': + split[0] = split[0].split("/")[1] + if "deqp" in split[0]: + if version == "1": # There's few enough that we'll just merge them with webgl1-ext. 
- category = 'ext' + category = "ext" else: - category = 'deqp' - elif 'conformance' in split[0]: - if split[1] in ('glsl', 'glsl3', 'ogles'): - category = 'ext' - elif split[1] == 'textures' and split[2] != 'misc': - category = 'ext' + category = "deqp" + elif "conformance" in split[0]: + if split[1] in ("glsl", "glsl3", "ogles"): + category = "ext" + elif split[1] == "textures" and split[2] != "misc": + category = "ext" + + return "webgl{}-{}".format(version, category) - return 'webgl{}-{}'.format(version, category) ######################################################################## # GetTestList def GetTestList(): - split = BASE_TEST_LIST_PATHSTR.rsplit('/', 1) - basePath = '.' + split = BASE_TEST_LIST_PATHSTR.rsplit("/", 1) + basePath = "." testListFile = split[-1] if len(split) == 2: basePath = split[0] allowWebGL1 = True allowWebGL2 = True - alwaysFailEntry = TestEntry('always-fail.html', True, False) + alwaysFailEntry = TestEntry("always-fail.html", True, False) testList = [alwaysFailEntry] AccumTests(basePath, testListFile, allowWebGL1, allowWebGL2, testList) for x in testList: - x.path = os.path.relpath(x.path, basePath).replace(os.sep, '/') + x.path = os.path.relpath(x.path, basePath).replace(os.sep, "/") continue return testList + ############################## # Internals def IsVersionLess(a, b): - aSplit = [int(x) for x in a.split('.')] - bSplit = [int(x) for x in b.split('.')] + aSplit = [int(x) for x in a.split(".")] + bSplit = [int(x) for x in b.split(".")] while len(aSplit) < len(bSplit): aSplit.append(0) @@ -122,12 +126,12 @@ def __init__(self, path, webgl1, webgl2): def AccumTests(pathStr, listFile, allowWebGL1, allowWebGL2, out_testList): - listPathStr = pathStr + '/' + listFile + listPathStr = pathStr + "/" + listFile - listPath = listPathStr.replace('/', os.sep) - assert os.path.exists(listPath), 'Bad `listPath`: ' + listPath + listPath = listPathStr.replace("/", os.sep) + assert os.path.exists(listPath), "Bad `listPath`: " + listPath - with open(listPath, 'r') as fIn: + with open(listPath, "r") as fIn: lineNum = 0 for line in fIn: lineNum += 1 @@ -137,59 +141,61 @@ def AccumTests(pathStr, listFile, allowWebGL1, allowWebGL2, out_testList): continue curLine = line.lstrip() - if curLine.startswith('//'): + if curLine.startswith("//"): continue - if curLine.startswith('#'): + if curLine.startswith("#"): continue webgl1 = allowWebGL1 webgl2 = allowWebGL2 - while curLine.startswith('--'): # '--min-version 1.0.2 foo.html' - (flag, curLine) = curLine.split(' ', 1) - if flag == '--min-version': - (minVersion, curLine) = curLine.split(' ', 1) + while curLine.startswith("--"): # '--min-version 1.0.2 foo.html' + (flag, curLine) = curLine.split(" ", 1) + if flag == "--min-version": + (minVersion, curLine) = curLine.split(" ", 1) if not IsVersionLess(minVersion, "2.0.0"): # >= 2.0.0 webgl1 = False break - elif flag == '--max-version': - (maxVersion, curLine) = curLine.split(' ', 1) + elif flag == "--max-version": + (maxVersion, curLine) = curLine.split(" ", 1) if IsVersionLess(maxVersion, "2.0.0"): webgl2 = False break - elif flag == '--slow': + elif flag == "--slow": continue # TODO else: - text = 'Unknown flag \'{}\': {}:{}: {}'.format(flag, listPath, - lineNum, line) + text = "Unknown flag '{}': {}:{}: {}".format( + flag, listPath, lineNum, line + ) assert False, text continue - assert(webgl1 or webgl2) + assert webgl1 or webgl2 - split = curLine.rsplit('.', 1) - assert len(split) == 2, 'Bad split for `line`: ' + line + split = curLine.rsplit(".", 1) + assert len(split) == 
2, "Bad split for `line`: " + line (name, ext) = split - if ext == 'html': - newTestFilePathStr = pathStr + '/' + curLine + if ext == "html": + newTestFilePathStr = pathStr + "/" + curLine entry = TestEntry(newTestFilePathStr, webgl1, webgl2) out_testList.append(entry) continue - assert ext == 'txt', 'Bad `ext` on `line`: ' + line + assert ext == "txt", "Bad `ext` on `line`: " + line - split = curLine.rsplit('/', 1) + split = curLine.rsplit("/", 1) nextListFile = split[-1] - nextPathStr = '' + nextPathStr = "" if len(split) != 1: nextPathStr = split[0] - nextPathStr = pathStr + '/' + nextPathStr + nextPathStr = pathStr + "/" + nextPathStr AccumTests(nextPathStr, nextListFile, webgl1, webgl2, out_testList) continue return + ######################################################################## # Templates @@ -201,36 +207,37 @@ def FillTemplate(inFilePath, templateDict, outFilePath): def ImportTemplate(inFilePath): - with open(inFilePath, 'r') as f: + with open(inFilePath, "r") as f: return TemplateShell(f) def OutputFilledTemplate(templateShell, templateDict, outFilePath): spanStrList = templateShell.Fill(templateDict) - with open(outFilePath, 'w', newline='\n') as f: + with open(outFilePath, "w", newline="\n") as f: f.writelines(spanStrList) return + ############################## # Internals def WrapWithIndent(lines, indentLen): - split = lines.split('\n') + split = lines.split("\n") if len(split) == 1: return lines ret = [split[0]] - indentSpaces = ' ' * indentLen + indentSpaces = " " * indentLen for line in split[1:]: ret.append(indentSpaces + line) - return '\n'.join(ret) + return "\n".join(ret) -templateRE = re.compile('(%%.*?%%)') -assert templateRE.split(' foo = %%BAR%%;') == [' foo = ', '%%BAR%%', ';'] +templateRE = re.compile("(%%.*?%%)") +assert templateRE.split(" foo = %%BAR%%;") == [" foo = ", "%%BAR%%", ";"] class TemplateShellSpan: @@ -238,7 +245,7 @@ def __init__(self, span): self.span = span self.isLiteralSpan = True - if self.span.startswith('%%') and self.span.endswith('%%'): + if self.span.startswith("%%") and self.span.endswith("%%"): self.isLiteralSpan = False self.span = self.span[2:-2] @@ -248,7 +255,7 @@ def Fill(self, templateDict, indentLen): if self.isLiteralSpan: return self.span - assert self.span in templateDict, '\'' + self.span + '\' not in dict!' + assert self.span in templateDict, "'" + self.span + "' not in dict!" filling = templateDict[self.span] @@ -263,13 +270,13 @@ def __init__(self, iterableLines): split = templateRE.split(line) for cur in split: - isTemplateSpan = cur.startswith('%%') and cur.endswith('%%') + isTemplateSpan = cur.startswith("%%") and cur.endswith("%%") if not isTemplateSpan: curLiteralSpan.append(cur) continue if curLiteralSpan: - span = ''.join(curLiteralSpan) + span = "".join(curLiteralSpan) span = TemplateShellSpan(span) spanList.append(span) curLiteralSpan = [] @@ -282,7 +289,7 @@ def __init__(self, iterableLines): continue if curLiteralSpan: - span = ''.join(curLiteralSpan) + span = "".join(curLiteralSpan) span = TemplateShellSpan(span) spanList.append(span) @@ -300,7 +307,7 @@ def Fill(self, templateDict): # Get next `indentLen`. 
try: - lineStartPos = span.rindex('\n') + 1 + lineStartPos = span.rindex("\n") + 1 # let span = 'foo\nbar' # len(span) is 7 @@ -312,35 +319,36 @@ def Fill(self, templateDict): return ret + ######################################################################## # Output def IsWrapperWebGL2(wrapperPath): - return wrapperPath.startswith(GENERATED_PATHSTR + '/test_' + WEBGL2_TEST_MANGLE) + return wrapperPath.startswith(GENERATED_PATHSTR + "/test_" + WEBGL2_TEST_MANGLE) def WriteWrapper(entryPath, webgl2, templateShell, wrapperPathAccum): - mangledPath = entryPath.replace('/', PATH_SEP_MANGLING) - maybeWebGL2Mangle = '' + mangledPath = entryPath.replace("/", PATH_SEP_MANGLING) + maybeWebGL2Mangle = "" if webgl2: maybeWebGL2Mangle = WEBGL2_TEST_MANGLE # Mochitests must start with 'test_' or similar, or the test # runner will ignore our tests. # The error text is "is not a valid test". - wrapperFileName = 'test_' + maybeWebGL2Mangle + mangledPath + wrapperFileName = "test_" + maybeWebGL2Mangle + mangledPath - wrapperPath = GENERATED_PATHSTR + '/' + wrapperFileName - print('Adding wrapper: ' + wrapperPath) + wrapperPath = GENERATED_PATHSTR + "/" + wrapperFileName + print("Adding wrapper: " + wrapperPath) - args = '' + args = "" if webgl2: - args = '?webglVersion=2' + args = "?webglVersion=2" templateDict = { - 'TEST_PATH': entryPath, - 'ARGS': args, + "TEST_PATH": entryPath, + "ARGS": args, } OutputFilledTemplate(templateShell, templateDict, wrapperPath) @@ -355,7 +363,7 @@ def WriteWrapper(entryPath, webgl2, templateShell, wrapperPathAccum): def WriteWrappers(testEntryList): templateShell = ImportTemplate(WRAPPER_TEMPLATE_FILE) - generatedDirPath = GENERATED_PATHSTR.replace('/', os.sep) + generatedDirPath = GENERATED_PATHSTR.replace("/", os.sep) if not os.path.exists(generatedDirPath): os.mkdir(generatedDirPath) assert os.path.isdir(generatedDirPath) @@ -368,52 +376,52 @@ def WriteWrappers(testEntryList): WriteWrapper(entry.path, True, templateShell, wrapperPathList) continue - print('{} wrappers written.\n'.format(len(wrapperPathList))) + print("{} wrappers written.\n".format(len(wrapperPathList))) return wrapperPathList -kManifestRelPathStr = os.path.relpath('.', os.path.dirname(DEST_MANIFEST_PATHSTR)) -kManifestRelPathStr = kManifestRelPathStr.replace(os.sep, '/') +kManifestRelPathStr = os.path.relpath(".", os.path.dirname(DEST_MANIFEST_PATHSTR)) +kManifestRelPathStr = kManifestRelPathStr.replace(os.sep, "/") def ManifestPathStr(pathStr): - pathStr = kManifestRelPathStr + '/' + pathStr - return os.path.normpath(pathStr).replace(os.sep, '/') + pathStr = kManifestRelPathStr + "/" + pathStr + return os.path.normpath(pathStr).replace(os.sep, "/") def WriteManifest(wrapperPathStrList, supportPathStrList): destPathStr = DEST_MANIFEST_PATHSTR - print('Generating manifest: ' + destPathStr) + print("Generating manifest: " + destPathStr) errataMap = LoadErrata() # DEFAULT_ERRATA - defaultSectionName = 'DEFAULT' + defaultSectionName = "DEFAULT" defaultSectionLines = [] if defaultSectionName in errataMap: defaultSectionLines = errataMap[defaultSectionName] del errataMap[defaultSectionName] - defaultSectionStr = '\n'.join(defaultSectionLines) + defaultSectionStr = "\n".join(defaultSectionLines) # SUPPORT_FILES supportPathStrList = [ManifestPathStr(x) for x in supportPathStrList] supportPathStrList = sorted(supportPathStrList) - supportFilesStr = '\n'.join(supportPathStrList) + supportFilesStr = "\n".join(supportPathStrList) # MANIFEST_TESTS manifestTestLineList = [] wrapperPathStrList = 
sorted(wrapperPathStrList) for wrapperPathStr in wrapperPathStrList: wrapperManifestPathStr = ManifestPathStr(wrapperPathStr) - sectionName = '[' + wrapperManifestPathStr + ']' + sectionName = "[" + wrapperManifestPathStr + "]" manifestTestLineList.append(sectionName) errataLines = [] subsuite = ChooseSubsuite(wrapperPathStr) - errataLines.append('subsuite = ' + subsuite) + errataLines.append("subsuite = " + subsuite) if wrapperPathStr in errataMap: assert subsuite @@ -424,29 +432,30 @@ def WriteManifest(wrapperPathStrList, supportPathStrList): continue if errataMap: - print('Errata left in map:') + print("Errata left in map:") for x in errataMap.keys(): - print(' '*4 + x) + print(" " * 4 + x) assert False - manifestTestsStr = '\n'.join(manifestTestLineList) + manifestTestsStr = "\n".join(manifestTestLineList) # Fill the template. templateDict = { - 'DEFAULT_ERRATA': defaultSectionStr, - 'SUPPORT_FILES': supportFilesStr, - 'MANIFEST_TESTS': manifestTestsStr, + "DEFAULT_ERRATA": defaultSectionStr, + "SUPPORT_FILES": supportFilesStr, + "MANIFEST_TESTS": manifestTestsStr, } - destPath = destPathStr.replace('/', os.sep) + destPath = destPathStr.replace("/", os.sep) FillTemplate(MANIFEST_TEMPLATE_FILE, templateDict, destPath) return + ############################## # Internals -kManifestHeaderRegex = re.compile(r'[[]([^]]*)[]]') +kManifestHeaderRegex = re.compile(r"[[]([^]]*)[]]") def LoadINI(path): @@ -458,7 +467,7 @@ def LoadINI(path): ret = {} ret[curSectionName] = (lineNum, curSectionMap) - with open(path, 'r') as f: + with open(path, "r") as f: for line in f: lineNum += 1 @@ -466,23 +475,24 @@ def LoadINI(path): if not line: continue - if line[0] in [';', '#']: + if line[0] in [";", "#"]: continue - if line[0] == '[': - assert line[-1] == ']', '{}:{}'.format(path, lineNum) + if line[0] == "[": + assert line[-1] == "]", "{}:{}".format(path, lineNum) curSectionName = line[1:-1] - assert curSectionName not in ret, 'Line {}: Duplicate section: {}'.format( - lineNum, line) + assert ( + curSectionName not in ret + ), "Line {}: Duplicate section: {}".format(lineNum, line) curSectionMap = {} ret[curSectionName] = (lineNum, curSectionMap) continue - split = line.split('=', 1) + split = line.split("=", 1) key = split[0].strip() - val = '' + val = "" if len(split) == 2: val = split[1].strip() @@ -502,15 +512,16 @@ def LoadErrata(): if sectionName is None: continue - elif sectionName != 'DEFAULT': - path = sectionName.replace('/', os.sep) - assert os.path.exists(path), 'Errata line {}: Invalid file: {}'.format( - sectionLineNum, sectionName) + elif sectionName != "DEFAULT": + path = sectionName.replace("/", os.sep) + assert os.path.exists(path), "Errata line {}: Invalid file: {}".format( + sectionLineNum, sectionName + ) for (key, (lineNum, val)) in sectionMap.items(): - assert key in ACCEPTABLE_ERRATA_KEYS, 'Line {}: {}'.format(lineNum, key) + assert key in ACCEPTABLE_ERRATA_KEYS, "Line {}: {}".format(lineNum, key) - curLine = '{} = {}'.format(key, val) + curLine = "{} = {}".format(key, val) curLines.append(curLine) continue @@ -519,6 +530,7 @@ def LoadErrata(): return ret + ######################################################################## @@ -530,8 +542,8 @@ def GetSupportFileList(): continue for pathStr in ret: - path = pathStr.replace('/', os.sep) - assert os.path.exists(path), path + '\n\n\n' + 'pathStr: ' + str(pathStr) + path = pathStr.replace("/", os.sep) + assert os.path.exists(path), path + "\n\n\n" + "pathStr: " + str(pathStr) continue return ret @@ -542,16 +554,16 @@ def 
GetFilePathListForDir(baseDir): for root, folders, files in os.walk(baseDir): for f in files: filePath = os.path.join(root, f) - filePath = filePath.replace(os.sep, '/') + filePath = filePath.replace(os.sep, "/") ret.append(filePath) return ret -if __name__ == '__main__': +if __name__ == "__main__": file_dir = Path(__file__).parent os.chdir(str(file_dir)) - shutil.rmtree(file_dir / 'generated', True) + shutil.rmtree(file_dir / "generated", True) testEntryList = GetTestList() wrapperPathStrList = WriteWrappers(testEntryList) @@ -559,4 +571,4 @@ def GetFilePathListForDir(baseDir): supportPathStrList = GetSupportFileList() WriteManifest(wrapperPathStrList, supportPathStrList) - print('Done!') + print("Done!") diff --git a/dom/canvas/test/webgl-conf/import.py b/dom/canvas/test/webgl-conf/import.py index 542f555ff2699b..523084f9d1eab1 100755 --- a/dom/canvas/test/webgl-conf/import.py +++ b/dom/canvas/test/webgl-conf/import.py @@ -4,35 +4,37 @@ # License, v. 2.0. If a copy of the MPL was not distributed with this # file, You can obtain one at http://mozilla.org/MPL/2.0/. -assert __name__ == '__main__' +assert __name__ == "__main__" from pathlib import Path import shutil import sys -REL_PATH = '/dom/canvas/test/webgl-conf' +REL_PATH = "/dom/canvas/test/webgl-conf" REPO_DIR = Path.cwd() DIR_IN_GECKO = Path(__file__).parent -assert not REPO_DIR.samefile(DIR_IN_GECKO), 'Run this script from the source git checkout.' +assert not REPO_DIR.samefile( + DIR_IN_GECKO +), "Run this script from the source git checkout." assert DIR_IN_GECKO.as_posix().endswith(REL_PATH) # Be paranoid with rm -rf. -gecko_base_dir = DIR_IN_GECKO.as_posix()[:-len(REL_PATH)] -angle_dir = Path(gecko_base_dir, 'gfx/angle').as_posix() +gecko_base_dir = DIR_IN_GECKO.as_posix()[: -len(REL_PATH)] +angle_dir = Path(gecko_base_dir, "gfx/angle").as_posix() sys.path.append(angle_dir) from vendor_from_git import print_now, record_cherry_picks # -- -(MERGE_BASE_ORIGIN, ) = sys.argv[1:] # Not always 'origin'! +(MERGE_BASE_ORIGIN,) = sys.argv[1:] # Not always 'origin'! record_cherry_picks(DIR_IN_GECKO, MERGE_BASE_ORIGIN) # -- -src_dir = Path(REPO_DIR, 'sdk/tests') -dest_dir = Path(DIR_IN_GECKO, 'checkout') -print_now('Nuking old checkout...') +src_dir = Path(REPO_DIR, "sdk/tests") +dest_dir = Path(DIR_IN_GECKO, "checkout") +print_now("Nuking old checkout...") shutil.rmtree(dest_dir, True) -print_now('Writing new checkout...') +print_now("Writing new checkout...") shutil.copytree(src_dir, dest_dir, copy_function=shutil.copy) -print_now('Done!') +print_now("Done!") diff --git a/dom/canvas/test/webgl-mochitest/mochi-to-testcase.py b/dom/canvas/test/webgl-mochitest/mochi-to-testcase.py index dd0f3ffc3db0dc..c2e84eec071441 100644 --- a/dom/canvas/test/webgl-mochitest/mochi-to-testcase.py +++ b/dom/canvas/test/webgl-mochitest/mochi-to-testcase.py @@ -5,9 +5,9 @@ assert len(sys.argv) == 2 MOCHI_PATH = pathlib.Path(sys.argv[1]) -assert MOCHI_PATH.suffix == '.html' +assert MOCHI_PATH.suffix == ".html" -TEST_PATH = MOCHI_PATH.with_suffix('.solo.html') +TEST_PATH = MOCHI_PATH.with_suffix(".solo.html") def read_local_file(include): @@ -17,10 +17,10 @@ def read_local_file(include): try: return file_path.read_bytes() except IOError: - return b'' + return b"" -SIMPLETEST_REPLACEMENT = b''' +SIMPLETEST_REPLACEMENT = b"""
-''' +""" -INCLUDE_PATTERN = re.compile(b'\\s*') -CSS_PATTERN = re.compile(b']*)[\'"]>') +INCLUDE_PATTERN = re.compile(b"\\s*") +CSS_PATTERN = re.compile( + b"]*)['\"]>" +) -with open(TEST_PATH, 'wb') as fout: - with open(MOCHI_PATH, 'rb') as fin: +with open(TEST_PATH, "wb") as fout: + with open(MOCHI_PATH, "rb") as fin: for line in fin: skip_line = False for css in CSS_PATTERN.findall(line): skip_line = True - print('Ignoring stylesheet: ' + css.decode()) + print("Ignoring stylesheet: " + css.decode()) for inc in INCLUDE_PATTERN.findall(line): skip_line = True - if inc == b'/MochiKit/MochiKit': + if inc == b"/MochiKit/MochiKit": continue - if inc == b'/tests/SimpleTest/SimpleTest': - print('Injecting SimpleTest replacement') + if inc == b"/tests/SimpleTest/SimpleTest": + print("Injecting SimpleTest replacement") fout.write(SIMPLETEST_REPLACEMENT) continue - inc_js = inc.decode() + '.js' + inc_js = inc.decode() + ".js" inc_data = read_local_file(inc_js) if not inc_data: - print('Warning: Unknown JS file ignored: ' + inc_js) + print("Warning: Unknown JS file ignored: " + inc_js) continue - print('Injecting include: ' + inc_js) - fout.write(b'\n\n') + fout.write(b"\n\n") continue if skip_line: diff --git a/dom/chrome-webidl/moz.build b/dom/chrome-webidl/moz.build index c2d4af96083f3b..5e8c9869588cb2 100644 --- a/dom/chrome-webidl/moz.build +++ b/dom/chrome-webidl/moz.build @@ -29,68 +29,68 @@ with Files("WebExtension*.webidl"): BUG_COMPONENT = ("WebExtensions", "General") with Files("Glean*.webidl"): - BUG_COMPONENT = ('Toolkit', 'Telemetry') + BUG_COMPONENT = ("Toolkit", "Telemetry") PREPROCESSED_WEBIDL_FILES = [ - 'ChromeUtils.webidl', + "ChromeUtils.webidl", ] WEBIDL_FILES = [ - 'BrowsingContext.webidl', - 'ChannelWrapper.webidl', - 'ClonedErrorHolder.webidl', - 'DebuggerNotification.webidl', - 'DebuggerNotificationObserver.webidl', - 'DebuggerUtils.webidl', - 'DocumentL10n.webidl', - 'DOMCollectedFrames.webidl', - 'DominatorTree.webidl', - 'Flex.webidl', - 'Fluent.webidl', - 'FrameLoader.webidl', - 'HeapSnapshot.webidl', - 'InspectorUtils.webidl', - 'IOUtils.webidl', - 'IteratorResult.webidl', - 'JSActor.webidl', - 'JSProcessActor.webidl', - 'JSWindowActor.webidl', - 'L10nOverlays.webidl', - 'MatchGlob.webidl', - 'MatchPattern.webidl', - 'MediaController.webidl', - 'MessageManager.webidl', - 'MozDocumentObserver.webidl', - 'MozSharedMap.webidl', - 'MozStorageAsyncStatementParams.webidl', - 'MozStorageStatementParams.webidl', - 'MozStorageStatementRow.webidl', - 'PrecompiledScript.webidl', - 'PromiseDebugging.webidl', - 'SessionStoreUtils.webidl', - 'StructuredCloneHolder.webidl', - 'TelemetryStopwatch.webidl', - 'UserInteraction.webidl', - 'WebExtensionContentScript.webidl', - 'WebExtensionPolicy.webidl', - 'WindowGlobalActors.webidl', - 'XULFrameElement.webidl', - 'XULMenuElement.webidl', - 'XULTextElement.webidl', - 'XULTreeElement.webidl' + "BrowsingContext.webidl", + "ChannelWrapper.webidl", + "ClonedErrorHolder.webidl", + "DebuggerNotification.webidl", + "DebuggerNotificationObserver.webidl", + "DebuggerUtils.webidl", + "DocumentL10n.webidl", + "DOMCollectedFrames.webidl", + "DominatorTree.webidl", + "Flex.webidl", + "Fluent.webidl", + "FrameLoader.webidl", + "HeapSnapshot.webidl", + "InspectorUtils.webidl", + "IOUtils.webidl", + "IteratorResult.webidl", + "JSActor.webidl", + "JSProcessActor.webidl", + "JSWindowActor.webidl", + "L10nOverlays.webidl", + "MatchGlob.webidl", + "MatchPattern.webidl", + "MediaController.webidl", + "MessageManager.webidl", + "MozDocumentObserver.webidl", 
+ "MozSharedMap.webidl", + "MozStorageAsyncStatementParams.webidl", + "MozStorageStatementParams.webidl", + "MozStorageStatementRow.webidl", + "PrecompiledScript.webidl", + "PromiseDebugging.webidl", + "SessionStoreUtils.webidl", + "StructuredCloneHolder.webidl", + "TelemetryStopwatch.webidl", + "UserInteraction.webidl", + "WebExtensionContentScript.webidl", + "WebExtensionPolicy.webidl", + "WindowGlobalActors.webidl", + "XULFrameElement.webidl", + "XULMenuElement.webidl", + "XULTextElement.webidl", + "XULTreeElement.webidl", ] -if CONFIG['MOZ_PLACES']: +if CONFIG["MOZ_PLACES"]: WEBIDL_FILES += [ - 'PlacesEvent.webidl', - 'PlacesObservers.webidl', + "PlacesEvent.webidl", + "PlacesObservers.webidl", ] -if CONFIG['MOZ_GLEAN']: +if CONFIG["MOZ_GLEAN"]: WEBIDL_FILES += [ - 'Glean.webidl', + "Glean.webidl", ] WEBIDL_FILES += [ - 'PrioEncoder.webidl', + "PrioEncoder.webidl", ] diff --git a/dom/clients/api/moz.build b/dom/clients/api/moz.build index 1e8e7163645289..4e864290fdf418 100644 --- a/dom/clients/api/moz.build +++ b/dom/clients/api/moz.build @@ -5,24 +5,21 @@ # file, You can obtain one at http://mozilla.org/MPL/2.0/. EXPORTS.mozilla.dom += [ - 'Client.h', - 'Clients.h', + "Client.h", + "Clients.h", ] UNIFIED_SOURCES += [ - 'Client.cpp', - 'Clients.cpp', + "Client.cpp", + "Clients.cpp", ] -include('/ipc/chromium/chromium-config.mozbuild') +include("/ipc/chromium/chromium-config.mozbuild") -FINAL_LIBRARY = 'xul' +FINAL_LIBRARY = "xul" -MOCHITEST_MANIFESTS += [ -] +MOCHITEST_MANIFESTS += [] -BROWSER_CHROME_MANIFESTS += [ -] +BROWSER_CHROME_MANIFESTS += [] -XPCSHELL_TESTS_MANIFESTS += [ -] +XPCSHELL_TESTS_MANIFESTS += [] diff --git a/dom/clients/manager/moz.build b/dom/clients/manager/moz.build index 02f03c77d8af5c..2dfcbffd18b5cc 100644 --- a/dom/clients/manager/moz.build +++ b/dom/clients/manager/moz.build @@ -5,68 +5,65 @@ # file, You can obtain one at http://mozilla.org/MPL/2.0/. 
EXPORTS.mozilla.dom += [ - 'ClientChannelHelper.h', - 'ClientHandle.h', - 'ClientInfo.h', - 'ClientIPCUtils.h', - 'ClientManager.h', - 'ClientManagerActors.h', - 'ClientManagerService.h', - 'ClientOpenWindowUtils.h', - 'ClientOpPromise.h', - 'ClientSource.h', - 'ClientState.h', - 'ClientThing.h', + "ClientChannelHelper.h", + "ClientHandle.h", + "ClientInfo.h", + "ClientIPCUtils.h", + "ClientManager.h", + "ClientManagerActors.h", + "ClientManagerService.h", + "ClientOpenWindowUtils.h", + "ClientOpPromise.h", + "ClientSource.h", + "ClientState.h", + "ClientThing.h", ] UNIFIED_SOURCES += [ - 'ClientChannelHelper.cpp', - 'ClientHandle.cpp', - 'ClientHandleChild.cpp', - 'ClientHandleOpChild.cpp', - 'ClientHandleOpParent.cpp', - 'ClientHandleParent.cpp', - 'ClientInfo.cpp', - 'ClientManager.cpp', - 'ClientManagerActors.cpp', - 'ClientManagerChild.cpp', - 'ClientManagerOpChild.cpp', - 'ClientManagerOpParent.cpp', - 'ClientManagerParent.cpp', - 'ClientManagerService.cpp', - 'ClientNavigateOpChild.cpp', - 'ClientNavigateOpParent.cpp', - 'ClientOpenWindowUtils.cpp', - 'ClientPrincipalUtils.cpp', - 'ClientSource.cpp', - 'ClientSourceChild.cpp', - 'ClientSourceOpChild.cpp', - 'ClientSourceOpParent.cpp', - 'ClientSourceParent.cpp', - 'ClientState.cpp', - 'ClientValidation.cpp', + "ClientChannelHelper.cpp", + "ClientHandle.cpp", + "ClientHandleChild.cpp", + "ClientHandleOpChild.cpp", + "ClientHandleOpParent.cpp", + "ClientHandleParent.cpp", + "ClientInfo.cpp", + "ClientManager.cpp", + "ClientManagerActors.cpp", + "ClientManagerChild.cpp", + "ClientManagerOpChild.cpp", + "ClientManagerOpParent.cpp", + "ClientManagerParent.cpp", + "ClientManagerService.cpp", + "ClientNavigateOpChild.cpp", + "ClientNavigateOpParent.cpp", + "ClientOpenWindowUtils.cpp", + "ClientPrincipalUtils.cpp", + "ClientSource.cpp", + "ClientSourceChild.cpp", + "ClientSourceOpChild.cpp", + "ClientSourceOpParent.cpp", + "ClientSourceParent.cpp", + "ClientState.cpp", + "ClientValidation.cpp", ] IPDL_SOURCES += [ - 'ClientIPCTypes.ipdlh', - 'PClientHandle.ipdl', - 'PClientHandleOp.ipdl', - 'PClientManager.ipdl', - 'PClientManagerOp.ipdl', - 'PClientNavigateOp.ipdl', - 'PClientSource.ipdl', - 'PClientSourceOp.ipdl', + "ClientIPCTypes.ipdlh", + "PClientHandle.ipdl", + "PClientHandleOp.ipdl", + "PClientManager.ipdl", + "PClientManagerOp.ipdl", + "PClientNavigateOp.ipdl", + "PClientSource.ipdl", + "PClientSourceOp.ipdl", ] -include('/ipc/chromium/chromium-config.mozbuild') +include("/ipc/chromium/chromium-config.mozbuild") -FINAL_LIBRARY = 'xul' +FINAL_LIBRARY = "xul" -MOCHITEST_MANIFESTS += [ -] +MOCHITEST_MANIFESTS += [] -BROWSER_CHROME_MANIFESTS += [ -] +BROWSER_CHROME_MANIFESTS += [] -XPCSHELL_TESTS_MANIFESTS += [ -] +XPCSHELL_TESTS_MANIFESTS += [] diff --git a/dom/clients/moz.build b/dom/clients/moz.build index 6a291f5ab60ba3..e90f9987f7af78 100644 --- a/dom/clients/moz.build +++ b/dom/clients/moz.build @@ -5,6 +5,6 @@ # file, You can obtain one at http://mozilla.org/MPL/2.0/. 
DIRS += [ - 'api', - 'manager', + "api", + "manager", ] diff --git a/dom/commandhandler/moz.build b/dom/commandhandler/moz.build index 93bcc1665ac96d..1aaa78e8af25f7 100644 --- a/dom/commandhandler/moz.build +++ b/dom/commandhandler/moz.build @@ -8,31 +8,31 @@ with Files("**"): BUG_COMPONENT = ("Core", "DOM: UI Events & Focus Handling") EXPORTS += [ - 'nsBaseCommandController.h', - 'nsCommandManager.h', - 'nsCommandParams.h', - 'nsControllerCommandTable.h', + "nsBaseCommandController.h", + "nsCommandManager.h", + "nsCommandParams.h", + "nsControllerCommandTable.h", ] XPIDL_SOURCES += [ - 'nsICommandManager.idl', - 'nsICommandParams.idl', - 'nsIControllerCommand.idl', - 'nsIControllerCommandTable.idl', - 'nsIControllerContext.idl', + "nsICommandManager.idl", + "nsICommandParams.idl", + "nsIControllerCommand.idl", + "nsIControllerCommandTable.idl", + "nsIControllerContext.idl", ] -XPIDL_MODULE = 'commandhandler' +XPIDL_MODULE = "commandhandler" UNIFIED_SOURCES += [ - 'nsBaseCommandController.cpp', - 'nsCommandManager.cpp', - 'nsCommandParams.cpp', - 'nsControllerCommandTable.cpp', + "nsBaseCommandController.cpp", + "nsCommandManager.cpp", + "nsCommandParams.cpp", + "nsControllerCommandTable.cpp", ] LOCAL_INCLUDES += [ - '/dom/base', + "/dom/base", ] -FINAL_LIBRARY = 'xul' +FINAL_LIBRARY = "xul" diff --git a/dom/console/moz.build b/dom/console/moz.build index 93dd64e4bc8206..d23553edc3fd02 100644 --- a/dom/console/moz.build +++ b/dom/console/moz.build @@ -8,50 +8,50 @@ with Files("**"): BUG_COMPONENT = ("Core", "DOM: Core & HTML") XPIDL_SOURCES += [ - 'nsIConsoleAPIStorage.idl', + "nsIConsoleAPIStorage.idl", ] -XPIDL_MODULE = 'dom' +XPIDL_MODULE = "dom" EXPORTS += [ - 'nsIConsoleReportCollector.h', + "nsIConsoleReportCollector.h", ] EXPORTS.mozilla += [ - 'ConsoleReportCollector.h', + "ConsoleReportCollector.h", ] EXPORTS.mozilla.dom += [ - 'Console.h', - 'ConsoleInstance.h', - 'ConsoleUtils.h', + "Console.h", + "ConsoleInstance.h", + "ConsoleUtils.h", ] UNIFIED_SOURCES += [ - 'Console.cpp', - 'ConsoleInstance.cpp', - 'ConsoleReportCollector.cpp', - 'ConsoleUtils.cpp', + "Console.cpp", + "ConsoleInstance.cpp", + "ConsoleReportCollector.cpp", + "ConsoleUtils.cpp", ] EXTRA_JS_MODULES += [ - 'ConsoleAPIStorage.jsm', + "ConsoleAPIStorage.jsm", ] XPCOM_MANIFESTS += [ - 'components.conf', + "components.conf", ] LOCAL_INCLUDES += [ - '/docshell/base', - '/dom/base', - '/js/xpconnect/src', + "/docshell/base", + "/dom/base", + "/js/xpconnect/src", ] -MOCHITEST_MANIFESTS += [ 'tests/mochitest.ini' ] -MOCHITEST_CHROME_MANIFESTS += [ 'tests/chrome.ini' ] -XPCSHELL_TESTS_MANIFESTS += ['tests/xpcshell/xpcshell.ini'] +MOCHITEST_MANIFESTS += ["tests/mochitest.ini"] +MOCHITEST_CHROME_MANIFESTS += ["tests/chrome.ini"] +XPCSHELL_TESTS_MANIFESTS += ["tests/xpcshell/xpcshell.ini"] -include('/ipc/chromium/chromium-config.mozbuild') +include("/ipc/chromium/chromium-config.mozbuild") -FINAL_LIBRARY = 'xul' +FINAL_LIBRARY = "xul" diff --git a/dom/credentialmanagement/moz.build b/dom/credentialmanagement/moz.build index 5af0258bd998e3..915a9c15d48a44 100644 --- a/dom/credentialmanagement/moz.build +++ b/dom/credentialmanagement/moz.build @@ -8,18 +8,18 @@ with Files("**"): BUG_COMPONENT = ("Core", "DOM: Device Interfaces") EXPORTS.mozilla.dom += [ - 'Credential.h', - 'CredentialsContainer.h', + "Credential.h", + "CredentialsContainer.h", ] UNIFIED_SOURCES += [ - 'Credential.cpp', - 'CredentialsContainer.cpp', + "Credential.cpp", + "CredentialsContainer.cpp", ] -include('/ipc/chromium/chromium-config.mozbuild') 
+include("/ipc/chromium/chromium-config.mozbuild") -FINAL_LIBRARY = 'xul' +FINAL_LIBRARY = "xul" -MOCHITEST_MANIFESTS += ['tests/mochitest/mochitest.ini'] -BROWSER_CHROME_MANIFESTS += ['tests/browser/browser.ini'] +MOCHITEST_MANIFESTS += ["tests/mochitest/mochitest.ini"] +BROWSER_CHROME_MANIFESTS += ["tests/browser/browser.ini"] diff --git a/dom/crypto/moz.build b/dom/crypto/moz.build index fc53e2a2dc44bd..dda45ec75f5d62 100644 --- a/dom/crypto/moz.build +++ b/dom/crypto/moz.build @@ -8,28 +8,28 @@ with Files("**"): BUG_COMPONENT = ("Core", "DOM: Security") EXPORTS.mozilla.dom += [ - 'CryptoBuffer.h', - 'CryptoKey.h', - 'KeyAlgorithmProxy.h', - 'WebCryptoCommon.h', - 'WebCryptoTask.h', + "CryptoBuffer.h", + "CryptoKey.h", + "KeyAlgorithmProxy.h", + "WebCryptoCommon.h", + "WebCryptoTask.h", ] UNIFIED_SOURCES += [ - 'CryptoBuffer.cpp', - 'CryptoKey.cpp', - 'KeyAlgorithmProxy.cpp', - 'WebCryptoTask.cpp', + "CryptoBuffer.cpp", + "CryptoKey.cpp", + "KeyAlgorithmProxy.cpp", + "WebCryptoTask.cpp", ] -include('/ipc/chromium/chromium-config.mozbuild') +include("/ipc/chromium/chromium-config.mozbuild") -FINAL_LIBRARY = 'xul' +FINAL_LIBRARY = "xul" LOCAL_INCLUDES += [ - '/security/manager/ssl', - '/xpcom/build', + "/security/manager/ssl", + "/xpcom/build", ] -MOCHITEST_MANIFESTS += ['test/mochitest.ini'] -BROWSER_CHROME_MANIFESTS += ['test/browser/browser.ini'] +MOCHITEST_MANIFESTS += ["test/mochitest.ini"] +BROWSER_CHROME_MANIFESTS += ["test/browser/browser.ini"] diff --git a/dom/debugger/moz.build b/dom/debugger/moz.build index 0ebcd3c33deb21..879c3ec237af5b 100644 --- a/dom/debugger/moz.build +++ b/dom/debugger/moz.build @@ -8,19 +8,19 @@ with Files("**"): BUG_COMPONENT = ("DevTools", "Debugger") EXPORTS.mozilla.dom += [ - 'CallbackDebuggerNotification.h', - 'DebuggerNotification.h', - 'DebuggerNotificationManager.h', - 'DebuggerNotificationObserver.h', - 'EventCallbackDebuggerNotification.h', + "CallbackDebuggerNotification.h", + "DebuggerNotification.h", + "DebuggerNotificationManager.h", + "DebuggerNotificationObserver.h", + "EventCallbackDebuggerNotification.h", ] UNIFIED_SOURCES += [ - 'CallbackDebuggerNotification.cpp', - 'DebuggerNotification.cpp', - 'DebuggerNotificationManager.cpp', - 'DebuggerNotificationObserver.cpp', - 'EventCallbackDebuggerNotification.cpp', + "CallbackDebuggerNotification.cpp", + "DebuggerNotification.cpp", + "DebuggerNotificationManager.cpp", + "DebuggerNotificationObserver.cpp", + "EventCallbackDebuggerNotification.cpp", ] -FINAL_LIBRARY = 'xul' \ No newline at end of file +FINAL_LIBRARY = "xul" diff --git a/dom/encoding/moz.build b/dom/encoding/moz.build index e2bc8a5a829b47..778814a3d8b548 100644 --- a/dom/encoding/moz.build +++ b/dom/encoding/moz.build @@ -8,22 +8,22 @@ with Files("**"): BUG_COMPONENT = ("Core", "Internationalization") EXPORTS.mozilla.dom += [ - 'TextDecoder.h', - 'TextEncoder.h', + "TextDecoder.h", + "TextEncoder.h", ] UNIFIED_SOURCES += [ - 'TextDecoder.cpp', - 'TextEncoder.cpp', + "TextDecoder.cpp", + "TextEncoder.cpp", ] -FINAL_LIBRARY = 'xul' +FINAL_LIBRARY = "xul" LOCAL_INCLUDES += [ - '/intl/locale', + "/intl/locale", ] MOCHITEST_MANIFESTS += [ - 'test/mochitest.ini', + "test/mochitest.ini", ] -MOCHITEST_CHROME_MANIFESTS += ['test/chrome.ini'] -XPCSHELL_TESTS_MANIFESTS += ['test/unit/xpcshell.ini'] +MOCHITEST_CHROME_MANIFESTS += ["test/chrome.ini"] +XPCSHELL_TESTS_MANIFESTS += ["test/unit/xpcshell.ini"] diff --git a/dom/events/android/moz.build b/dom/events/android/moz.build index 1d09fbdc7b6724..afe41f013e230b 100644 --- 
a/dom/events/android/moz.build +++ b/dom/events/android/moz.build @@ -4,6 +4,6 @@ # License, v. 2.0. If a copy of the MPL was not distributed with this # file, You can obtain one at http://mozilla.org/MPL/2.0/. -SOURCES += ['ShortcutKeyDefinitions.cpp'] +SOURCES += ["ShortcutKeyDefinitions.cpp"] -FINAL_LIBRARY = 'xul' +FINAL_LIBRARY = "xul" diff --git a/dom/events/emacs/moz.build b/dom/events/emacs/moz.build index 1d09fbdc7b6724..afe41f013e230b 100644 --- a/dom/events/emacs/moz.build +++ b/dom/events/emacs/moz.build @@ -4,6 +4,6 @@ # License, v. 2.0. If a copy of the MPL was not distributed with this # file, You can obtain one at http://mozilla.org/MPL/2.0/. -SOURCES += ['ShortcutKeyDefinitions.cpp'] +SOURCES += ["ShortcutKeyDefinitions.cpp"] -FINAL_LIBRARY = 'xul' +FINAL_LIBRARY = "xul" diff --git a/dom/events/mac/moz.build b/dom/events/mac/moz.build index 1d09fbdc7b6724..afe41f013e230b 100644 --- a/dom/events/mac/moz.build +++ b/dom/events/mac/moz.build @@ -4,6 +4,6 @@ # License, v. 2.0. If a copy of the MPL was not distributed with this # file, You can obtain one at http://mozilla.org/MPL/2.0/. -SOURCES += ['ShortcutKeyDefinitions.cpp'] +SOURCES += ["ShortcutKeyDefinitions.cpp"] -FINAL_LIBRARY = 'xul' +FINAL_LIBRARY = "xul" diff --git a/dom/events/moz.build b/dom/events/moz.build index 003e818a407f1f..3bc019f4e530b7 100644 --- a/dom/events/moz.build +++ b/dom/events/moz.build @@ -10,183 +10,183 @@ with Files("**"): with Files("Event*"): BUG_COMPONENT = ("Core", "DOM: Events") -if CONFIG['OS_ARCH'] == 'WINNT': - DIRS += ['win'] -elif CONFIG['MOZ_WIDGET_TOOLKIT'] == 'cocoa': - DIRS += ['mac'] -elif CONFIG['MOZ_WIDGET_TOOLKIT'] == 'android': - DIRS += ['android'] -elif CONFIG['MOZ_WIDGET_TOOLKIT'] == 'gtk': - DIRS += ['unix'] +if CONFIG["OS_ARCH"] == "WINNT": + DIRS += ["win"] +elif CONFIG["MOZ_WIDGET_TOOLKIT"] == "cocoa": + DIRS += ["mac"] +elif CONFIG["MOZ_WIDGET_TOOLKIT"] == "android": + DIRS += ["android"] +elif CONFIG["MOZ_WIDGET_TOOLKIT"] == "gtk": + DIRS += ["unix"] else: - DIRS += ['emacs'] + DIRS += ["emacs"] BROWSER_CHROME_MANIFESTS += [ - 'test/browser.ini', + "test/browser.ini", ] MOCHITEST_MANIFESTS += [ - 'test/mochitest.ini', - 'test/pointerevents/mochitest.ini', - 'test/pointerevents/pointerlock/mochitest.ini', + "test/mochitest.ini", + "test/pointerevents/mochitest.ini", + "test/pointerevents/pointerlock/mochitest.ini", ] -MOCHITEST_CHROME_MANIFESTS += ['test/chrome.ini'] +MOCHITEST_CHROME_MANIFESTS += ["test/chrome.ini"] XPIDL_SOURCES += [ - 'nsIEventListenerService.idl', + "nsIEventListenerService.idl", ] -XPIDL_MODULE = 'content_events' +XPIDL_MODULE = "content_events" EXPORTS.mozilla += [ - 'AsyncEventDispatcher.h', - 'DOMEventTargetHelper.h', - 'EventDispatcher.h', - 'EventListenerManager.h', - 'EventNameList.h', - 'EventStateManager.h', - 'EventStates.h', - 'GlobalKeyListener.h', - 'IMEContentObserver.h', - 'IMEStateManager.h', - 'InputEventOptions.h', - 'InputTypeList.h', - 'InternalMutationEvent.h', - 'JSEventHandler.h', - 'KeyEventHandler.h', - 'KeyNameList.h', - 'PendingFullscreenEvent.h', - 'PhysicalKeyCodeNameList.h', - 'ShortcutKeys.h', - 'TextComposition.h', - 'VirtualKeyCodeList.h', - 'WheelHandlingHelper.h', + "AsyncEventDispatcher.h", + "DOMEventTargetHelper.h", + "EventDispatcher.h", + "EventListenerManager.h", + "EventNameList.h", + "EventStateManager.h", + "EventStates.h", + "GlobalKeyListener.h", + "IMEContentObserver.h", + "IMEStateManager.h", + "InputEventOptions.h", + "InputTypeList.h", + "InternalMutationEvent.h", + "JSEventHandler.h", + 
"KeyEventHandler.h", + "KeyNameList.h", + "PendingFullscreenEvent.h", + "PhysicalKeyCodeNameList.h", + "ShortcutKeys.h", + "TextComposition.h", + "VirtualKeyCodeList.h", + "WheelHandlingHelper.h", ] EXPORTS.mozilla.dom += [ - 'AnimationEvent.h', - 'BeforeUnloadEvent.h', - 'Clipboard.h', - 'ClipboardEvent.h', - 'CommandEvent.h', - 'CompositionEvent.h', - 'ConstructibleEventTarget.h', - 'CustomEvent.h', - 'DataTransfer.h', - 'DataTransferItem.h', - 'DataTransferItemList.h', - 'DeviceMotionEvent.h', - 'DragEvent.h', - 'Event.h', - 'EventTarget.h', - 'FocusEvent.h', - 'ImageCaptureError.h', - 'InputEvent.h', - 'KeyboardEvent.h', - 'MessageEvent.h', - 'MouseEvent.h', - 'MouseScrollEvent.h', - 'MutationEvent.h', - 'NotifyPaintEvent.h', - 'PaintRequest.h', - 'PointerEvent.h', - 'PointerEventHandler.h', - 'RemoteDragStartData.h', - 'ScrollAreaEvent.h', - 'SimpleGestureEvent.h', - 'StorageEvent.h', - 'TextClause.h', - 'Touch.h', - 'TouchEvent.h', - 'TransitionEvent.h', - 'UIEvent.h', - 'WheelEvent.h', - 'XULCommandEvent.h', + "AnimationEvent.h", + "BeforeUnloadEvent.h", + "Clipboard.h", + "ClipboardEvent.h", + "CommandEvent.h", + "CompositionEvent.h", + "ConstructibleEventTarget.h", + "CustomEvent.h", + "DataTransfer.h", + "DataTransferItem.h", + "DataTransferItemList.h", + "DeviceMotionEvent.h", + "DragEvent.h", + "Event.h", + "EventTarget.h", + "FocusEvent.h", + "ImageCaptureError.h", + "InputEvent.h", + "KeyboardEvent.h", + "MessageEvent.h", + "MouseEvent.h", + "MouseScrollEvent.h", + "MutationEvent.h", + "NotifyPaintEvent.h", + "PaintRequest.h", + "PointerEvent.h", + "PointerEventHandler.h", + "RemoteDragStartData.h", + "ScrollAreaEvent.h", + "SimpleGestureEvent.h", + "StorageEvent.h", + "TextClause.h", + "Touch.h", + "TouchEvent.h", + "TransitionEvent.h", + "UIEvent.h", + "WheelEvent.h", + "XULCommandEvent.h", ] -if CONFIG['MOZ_WEBSPEECH']: - EXPORTS.mozilla.dom += ['SpeechRecognitionError.h'] +if CONFIG["MOZ_WEBSPEECH"]: + EXPORTS.mozilla.dom += ["SpeechRecognitionError.h"] UNIFIED_SOURCES += [ - 'AnimationEvent.cpp', - 'AsyncEventDispatcher.cpp', - 'BeforeUnloadEvent.cpp', - 'Clipboard.cpp', - 'ClipboardEvent.cpp', - 'CommandEvent.cpp', - 'CompositionEvent.cpp', - 'ConstructibleEventTarget.cpp', - 'ContentEventHandler.cpp', - 'CustomEvent.cpp', - 'DataTransfer.cpp', - 'DataTransferItem.cpp', - 'DataTransferItemList.cpp', - 'DeviceMotionEvent.cpp', - 'DOMEventTargetHelper.cpp', - 'DragEvent.cpp', - 'Event.cpp', - 'EventDispatcher.cpp', - 'EventListenerManager.cpp', - 'EventListenerService.cpp', - 'EventTarget.cpp', - 'FocusEvent.cpp', - 'GlobalKeyListener.cpp', - 'ImageCaptureError.cpp', - 'IMEContentObserver.cpp', - 'IMEStateManager.cpp', - 'InputEvent.cpp', - 'JSEventHandler.cpp', - 'KeyboardEvent.cpp', - 'KeyEventHandler.cpp', - 'MessageEvent.cpp', - 'MouseEvent.cpp', - 'MouseScrollEvent.cpp', - 'MutationEvent.cpp', - 'NotifyPaintEvent.cpp', - 'PaintRequest.cpp', - 'PointerEvent.cpp', - 'PointerEventHandler.cpp', - 'RemoteDragStartData.cpp', - 'ScrollAreaEvent.cpp', - 'ShortcutKeys.cpp', - 'SimpleGestureEvent.cpp', - 'StorageEvent.cpp', - 'TextClause.cpp', - 'TextComposition.cpp', - 'Touch.cpp', - 'TouchEvent.cpp', - 'TransitionEvent.cpp', - 'UIEvent.cpp', - 'WheelEvent.cpp', - 'WheelHandlingHelper.cpp', - 'XULCommandEvent.cpp', + "AnimationEvent.cpp", + "AsyncEventDispatcher.cpp", + "BeforeUnloadEvent.cpp", + "Clipboard.cpp", + "ClipboardEvent.cpp", + "CommandEvent.cpp", + "CompositionEvent.cpp", + "ConstructibleEventTarget.cpp", + "ContentEventHandler.cpp", + "CustomEvent.cpp", + 
"DataTransfer.cpp", + "DataTransferItem.cpp", + "DataTransferItemList.cpp", + "DeviceMotionEvent.cpp", + "DOMEventTargetHelper.cpp", + "DragEvent.cpp", + "Event.cpp", + "EventDispatcher.cpp", + "EventListenerManager.cpp", + "EventListenerService.cpp", + "EventTarget.cpp", + "FocusEvent.cpp", + "GlobalKeyListener.cpp", + "ImageCaptureError.cpp", + "IMEContentObserver.cpp", + "IMEStateManager.cpp", + "InputEvent.cpp", + "JSEventHandler.cpp", + "KeyboardEvent.cpp", + "KeyEventHandler.cpp", + "MessageEvent.cpp", + "MouseEvent.cpp", + "MouseScrollEvent.cpp", + "MutationEvent.cpp", + "NotifyPaintEvent.cpp", + "PaintRequest.cpp", + "PointerEvent.cpp", + "PointerEventHandler.cpp", + "RemoteDragStartData.cpp", + "ScrollAreaEvent.cpp", + "ShortcutKeys.cpp", + "SimpleGestureEvent.cpp", + "StorageEvent.cpp", + "TextClause.cpp", + "TextComposition.cpp", + "Touch.cpp", + "TouchEvent.cpp", + "TransitionEvent.cpp", + "UIEvent.cpp", + "WheelEvent.cpp", + "WheelHandlingHelper.cpp", + "XULCommandEvent.cpp", ] # nsEventStateManager.cpp should be built separately because of Mac OS X headers. SOURCES += [ - 'EventStateManager.cpp', + "EventStateManager.cpp", ] -if CONFIG['MOZ_WEBSPEECH']: - UNIFIED_SOURCES += ['SpeechRecognitionError.cpp'] +if CONFIG["MOZ_WEBSPEECH"]: + UNIFIED_SOURCES += ["SpeechRecognitionError.cpp"] -include('/ipc/chromium/chromium-config.mozbuild') +include("/ipc/chromium/chromium-config.mozbuild") -FINAL_LIBRARY = 'xul' +FINAL_LIBRARY = "xul" LOCAL_INCLUDES += [ - '/docshell/base', - '/dom/base', - '/dom/html', - '/dom/storage', - '/dom/xml', - '/dom/xul', - '/js/xpconnect/wrappers', - '/layout/forms', - '/layout/generic', - '/layout/xul', - '/layout/xul/tree/', + "/docshell/base", + "/dom/base", + "/dom/html", + "/dom/storage", + "/dom/xml", + "/dom/xul", + "/js/xpconnect/wrappers", + "/layout/forms", + "/layout/generic", + "/layout/xul", + "/layout/xul/tree/", ] -if CONFIG['CC_TYPE'] in ('clang', 'gcc'): - CXXFLAGS += ['-Wno-error=shadow'] +if CONFIG["CC_TYPE"] in ("clang", "gcc"): + CXXFLAGS += ["-Wno-error=shadow"] diff --git a/dom/events/unix/moz.build b/dom/events/unix/moz.build index 1d09fbdc7b6724..afe41f013e230b 100644 --- a/dom/events/unix/moz.build +++ b/dom/events/unix/moz.build @@ -4,6 +4,6 @@ # License, v. 2.0. If a copy of the MPL was not distributed with this # file, You can obtain one at http://mozilla.org/MPL/2.0/. -SOURCES += ['ShortcutKeyDefinitions.cpp'] +SOURCES += ["ShortcutKeyDefinitions.cpp"] -FINAL_LIBRARY = 'xul' +FINAL_LIBRARY = "xul" diff --git a/dom/events/win/moz.build b/dom/events/win/moz.build index 1d09fbdc7b6724..afe41f013e230b 100644 --- a/dom/events/win/moz.build +++ b/dom/events/win/moz.build @@ -4,6 +4,6 @@ # License, v. 2.0. If a copy of the MPL was not distributed with this # file, You can obtain one at http://mozilla.org/MPL/2.0/. 
-SOURCES += ['ShortcutKeyDefinitions.cpp'] +SOURCES += ["ShortcutKeyDefinitions.cpp"] -FINAL_LIBRARY = 'xul' +FINAL_LIBRARY = "xul" diff --git a/dom/fetch/moz.build b/dom/fetch/moz.build index 558e0855a25b6f..d6be68726a54b5 100644 --- a/dom/fetch/moz.build +++ b/dom/fetch/moz.build @@ -8,58 +8,58 @@ with Files("**"): BUG_COMPONENT = ("Core", "DOM: Networking") EXPORTS.mozilla.dom += [ - 'BodyExtractor.h', - 'ChannelInfo.h', - 'Fetch.h', - 'FetchDriver.h', - 'FetchIPCTypes.h', - 'FetchObserver.h', - 'FetchStreamReader.h', - 'FetchUtil.h', - 'Headers.h', - 'InternalHeaders.h', - 'InternalRequest.h', - 'InternalResponse.h', - 'Request.h', - 'Response.h', + "BodyExtractor.h", + "ChannelInfo.h", + "Fetch.h", + "FetchDriver.h", + "FetchIPCTypes.h", + "FetchObserver.h", + "FetchStreamReader.h", + "FetchUtil.h", + "Headers.h", + "InternalHeaders.h", + "InternalRequest.h", + "InternalResponse.h", + "Request.h", + "Response.h", ] UNIFIED_SOURCES += [ - 'BodyExtractor.cpp', - 'ChannelInfo.cpp', - 'EmptyBody.cpp', - 'Fetch.cpp', - 'FetchDriver.cpp', - 'FetchObserver.cpp', - 'FetchStreamReader.cpp', - 'FetchUtil.cpp', - 'Headers.cpp', - 'InternalHeaders.cpp', - 'InternalRequest.cpp', - 'InternalResponse.cpp', - 'Request.cpp', - 'Response.cpp', + "BodyExtractor.cpp", + "ChannelInfo.cpp", + "EmptyBody.cpp", + "Fetch.cpp", + "FetchDriver.cpp", + "FetchObserver.cpp", + "FetchStreamReader.cpp", + "FetchUtil.cpp", + "Headers.cpp", + "InternalHeaders.cpp", + "InternalRequest.cpp", + "InternalResponse.cpp", + "Request.cpp", + "Response.cpp", ] IPDL_SOURCES += [ - 'ChannelInfo.ipdlh', - 'FetchTypes.ipdlh', + "ChannelInfo.ipdlh", + "FetchTypes.ipdlh", ] LOCAL_INCLUDES += [ # For nsDOMSerializer - '/dom/base', + "/dom/base", # For HttpBaseChannel.h dependencies - '/netwerk/base', + "/netwerk/base", # For nsDataHandler.h - '/netwerk/protocol/data', + "/netwerk/protocol/data", # For HttpBaseChannel.h - '/netwerk/protocol/http', + "/netwerk/protocol/http", ] -BROWSER_CHROME_MANIFESTS += [ 'tests/browser.ini' ] -MOCHITEST_MANIFESTS += [ 'tests/mochitest.ini' ] +BROWSER_CHROME_MANIFESTS += ["tests/browser.ini"] +MOCHITEST_MANIFESTS += ["tests/mochitest.ini"] -FINAL_LIBRARY = 'xul' +FINAL_LIBRARY = "xul" -include('/ipc/chromium/chromium-config.mozbuild') +include("/ipc/chromium/chromium-config.mozbuild") diff --git a/dom/file/ipc/moz.build b/dom/file/ipc/moz.build index cdbffb8016c6ab..ae62ffa2dcd973 100644 --- a/dom/file/ipc/moz.build +++ b/dom/file/ipc/moz.build @@ -8,67 +8,67 @@ with Files("**"): BUG_COMPONENT = ("Core", "DOM: File") XPIDL_SOURCES += [ - 'mozIRemoteLazyInputStream.idl', + "mozIRemoteLazyInputStream.idl", ] -XPIDL_MODULE = 'dom' +XPIDL_MODULE = "dom" EXPORTS.mozilla.dom += [ - 'FileCreatorChild.h', - 'FileCreatorParent.h', - 'IPCBlobUtils.h', - 'TemporaryIPCBlobChild.h', - 'TemporaryIPCBlobParent.h', + "FileCreatorChild.h", + "FileCreatorParent.h", + "IPCBlobUtils.h", + "TemporaryIPCBlobChild.h", + "TemporaryIPCBlobParent.h", ] EXPORTS.mozilla += [ - 'RemoteLazyInputStream.h', - 'RemoteLazyInputStreamChild.h', - 'RemoteLazyInputStreamParent.h', - 'RemoteLazyInputStreamStorage.h', - 'RemoteLazyInputStreamThread.h', - 'RemoteLazyInputStreamUtils.h', + "RemoteLazyInputStream.h", + "RemoteLazyInputStreamChild.h", + "RemoteLazyInputStreamParent.h", + "RemoteLazyInputStreamStorage.h", + "RemoteLazyInputStreamThread.h", + "RemoteLazyInputStreamUtils.h", ] UNIFIED_SOURCES += [ - 'FileCreatorChild.cpp', - 'FileCreatorParent.cpp', - 'IPCBlobUtils.cpp', - 'RemoteLazyInputStream.cpp', - 
'RemoteLazyInputStreamChild.cpp', - 'RemoteLazyInputStreamParent.cpp', - 'RemoteLazyInputStreamStorage.cpp', - 'RemoteLazyInputStreamThread.cpp', - 'RemoteLazyInputStreamUtils.cpp', - 'TemporaryIPCBlobChild.cpp', - 'TemporaryIPCBlobParent.cpp', + "FileCreatorChild.cpp", + "FileCreatorParent.cpp", + "IPCBlobUtils.cpp", + "RemoteLazyInputStream.cpp", + "RemoteLazyInputStreamChild.cpp", + "RemoteLazyInputStreamParent.cpp", + "RemoteLazyInputStreamStorage.cpp", + "RemoteLazyInputStreamThread.cpp", + "RemoteLazyInputStreamUtils.cpp", + "TemporaryIPCBlobChild.cpp", + "TemporaryIPCBlobParent.cpp", ] IPDL_SOURCES += [ - 'BlobTypes.ipdlh', - 'IPCBlob.ipdlh', - 'PFileCreator.ipdl', - 'PRemoteLazyInputStream.ipdl', - 'PTemporaryIPCBlob.ipdl', + "BlobTypes.ipdlh", + "IPCBlob.ipdlh", + "PFileCreator.ipdl", + "PRemoteLazyInputStream.ipdl", + "PTemporaryIPCBlob.ipdl", ] LOCAL_INCLUDES += [ - '/dom/file', - '/dom/ipc', - '/xpcom/build', + "/dom/file", + "/dom/ipc", + "/xpcom/build", ] -include('/ipc/chromium/chromium-config.mozbuild') +include("/ipc/chromium/chromium-config.mozbuild") # Add libFuzzer configuration directives -include('/tools/fuzzing/libfuzzer-config.mozbuild') +include("/tools/fuzzing/libfuzzer-config.mozbuild") -FINAL_LIBRARY = 'xul' +FINAL_LIBRARY = "xul" -CXXFLAGS += CONFIG['TK_CFLAGS'] +CXXFLAGS += CONFIG["TK_CFLAGS"] -if CONFIG['CC_TYPE'] in ('clang', 'gcc'): - CXXFLAGS += ['-Wno-error=shadow'] +if CONFIG["CC_TYPE"] in ("clang", "gcc"): + CXXFLAGS += ["-Wno-error=shadow"] -BROWSER_CHROME_MANIFESTS += ['tests/browser.ini'] -MOCHITEST_MANIFESTS += ['tests/mochitest.ini'] +BROWSER_CHROME_MANIFESTS += ["tests/browser.ini"] +MOCHITEST_MANIFESTS += ["tests/mochitest.ini"] diff --git a/dom/file/moz.build b/dom/file/moz.build index 4b7295231e76c0..2a5831454b9362 100644 --- a/dom/file/moz.build +++ b/dom/file/moz.build @@ -7,56 +7,56 @@ with Files("**"): BUG_COMPONENT = ("Core", "DOM: File") -DIRS += ['ipc', 'uri' ] +DIRS += ["ipc", "uri"] EXPORTS.mozilla.dom += [ - 'BaseBlobImpl.h', - 'Blob.h', - 'BlobImpl.h', - 'BlobSet.h', - 'EmptyBlobImpl.h', - 'File.h', - 'FileBlobImpl.h', - 'FileCreatorHelper.h', - 'FileList.h', - 'FileReader.h', - 'FileReaderSync.h', - 'MemoryBlobImpl.h', - 'MultipartBlobImpl.h', - 'MutableBlobStorage.h', - 'MutableBlobStreamListener.h', - 'StreamBlobImpl.h', + "BaseBlobImpl.h", + "Blob.h", + "BlobImpl.h", + "BlobSet.h", + "EmptyBlobImpl.h", + "File.h", + "FileBlobImpl.h", + "FileCreatorHelper.h", + "FileList.h", + "FileReader.h", + "FileReaderSync.h", + "MemoryBlobImpl.h", + "MultipartBlobImpl.h", + "MutableBlobStorage.h", + "MutableBlobStreamListener.h", + "StreamBlobImpl.h", ] UNIFIED_SOURCES += [ - 'BaseBlobImpl.cpp', - 'Blob.cpp', - 'BlobImpl.cpp', - 'BlobSet.cpp', - 'EmptyBlobImpl.cpp', - 'File.cpp', - 'FileBlobImpl.cpp', - 'FileCreatorHelper.cpp', - 'FileList.cpp', - 'FileReader.cpp', - 'FileReaderSync.cpp', - 'MemoryBlobImpl.cpp', - 'MultipartBlobImpl.cpp', - 'MutableBlobStorage.cpp', - 'MutableBlobStreamListener.cpp', - 'StreamBlobImpl.cpp', - 'StringBlobImpl.cpp', - 'TemporaryFileBlobImpl.cpp', + "BaseBlobImpl.cpp", + "Blob.cpp", + "BlobImpl.cpp", + "BlobSet.cpp", + "EmptyBlobImpl.cpp", + "File.cpp", + "FileBlobImpl.cpp", + "FileCreatorHelper.cpp", + "FileList.cpp", + "FileReader.cpp", + "FileReaderSync.cpp", + "MemoryBlobImpl.cpp", + "MultipartBlobImpl.cpp", + "MutableBlobStorage.cpp", + "MutableBlobStreamListener.cpp", + "StreamBlobImpl.cpp", + "StringBlobImpl.cpp", + "TemporaryFileBlobImpl.cpp", ] LOCAL_INCLUDES += [ - '/dom/file/ipc', + "/dom/file/ipc", ] 
-MOCHITEST_MANIFESTS += ['tests/mochitest.ini'] +MOCHITEST_MANIFESTS += ["tests/mochitest.ini"] -XPCSHELL_TESTS_MANIFESTS += ['tests/xpcshell.ini'] +XPCSHELL_TESTS_MANIFESTS += ["tests/xpcshell.ini"] -include('/ipc/chromium/chromium-config.mozbuild') +include("/ipc/chromium/chromium-config.mozbuild") -FINAL_LIBRARY = 'xul' +FINAL_LIBRARY = "xul" diff --git a/dom/file/uri/moz.build b/dom/file/uri/moz.build index 8b63d0b2c828d9..50fc4044445650 100644 --- a/dom/file/uri/moz.build +++ b/dom/file/uri/moz.build @@ -8,29 +8,29 @@ with Files("**"): BUG_COMPONENT = ("Core", "DOM: File") EXPORTS.mozilla.dom += [ - 'BlobURL.h', - 'BlobURLInputStream.h', - 'BlobURLProtocolHandler.h', - 'FontTableURIProtocolHandler.h', + "BlobURL.h", + "BlobURLInputStream.h", + "BlobURLProtocolHandler.h", + "FontTableURIProtocolHandler.h", ] UNIFIED_SOURCES += [ - 'BlobURL.cpp', - 'BlobURLChannel.cpp', - 'BlobURLInputStream.cpp', - 'BlobURLProtocolHandler.cpp', - 'FontTableURIProtocolHandler.cpp', + "BlobURL.cpp", + "BlobURLChannel.cpp", + "BlobURLInputStream.cpp", + "BlobURLProtocolHandler.cpp", + "FontTableURIProtocolHandler.cpp", ] XPCOM_MANIFESTS += [ - 'components.conf', + "components.conf", ] LOCAL_INCLUDES += [ - '/dom/file', - '/netwerk/base', + "/dom/file", + "/netwerk/base", ] -include('/ipc/chromium/chromium-config.mozbuild') +include("/ipc/chromium/chromium-config.mozbuild") -FINAL_LIBRARY = 'xul' +FINAL_LIBRARY = "xul" diff --git a/dom/filehandle/moz.build b/dom/filehandle/moz.build index bc637652ebbfb1..929163b3d39b8e 100644 --- a/dom/filehandle/moz.build +++ b/dom/filehandle/moz.build @@ -8,28 +8,28 @@ with Files("**"): BUG_COMPONENT = ("Core", "DOM: File") EXPORTS.mozilla.dom.filehandle += [ - 'ActorsParent.h', - 'SerializationHelpers.h', + "ActorsParent.h", + "SerializationHelpers.h", ] EXPORTS.mozilla.dom += [ - 'FileHandleStorage.h', + "FileHandleStorage.h", ] UNIFIED_SOURCES += [ - 'ActorsParent.cpp', + "ActorsParent.cpp", ] IPDL_SOURCES += [ - 'PBackgroundFileHandle.ipdl', - 'PBackgroundFileRequest.ipdl', - 'PBackgroundMutableFile.ipdl', + "PBackgroundFileHandle.ipdl", + "PBackgroundFileRequest.ipdl", + "PBackgroundMutableFile.ipdl", ] -include('/ipc/chromium/chromium-config.mozbuild') +include("/ipc/chromium/chromium-config.mozbuild") LOCAL_INCLUDES += [ - '../base', + "../base", ] -FINAL_LIBRARY = 'xul' +FINAL_LIBRARY = "xul" diff --git a/dom/filesystem/compat/moz.build b/dom/filesystem/compat/moz.build index fc3f9482f450a2..6757a73d9e6e16 100644 --- a/dom/filesystem/compat/moz.build +++ b/dom/filesystem/compat/moz.build @@ -4,27 +4,27 @@ # License, v. 2.0. If a copy of the MPL was not distributed with this # file, You can obtain one at http://mozilla.org/MPL/2.0/. 
-TEST_DIRS += ['tests'] +TEST_DIRS += ["tests"] EXPORTS.mozilla.dom += [ - 'FileSystem.h', - 'FileSystemDirectoryEntry.h', - 'FileSystemDirectoryReader.h', - 'FileSystemEntry.h', - 'FileSystemFileEntry.h', + "FileSystem.h", + "FileSystemDirectoryEntry.h", + "FileSystemDirectoryReader.h", + "FileSystemEntry.h", + "FileSystemFileEntry.h", ] UNIFIED_SOURCES += [ - 'CallbackRunnables.cpp', - 'FileSystem.cpp', - 'FileSystemDirectoryEntry.cpp', - 'FileSystemDirectoryReader.cpp', - 'FileSystemEntry.cpp', - 'FileSystemFileEntry.cpp', - 'FileSystemRootDirectoryEntry.cpp', - 'FileSystemRootDirectoryReader.cpp', + "CallbackRunnables.cpp", + "FileSystem.cpp", + "FileSystemDirectoryEntry.cpp", + "FileSystemDirectoryReader.cpp", + "FileSystemEntry.cpp", + "FileSystemFileEntry.cpp", + "FileSystemRootDirectoryEntry.cpp", + "FileSystemRootDirectoryReader.cpp", ] -FINAL_LIBRARY = 'xul' +FINAL_LIBRARY = "xul" -include('/ipc/chromium/chromium-config.mozbuild') +include("/ipc/chromium/chromium-config.mozbuild") diff --git a/dom/filesystem/compat/tests/moz.build b/dom/filesystem/compat/tests/moz.build index 3b13ba431aa97d..7c990fbc62babe 100644 --- a/dom/filesystem/compat/tests/moz.build +++ b/dom/filesystem/compat/tests/moz.build @@ -4,4 +4,4 @@ # License, v. 2.0. If a copy of the MPL was not distributed with this # file, You can obtain one at http://mozilla.org/MPL/2.0/. -MOCHITEST_MANIFESTS += ['mochitest.ini'] +MOCHITEST_MANIFESTS += ["mochitest.ini"] diff --git a/dom/filesystem/moz.build b/dom/filesystem/moz.build index aabcce257bafe1..8287f857770373 100644 --- a/dom/filesystem/moz.build +++ b/dom/filesystem/moz.build @@ -7,44 +7,44 @@ with Files("**"): BUG_COMPONENT = ("Core", "DOM: File") -DIRS += ['compat'] +DIRS += ["compat"] -TEST_DIRS += ['tests'] +TEST_DIRS += ["tests"] EXPORTS.mozilla.dom += [ - 'Directory.h', - 'FileSystemBase.h', - 'FileSystemRequestParent.h', - 'FileSystemSecurity.h', - 'FileSystemTaskBase.h', - 'FileSystemUtils.h', - 'GetFilesHelper.h', - 'OSFileSystem.h', + "Directory.h", + "FileSystemBase.h", + "FileSystemRequestParent.h", + "FileSystemSecurity.h", + "FileSystemTaskBase.h", + "FileSystemUtils.h", + "GetFilesHelper.h", + "OSFileSystem.h", ] UNIFIED_SOURCES += [ - 'Directory.cpp', - 'FileSystemBase.cpp', - 'FileSystemRequestParent.cpp', - 'FileSystemSecurity.cpp', - 'FileSystemTaskBase.cpp', - 'FileSystemUtils.cpp', - 'GetDirectoryListingTask.cpp', - 'GetFileOrDirectoryTask.cpp', - 'GetFilesHelper.cpp', - 'GetFilesTask.cpp', - 'OSFileSystem.cpp', + "Directory.cpp", + "FileSystemBase.cpp", + "FileSystemRequestParent.cpp", + "FileSystemSecurity.cpp", + "FileSystemTaskBase.cpp", + "FileSystemUtils.cpp", + "GetDirectoryListingTask.cpp", + "GetFileOrDirectoryTask.cpp", + "GetFilesHelper.cpp", + "GetFilesTask.cpp", + "OSFileSystem.cpp", ] -FINAL_LIBRARY = 'xul' +FINAL_LIBRARY = "xul" IPDL_SOURCES += [ - 'PFileSystemParams.ipdlh', - 'PFileSystemRequest.ipdl', + "PFileSystemParams.ipdlh", + "PFileSystemRequest.ipdl", ] -include('/ipc/chromium/chromium-config.mozbuild') +include("/ipc/chromium/chromium-config.mozbuild") LOCAL_INCLUDES += [ - '/dom/base', + "/dom/base", ] diff --git a/dom/filesystem/tests/moz.build b/dom/filesystem/tests/moz.build index 3b13ba431aa97d..7c990fbc62babe 100644 --- a/dom/filesystem/tests/moz.build +++ b/dom/filesystem/tests/moz.build @@ -4,4 +4,4 @@ # License, v. 2.0. If a copy of the MPL was not distributed with this # file, You can obtain one at http://mozilla.org/MPL/2.0/. 
-MOCHITEST_MANIFESTS += ['mochitest.ini'] +MOCHITEST_MANIFESTS += ["mochitest.ini"] diff --git a/dom/flex/moz.build b/dom/flex/moz.build index 9445ee667719dd..802ac5b18cb90a 100644 --- a/dom/flex/moz.build +++ b/dom/flex/moz.build @@ -7,22 +7,22 @@ with Files("**"): BUG_COMPONENT = ("Core", "CSS Parsing and Computation") -MOCHITEST_CHROME_MANIFESTS += ['test/chrome.ini'] +MOCHITEST_CHROME_MANIFESTS += ["test/chrome.ini"] EXPORTS.mozilla.dom += [ - 'Flex.h', - 'FlexItemValues.h', - 'FlexLineValues.h', + "Flex.h", + "FlexItemValues.h", + "FlexLineValues.h", ] UNIFIED_SOURCES += [ - 'Flex.cpp', - 'FlexItemValues.cpp', - 'FlexLineValues.cpp', + "Flex.cpp", + "FlexItemValues.cpp", + "FlexLineValues.cpp", ] LOCAL_INCLUDES += [ - '/layout/generic', + "/layout/generic", ] -FINAL_LIBRARY = 'xul' +FINAL_LIBRARY = "xul" diff --git a/dom/gamepad/moz.build b/dom/gamepad/moz.build index 04314f94e3fe53..927f688c27abcf 100644 --- a/dom/gamepad/moz.build +++ b/dom/gamepad/moz.build @@ -8,83 +8,73 @@ with Files("**"): BUG_COMPONENT = ("Core", "DOM: Device Interfaces") IPDL_SOURCES += [ - 'ipc/GamepadEventTypes.ipdlh', - 'ipc/PGamepadEventChannel.ipdl', - 'ipc/PGamepadTestChannel.ipdl' + "ipc/GamepadEventTypes.ipdlh", + "ipc/PGamepadEventChannel.ipdl", + "ipc/PGamepadTestChannel.ipdl", ] EXPORTS.mozilla.dom += [ - 'Gamepad.h', - 'GamepadButton.h', - 'GamepadHapticActuator.h', - 'GamepadLightIndicator.h', - 'GamepadManager.h', - 'GamepadMonitoring.h', - 'GamepadPlatformService.h', - 'GamepadPose.h', - 'GamepadPoseState.h', - 'GamepadRemapping.h', - 'GamepadServiceTest.h', - 'GamepadTouch.h', - 'GamepadTouchState.h', - 'ipc/GamepadEventChannelChild.h', - 'ipc/GamepadEventChannelParent.h', - 'ipc/GamepadMessageUtils.h', - 'ipc/GamepadServiceType.h', - 'ipc/GamepadTestChannelChild.h', - 'ipc/GamepadTestChannelParent.h' + "Gamepad.h", + "GamepadButton.h", + "GamepadHapticActuator.h", + "GamepadLightIndicator.h", + "GamepadManager.h", + "GamepadMonitoring.h", + "GamepadPlatformService.h", + "GamepadPose.h", + "GamepadPoseState.h", + "GamepadRemapping.h", + "GamepadServiceTest.h", + "GamepadTouch.h", + "GamepadTouchState.h", + "ipc/GamepadEventChannelChild.h", + "ipc/GamepadEventChannelParent.h", + "ipc/GamepadMessageUtils.h", + "ipc/GamepadServiceType.h", + "ipc/GamepadTestChannelChild.h", + "ipc/GamepadTestChannelParent.h", ] UNIFIED_SOURCES = [ - 'Gamepad.cpp', - 'GamepadButton.cpp', - 'GamepadHapticActuator.cpp', - 'GamepadLightIndicator.cpp', - 'GamepadManager.cpp', - 'GamepadPlatformService.cpp', - 'GamepadPose.cpp', - 'GamepadRemapping.cpp', - 'GamepadServiceTest.cpp', - 'GamepadTouch.cpp', - 'ipc/GamepadEventChannelChild.cpp', - 'ipc/GamepadEventChannelParent.cpp', - 'ipc/GamepadTestChannelChild.cpp', - 'ipc/GamepadTestChannelParent.cpp' + "Gamepad.cpp", + "GamepadButton.cpp", + "GamepadHapticActuator.cpp", + "GamepadLightIndicator.cpp", + "GamepadManager.cpp", + "GamepadPlatformService.cpp", + "GamepadPose.cpp", + "GamepadRemapping.cpp", + "GamepadServiceTest.cpp", + "GamepadTouch.cpp", + "ipc/GamepadEventChannelChild.cpp", + "ipc/GamepadEventChannelParent.cpp", + "ipc/GamepadTestChannelChild.cpp", + "ipc/GamepadTestChannelParent.cpp", ] -if CONFIG['MOZ_WIDGET_TOOLKIT'] == 'cocoa': - UNIFIED_SOURCES += [ - 'cocoa/CocoaGamepad.cpp' - ] -elif CONFIG['MOZ_WIDGET_TOOLKIT'] == 'windows': - UNIFIED_SOURCES += [ - 'windows/WindowsGamepad.cpp' - ] -elif CONFIG['MOZ_WIDGET_TOOLKIT'] == 'android': - UNIFIED_SOURCES += [ - 'android/AndroidGamepad.cpp' - ] -elif CONFIG['OS_ARCH'] == 'Linux': - UNIFIED_SOURCES += [ - 
'linux/LinuxGamepad.cpp' - ] +if CONFIG["MOZ_WIDGET_TOOLKIT"] == "cocoa": + UNIFIED_SOURCES += ["cocoa/CocoaGamepad.cpp"] +elif CONFIG["MOZ_WIDGET_TOOLKIT"] == "windows": + UNIFIED_SOURCES += ["windows/WindowsGamepad.cpp"] +elif CONFIG["MOZ_WIDGET_TOOLKIT"] == "android": + UNIFIED_SOURCES += ["android/AndroidGamepad.cpp"] +elif CONFIG["OS_ARCH"] == "Linux": + UNIFIED_SOURCES += ["linux/LinuxGamepad.cpp"] else: - UNIFIED_SOURCES += [ - 'fallback/FallbackGamepad.cpp' - ] + UNIFIED_SOURCES += ["fallback/FallbackGamepad.cpp"] LOCAL_INCLUDES += [ - 'ipc', + "ipc", ] -include('/ipc/chromium/chromium-config.mozbuild') +include("/ipc/chromium/chromium-config.mozbuild") -FINAL_LIBRARY = 'xul' +FINAL_LIBRARY = "xul" LOCAL_INCLUDES += [ - '/dom/base', + "/dom/base", ] -CFLAGS += CONFIG['GLIB_CFLAGS'] -CFLAGS += CONFIG['MOZ_DBUS_GLIB_CFLAGS'] -CXXFLAGS += CONFIG['GLIB_CFLAGS'] -CXXFLAGS += CONFIG['MOZ_DBUS_GLIB_CFLAGS'] +CFLAGS += CONFIG["GLIB_CFLAGS"] +CFLAGS += CONFIG["MOZ_DBUS_GLIB_CFLAGS"] +CXXFLAGS += CONFIG["GLIB_CFLAGS"] +CXXFLAGS += CONFIG["MOZ_DBUS_GLIB_CFLAGS"] diff --git a/dom/geolocation/moz.build b/dom/geolocation/moz.build index 3914dae497de0d..5c7f7471818cf2 100644 --- a/dom/geolocation/moz.build +++ b/dom/geolocation/moz.build @@ -8,50 +8,50 @@ with Files("**"): BUG_COMPONENT = ("Core", "DOM: Geolocation") EXPORTS += [ - 'nsGeoPositionIPCSerialiser.h', + "nsGeoPositionIPCSerialiser.h", ] EXPORTS.mozilla.dom += [ - 'Geolocation.h', - 'GeolocationCoordinates.h', - 'GeolocationPosition.h', - 'GeolocationPositionError.h', + "Geolocation.h", + "GeolocationCoordinates.h", + "GeolocationPosition.h", + "GeolocationPositionError.h", ] SOURCES += [ - 'Geolocation.cpp', - 'GeolocationCoordinates.cpp', - 'GeolocationPosition.cpp', - 'GeolocationPositionError.cpp', + "Geolocation.cpp", + "GeolocationCoordinates.cpp", + "GeolocationPosition.cpp", + "GeolocationPositionError.cpp", ] UNIFIED_SOURCES += [ - 'MLSFallback.cpp', + "MLSFallback.cpp", ] -include('/ipc/chromium/chromium-config.mozbuild') +include("/ipc/chromium/chromium-config.mozbuild") -FINAL_LIBRARY = 'xul' +FINAL_LIBRARY = "xul" LOCAL_INCLUDES += [ - '/dom/base', - '/dom/ipc', + "/dom/base", + "/dom/ipc", ] -if CONFIG['MOZ_WIDGET_TOOLKIT'] == 'android': +if CONFIG["MOZ_WIDGET_TOOLKIT"] == "android": LOCAL_INCLUDES += [ - '/dom/system/android', + "/dom/system/android", ] -elif CONFIG['MOZ_WIDGET_TOOLKIT'] == 'cocoa': +elif CONFIG["MOZ_WIDGET_TOOLKIT"] == "cocoa": LOCAL_INCLUDES += [ - '/dom/system/mac', + "/dom/system/mac", ] -elif CONFIG['MOZ_WIDGET_TOOLKIT'] == 'windows': +elif CONFIG["MOZ_WIDGET_TOOLKIT"] == "windows": LOCAL_INCLUDES += [ - '/dom/system/windows', + "/dom/system/windows", ] -elif CONFIG['MOZ_WIDGET_TOOLKIT'] == 'gtk': - if CONFIG['MOZ_GPSD']: +elif CONFIG["MOZ_WIDGET_TOOLKIT"] == "gtk": + if CONFIG["MOZ_GPSD"]: LOCAL_INCLUDES += [ - '/dom/system/linux', + "/dom/system/linux", ] - DEFINES['MOZ_GPSD'] = True + DEFINES["MOZ_GPSD"] = True diff --git a/dom/grid/moz.build b/dom/grid/moz.build index ce16cc44322bc9..64d54fd8c62816 100644 --- a/dom/grid/moz.build +++ b/dom/grid/moz.build @@ -7,30 +7,30 @@ with Files("**"): BUG_COMPONENT = ("Core", "CSS Parsing and Computation") -MOCHITEST_CHROME_MANIFESTS += ['test/chrome.ini'] +MOCHITEST_CHROME_MANIFESTS += ["test/chrome.ini"] EXPORTS.mozilla.dom += [ - 'Grid.h', - 'GridArea.h', - 'GridDimension.h', - 'GridLine.h', - 'GridLines.h', - 'GridTrack.h', - 'GridTracks.h', + "Grid.h", + "GridArea.h", + "GridDimension.h", + "GridLine.h", + "GridLines.h", + "GridTrack.h", + 
"GridTracks.h", ] UNIFIED_SOURCES += [ - 'Grid.cpp', - 'GridArea.cpp', - 'GridDimension.cpp', - 'GridLine.cpp', - 'GridLines.cpp', - 'GridTrack.cpp', - 'GridTracks.cpp', + "Grid.cpp", + "GridArea.cpp", + "GridDimension.cpp", + "GridLine.cpp", + "GridLines.cpp", + "GridTrack.cpp", + "GridTracks.cpp", ] LOCAL_INCLUDES += [ - '/layout/generic', + "/layout/generic", ] -FINAL_LIBRARY = 'xul' +FINAL_LIBRARY = "xul" diff --git a/dom/html/input/moz.build b/dom/html/input/moz.build index 7c7e72f43e49fc..f71a29cb0cadcb 100644 --- a/dom/html/input/moz.build +++ b/dom/html/input/moz.build @@ -5,36 +5,35 @@ # file, You can obtain one at http://mozilla.org/MPL/2.0/. EXPORTS.mozilla.dom += [ - 'ButtonInputTypes.h', - 'CheckableInputTypes.h', - 'ColorInputType.h', - 'DateTimeInputTypes.h', - 'FileInputType.h', - 'HiddenInputType.h', - 'InputType.h', - 'NumericInputTypes.h', - 'SingleLineTextInputTypes.h', + "ButtonInputTypes.h", + "CheckableInputTypes.h", + "ColorInputType.h", + "DateTimeInputTypes.h", + "FileInputType.h", + "HiddenInputType.h", + "InputType.h", + "NumericInputTypes.h", + "SingleLineTextInputTypes.h", ] UNIFIED_SOURCES += [ - 'CheckableInputTypes.cpp', - 'DateTimeInputTypes.cpp', - 'FileInputType.cpp', - 'InputType.cpp', - 'NumericInputTypes.cpp', - 'SingleLineTextInputTypes.cpp', + "CheckableInputTypes.cpp", + "DateTimeInputTypes.cpp", + "FileInputType.cpp", + "InputType.cpp", + "NumericInputTypes.cpp", + "SingleLineTextInputTypes.cpp", ] -include('/ipc/chromium/chromium-config.mozbuild') +include("/ipc/chromium/chromium-config.mozbuild") LOCAL_INCLUDES += [ - '/dom/base', - '/dom/html', - '/layout/forms', + "/dom/base", + "/dom/html", + "/layout/forms", ] -FINAL_LIBRARY = 'xul' - -if CONFIG['CC_TYPE'] in ('clang', 'gcc'): - CXXFLAGS += ['-Wno-error=shadow'] +FINAL_LIBRARY = "xul" +if CONFIG["CC_TYPE"] in ("clang", "gcc"): + CXXFLAGS += ["-Wno-error=shadow"] diff --git a/dom/html/moz.build b/dom/html/moz.build index b0977f9af31a11..32da1462414da8 100644 --- a/dom/html/moz.build +++ b/dom/html/moz.build @@ -7,258 +7,258 @@ with Files("**"): BUG_COMPONENT = ("Core", "DOM: Core & HTML") -DIRS += ['input'] +DIRS += ["input"] MOCHITEST_MANIFESTS += [ - 'test/dialog/mochitest.ini', - 'test/forms/mochitest.ini', - 'test/mochitest.ini', + "test/dialog/mochitest.ini", + "test/forms/mochitest.ini", + "test/mochitest.ini", ] MOCHITEST_CHROME_MANIFESTS += [ - 'test/chrome.ini', - 'test/forms/chrome.ini', + "test/chrome.ini", + "test/forms/chrome.ini", ] -BROWSER_CHROME_MANIFESTS += ['test/browser.ini'] +BROWSER_CHROME_MANIFESTS += ["test/browser.ini"] XPIDL_SOURCES += [ - 'nsIFormSubmitObserver.idl', - 'nsIMenuBuilder.idl', + "nsIFormSubmitObserver.idl", + "nsIMenuBuilder.idl", ] -XPIDL_MODULE = 'content_html' +XPIDL_MODULE = "content_html" EXPORTS += [ - 'nsGenericHTMLElement.h', - 'nsGenericHTMLFrameElement.h', - 'nsHTMLDNSPrefetch.h', - 'nsHTMLDocument.h', - 'nsIConstraintValidation.h', - 'nsIForm.h', - 'nsIFormControl.h', - 'nsIHTMLCollection.h', - 'nsIRadioGroupContainer.h', - 'nsIRadioVisitor.h', + "nsGenericHTMLElement.h", + "nsGenericHTMLFrameElement.h", + "nsHTMLDNSPrefetch.h", + "nsHTMLDocument.h", + "nsIConstraintValidation.h", + "nsIForm.h", + "nsIFormControl.h", + "nsIHTMLCollection.h", + "nsIRadioGroupContainer.h", + "nsIRadioVisitor.h", ] EXPORTS.mozilla += [ - 'TextControlElement.h', - 'TextControlState.h', - 'TextInputListener.h', + "TextControlElement.h", + "TextControlState.h", + "TextInputListener.h", ] EXPORTS.mozilla.dom += [ - 'ElementInternals.h', - 'HTMLAllCollection.h', - 
'HTMLAnchorElement.h', - 'HTMLAreaElement.h', - 'HTMLAudioElement.h', - 'HTMLBodyElement.h', - 'HTMLBRElement.h', - 'HTMLButtonElement.h', - 'HTMLCanvasElement.h', - 'HTMLDataElement.h', - 'HTMLDataListElement.h', - 'HTMLDetailsElement.h', - 'HTMLDialogElement.h', - 'HTMLDivElement.h', - 'HTMLEmbedElement.h', - 'HTMLFieldSetElement.h', - 'HTMLFontElement.h', - 'HTMLFormControlsCollection.h', - 'HTMLFormElement.h', - 'HTMLFormSubmission.h', - 'HTMLFrameElement.h', - 'HTMLFrameSetElement.h', - 'HTMLHeadingElement.h', - 'HTMLHRElement.h', - 'HTMLIFrameElement.h', - 'HTMLImageElement.h', - 'HTMLInputElement.h', - 'HTMLLabelElement.h', - 'HTMLLegendElement.h', - 'HTMLLIElement.h', - 'HTMLLinkElement.h', - 'HTMLMapElement.h', - 'HTMLMarqueeElement.h', - 'HTMLMediaElement.h', - 'HTMLMenuElement.h', - 'HTMLMenuItemElement.h', - 'HTMLMetaElement.h', - 'HTMLMeterElement.h', - 'HTMLModElement.h', - 'HTMLObjectElement.h', - 'HTMLOptGroupElement.h', - 'HTMLOptionElement.h', - 'HTMLOptionsCollection.h', - 'HTMLOutputElement.h', - 'HTMLParagraphElement.h', - 'HTMLPictureElement.h', - 'HTMLPreElement.h', - 'HTMLProgressElement.h', - 'HTMLScriptElement.h', - 'HTMLSelectElement.h', - 'HTMLSharedElement.h', - 'HTMLSharedListElement.h', - 'HTMLSlotElement.h', - 'HTMLSourceElement.h', - 'HTMLSpanElement.h', - 'HTMLStyleElement.h', - 'HTMLSummaryElement.h', - 'HTMLTableCaptionElement.h', - 'HTMLTableCellElement.h', - 'HTMLTableColElement.h', - 'HTMLTableElement.h', - 'HTMLTableRowElement.h', - 'HTMLTableSectionElement.h', - 'HTMLTemplateElement.h', - 'HTMLTextAreaElement.h', - 'HTMLTimeElement.h', - 'HTMLTitleElement.h', - 'HTMLTrackElement.h', - 'HTMLUnknownElement.h', - 'HTMLVideoElement.h', - 'ImageDocument.h', - 'MediaError.h', - 'nsBrowserElement.h', - 'PlayPromise.h', - 'RadioNodeList.h', - 'TextTrackManager.h', - 'TimeRanges.h', - 'ValidityState.h', + "ElementInternals.h", + "HTMLAllCollection.h", + "HTMLAnchorElement.h", + "HTMLAreaElement.h", + "HTMLAudioElement.h", + "HTMLBodyElement.h", + "HTMLBRElement.h", + "HTMLButtonElement.h", + "HTMLCanvasElement.h", + "HTMLDataElement.h", + "HTMLDataListElement.h", + "HTMLDetailsElement.h", + "HTMLDialogElement.h", + "HTMLDivElement.h", + "HTMLEmbedElement.h", + "HTMLFieldSetElement.h", + "HTMLFontElement.h", + "HTMLFormControlsCollection.h", + "HTMLFormElement.h", + "HTMLFormSubmission.h", + "HTMLFrameElement.h", + "HTMLFrameSetElement.h", + "HTMLHeadingElement.h", + "HTMLHRElement.h", + "HTMLIFrameElement.h", + "HTMLImageElement.h", + "HTMLInputElement.h", + "HTMLLabelElement.h", + "HTMLLegendElement.h", + "HTMLLIElement.h", + "HTMLLinkElement.h", + "HTMLMapElement.h", + "HTMLMarqueeElement.h", + "HTMLMediaElement.h", + "HTMLMenuElement.h", + "HTMLMenuItemElement.h", + "HTMLMetaElement.h", + "HTMLMeterElement.h", + "HTMLModElement.h", + "HTMLObjectElement.h", + "HTMLOptGroupElement.h", + "HTMLOptionElement.h", + "HTMLOptionsCollection.h", + "HTMLOutputElement.h", + "HTMLParagraphElement.h", + "HTMLPictureElement.h", + "HTMLPreElement.h", + "HTMLProgressElement.h", + "HTMLScriptElement.h", + "HTMLSelectElement.h", + "HTMLSharedElement.h", + "HTMLSharedListElement.h", + "HTMLSlotElement.h", + "HTMLSourceElement.h", + "HTMLSpanElement.h", + "HTMLStyleElement.h", + "HTMLSummaryElement.h", + "HTMLTableCaptionElement.h", + "HTMLTableCellElement.h", + "HTMLTableColElement.h", + "HTMLTableElement.h", + "HTMLTableRowElement.h", + "HTMLTableSectionElement.h", + "HTMLTemplateElement.h", + "HTMLTextAreaElement.h", + "HTMLTimeElement.h", + "HTMLTitleElement.h", + 
"HTMLTrackElement.h", + "HTMLUnknownElement.h", + "HTMLVideoElement.h", + "ImageDocument.h", + "MediaError.h", + "nsBrowserElement.h", + "PlayPromise.h", + "RadioNodeList.h", + "TextTrackManager.h", + "TimeRanges.h", + "ValidityState.h", ] UNIFIED_SOURCES += [ - 'ElementInternals.cpp', - 'HTMLAllCollection.cpp', - 'HTMLAnchorElement.cpp', - 'HTMLAreaElement.cpp', - 'HTMLAudioElement.cpp', - 'HTMLBodyElement.cpp', - 'HTMLBRElement.cpp', - 'HTMLButtonElement.cpp', - 'HTMLCanvasElement.cpp', - 'HTMLDataElement.cpp', - 'HTMLDataListElement.cpp', - 'HTMLDetailsElement.cpp', - 'HTMLDialogElement.cpp', - 'HTMLDivElement.cpp', - 'HTMLElement.cpp', - 'HTMLEmbedElement.cpp', - 'HTMLFieldSetElement.cpp', - 'HTMLFontElement.cpp', - 'HTMLFormControlsCollection.cpp', - 'HTMLFormElement.cpp', - 'HTMLFormSubmission.cpp', - 'HTMLFrameElement.cpp', - 'HTMLFrameSetElement.cpp', - 'HTMLHeadingElement.cpp', - 'HTMLHRElement.cpp', - 'HTMLIFrameElement.cpp', - 'HTMLImageElement.cpp', - 'HTMLInputElement.cpp', - 'HTMLLabelElement.cpp', - 'HTMLLegendElement.cpp', - 'HTMLLIElement.cpp', - 'HTMLLinkElement.cpp', - 'HTMLMapElement.cpp', - 'HTMLMarqueeElement.cpp', - 'HTMLMediaElement.cpp', - 'HTMLMenuElement.cpp', - 'HTMLMenuItemElement.cpp', - 'HTMLMetaElement.cpp', - 'HTMLMeterElement.cpp', - 'HTMLModElement.cpp', - 'HTMLObjectElement.cpp', - 'HTMLOptGroupElement.cpp', - 'HTMLOptionElement.cpp', - 'HTMLOptionsCollection.cpp', - 'HTMLOutputElement.cpp', - 'HTMLParagraphElement.cpp', - 'HTMLPictureElement.cpp', - 'HTMLPreElement.cpp', - 'HTMLProgressElement.cpp', - 'HTMLScriptElement.cpp', - 'HTMLSelectElement.cpp', - 'HTMLSharedElement.cpp', - 'HTMLSharedListElement.cpp', - 'HTMLSlotElement.cpp', - 'HTMLSourceElement.cpp', - 'HTMLSpanElement.cpp', - 'HTMLStyleElement.cpp', - 'HTMLSummaryElement.cpp', - 'HTMLTableCaptionElement.cpp', - 'HTMLTableCellElement.cpp', - 'HTMLTableColElement.cpp', - 'HTMLTableElement.cpp', - 'HTMLTableRowElement.cpp', - 'HTMLTableSectionElement.cpp', - 'HTMLTemplateElement.cpp', - 'HTMLTextAreaElement.cpp', - 'HTMLTimeElement.cpp', - 'HTMLTitleElement.cpp', - 'HTMLTrackElement.cpp', - 'HTMLUnknownElement.cpp', - 'HTMLVideoElement.cpp', - 'ImageDocument.cpp', - 'MediaDocument.cpp', - 'MediaError.cpp', - 'nsBrowserElement.cpp', - 'nsDOMStringMap.cpp', - 'nsGenericHTMLElement.cpp', - 'nsGenericHTMLFrameElement.cpp', - 'nsHTMLContentSink.cpp', - 'nsHTMLDNSPrefetch.cpp', - 'nsHTMLDocument.cpp', - 'nsIConstraintValidation.cpp', - 'nsRadioVisitor.cpp', - 'PlayPromise.cpp', - 'RadioNodeList.cpp', - 'TextControlState.cpp', - 'TextTrackManager.cpp', - 'TimeRanges.cpp', - 'ValidityState.cpp', - 'VideoDocument.cpp', + "ElementInternals.cpp", + "HTMLAllCollection.cpp", + "HTMLAnchorElement.cpp", + "HTMLAreaElement.cpp", + "HTMLAudioElement.cpp", + "HTMLBodyElement.cpp", + "HTMLBRElement.cpp", + "HTMLButtonElement.cpp", + "HTMLCanvasElement.cpp", + "HTMLDataElement.cpp", + "HTMLDataListElement.cpp", + "HTMLDetailsElement.cpp", + "HTMLDialogElement.cpp", + "HTMLDivElement.cpp", + "HTMLElement.cpp", + "HTMLEmbedElement.cpp", + "HTMLFieldSetElement.cpp", + "HTMLFontElement.cpp", + "HTMLFormControlsCollection.cpp", + "HTMLFormElement.cpp", + "HTMLFormSubmission.cpp", + "HTMLFrameElement.cpp", + "HTMLFrameSetElement.cpp", + "HTMLHeadingElement.cpp", + "HTMLHRElement.cpp", + "HTMLIFrameElement.cpp", + "HTMLImageElement.cpp", + "HTMLInputElement.cpp", + "HTMLLabelElement.cpp", + "HTMLLegendElement.cpp", + "HTMLLIElement.cpp", + "HTMLLinkElement.cpp", + "HTMLMapElement.cpp", + "HTMLMarqueeElement.cpp", + 
"HTMLMediaElement.cpp", + "HTMLMenuElement.cpp", + "HTMLMenuItemElement.cpp", + "HTMLMetaElement.cpp", + "HTMLMeterElement.cpp", + "HTMLModElement.cpp", + "HTMLObjectElement.cpp", + "HTMLOptGroupElement.cpp", + "HTMLOptionElement.cpp", + "HTMLOptionsCollection.cpp", + "HTMLOutputElement.cpp", + "HTMLParagraphElement.cpp", + "HTMLPictureElement.cpp", + "HTMLPreElement.cpp", + "HTMLProgressElement.cpp", + "HTMLScriptElement.cpp", + "HTMLSelectElement.cpp", + "HTMLSharedElement.cpp", + "HTMLSharedListElement.cpp", + "HTMLSlotElement.cpp", + "HTMLSourceElement.cpp", + "HTMLSpanElement.cpp", + "HTMLStyleElement.cpp", + "HTMLSummaryElement.cpp", + "HTMLTableCaptionElement.cpp", + "HTMLTableCellElement.cpp", + "HTMLTableColElement.cpp", + "HTMLTableElement.cpp", + "HTMLTableRowElement.cpp", + "HTMLTableSectionElement.cpp", + "HTMLTemplateElement.cpp", + "HTMLTextAreaElement.cpp", + "HTMLTimeElement.cpp", + "HTMLTitleElement.cpp", + "HTMLTrackElement.cpp", + "HTMLUnknownElement.cpp", + "HTMLVideoElement.cpp", + "ImageDocument.cpp", + "MediaDocument.cpp", + "MediaError.cpp", + "nsBrowserElement.cpp", + "nsDOMStringMap.cpp", + "nsGenericHTMLElement.cpp", + "nsGenericHTMLFrameElement.cpp", + "nsHTMLContentSink.cpp", + "nsHTMLDNSPrefetch.cpp", + "nsHTMLDocument.cpp", + "nsIConstraintValidation.cpp", + "nsRadioVisitor.cpp", + "PlayPromise.cpp", + "RadioNodeList.cpp", + "TextControlState.cpp", + "TextTrackManager.cpp", + "TimeRanges.cpp", + "ValidityState.cpp", + "VideoDocument.cpp", ] SOURCES += [ # Includes npapi.h. - 'PluginDocument.cpp', + "PluginDocument.cpp", ] EXTRA_JS_MODULES += [ - 'HTMLMenuBuilder.jsm', + "HTMLMenuBuilder.jsm", ] XPCOM_MANIFESTS += [ - 'components.conf', + "components.conf", ] -include('/ipc/chromium/chromium-config.mozbuild') +include("/ipc/chromium/chromium-config.mozbuild") LOCAL_INCLUDES += [ - '/caps', - '/docshell/base', - '/dom/base', - '/dom/canvas', - '/dom/html/input', - '/dom/media', - '/dom/security', - '/dom/xul', - '/image', - '/layout/forms', - '/layout/generic', - '/layout/style', - '/layout/tables', - '/layout/xul', - '/netwerk/base', + "/caps", + "/docshell/base", + "/dom/base", + "/dom/canvas", + "/dom/html/input", + "/dom/media", + "/dom/security", + "/dom/xul", + "/image", + "/layout/forms", + "/layout/generic", + "/layout/style", + "/layout/tables", + "/layout/xul", + "/netwerk/base", ] -FINAL_LIBRARY = 'xul' +FINAL_LIBRARY = "xul" -if CONFIG['MOZ_ANDROID_HLS_SUPPORT']: - DEFINES['MOZ_ANDROID_HLS_SUPPORT'] = True +if CONFIG["MOZ_ANDROID_HLS_SUPPORT"]: + DEFINES["MOZ_ANDROID_HLS_SUPPORT"] = True -if CONFIG['CC_TYPE'] in ('clang', 'gcc'): - CXXFLAGS += ['-Wno-error=shadow'] +if CONFIG["CC_TYPE"] in ("clang", "gcc"): + CXXFLAGS += ["-Wno-error=shadow"] diff --git a/dom/indexedDB/moz.build b/dom/indexedDB/moz.build index eb84dbd9b12199..8a1a5fe12c9e3f 100644 --- a/dom/indexedDB/moz.build +++ b/dom/indexedDB/moz.build @@ -8,121 +8,121 @@ with Files("**"): BUG_COMPONENT = ("Core", "Storage: IndexedDB") MOCHITEST_MANIFESTS += [ - 'test/mochitest-intl-api.ini', - 'test/mochitest.ini', + "test/mochitest-intl-api.ini", + "test/mochitest.ini", ] -BROWSER_CHROME_MANIFESTS += ['test/browser.ini'] +BROWSER_CHROME_MANIFESTS += ["test/browser.ini"] -MOCHITEST_CHROME_MANIFESTS += ['test/chrome.ini'] +MOCHITEST_CHROME_MANIFESTS += ["test/chrome.ini"] XPCSHELL_TESTS_MANIFESTS += [ - 'test/unit/xpcshell-child-process.ini', - 'test/unit/xpcshell-parent-process.ini' + "test/unit/xpcshell-child-process.ini", + "test/unit/xpcshell-parent-process.ini", ] -TEST_DIRS += 
['test/gtest'] +TEST_DIRS += ["test/gtest"] EXPORTS.mozilla.dom += [ - 'FileInfoFwd.h', - 'FlippedOnce.h', - 'IDBCursor.h', - 'IDBCursorType.h', - 'IDBDatabase.h', - 'IDBEvents.h', - 'IDBFactory.h', - 'IDBFileHandle.h', - 'IDBFileRequest.h', - 'IDBIndex.h', - 'IDBKeyRange.h', - 'IDBMutableFile.h', - 'IDBObjectStore.h', - 'IDBRequest.h', - 'IDBTransaction.h', - 'IndexedDatabase.h', - 'IndexedDatabaseManager.h', - 'SafeRefPtr.h', + "FileInfoFwd.h", + "FlippedOnce.h", + "IDBCursor.h", + "IDBCursorType.h", + "IDBDatabase.h", + "IDBEvents.h", + "IDBFactory.h", + "IDBFileHandle.h", + "IDBFileRequest.h", + "IDBIndex.h", + "IDBKeyRange.h", + "IDBMutableFile.h", + "IDBObjectStore.h", + "IDBRequest.h", + "IDBTransaction.h", + "IndexedDatabase.h", + "IndexedDatabaseManager.h", + "SafeRefPtr.h", ] EXPORTS.mozilla.dom.indexedDB += [ - 'ActorsChild.h', - 'ActorsParent.h', - 'IDBResult.h', - 'Key.h', - 'KeyPath.h', - 'PermissionRequestBase.h', - 'SerializationHelpers.h', - 'ThreadLocal.h', + "ActorsChild.h", + "ActorsParent.h", + "IDBResult.h", + "Key.h", + "KeyPath.h", + "PermissionRequestBase.h", + "SerializationHelpers.h", + "ThreadLocal.h", ] UNIFIED_SOURCES += [ - 'ActorsChild.cpp', - 'ActorsParentCommon.cpp', - 'DBSchema.cpp', - 'FileInfo.cpp', - 'IDBCursor.cpp', - 'IDBCursorType.cpp', - 'IDBDatabase.cpp', - 'IDBEvents.cpp', - 'IDBFactory.cpp', - 'IDBFileHandle.cpp', - 'IDBFileRequest.cpp', - 'IDBIndex.cpp', - 'IDBKeyRange.cpp', - 'IDBMutableFile.cpp', - 'IDBObjectStore.cpp', - 'IDBRequest.cpp', - 'IDBTransaction.cpp', - 'IndexedDatabase.cpp', - 'IndexedDatabaseManager.cpp', - 'IndexedDBCommon.cpp', - 'KeyPath.cpp', - 'PermissionRequestBase.cpp', - 'ProfilerHelpers.cpp', - 'ReportInternalError.cpp', - 'SchemaUpgrades.cpp', - 'ScriptErrorHelper.cpp', + "ActorsChild.cpp", + "ActorsParentCommon.cpp", + "DBSchema.cpp", + "FileInfo.cpp", + "IDBCursor.cpp", + "IDBCursorType.cpp", + "IDBDatabase.cpp", + "IDBEvents.cpp", + "IDBFactory.cpp", + "IDBFileHandle.cpp", + "IDBFileRequest.cpp", + "IDBIndex.cpp", + "IDBKeyRange.cpp", + "IDBMutableFile.cpp", + "IDBObjectStore.cpp", + "IDBRequest.cpp", + "IDBTransaction.cpp", + "IndexedDatabase.cpp", + "IndexedDatabaseManager.cpp", + "IndexedDBCommon.cpp", + "KeyPath.cpp", + "PermissionRequestBase.cpp", + "ProfilerHelpers.cpp", + "ReportInternalError.cpp", + "SchemaUpgrades.cpp", + "ScriptErrorHelper.cpp", ] SOURCES += [ - 'ActorsParent.cpp', # This file is huge. - 'Key.cpp', # We disable a warning on this file only + "ActorsParent.cpp", # This file is huge. 
+ "Key.cpp", # We disable a warning on this file only ] IPDL_SOURCES += [ - 'PBackgroundIDBCursor.ipdl', - 'PBackgroundIDBDatabase.ipdl', - 'PBackgroundIDBDatabaseFile.ipdl', - 'PBackgroundIDBDatabaseRequest.ipdl', - 'PBackgroundIDBFactory.ipdl', - 'PBackgroundIDBFactoryRequest.ipdl', - 'PBackgroundIDBRequest.ipdl', - 'PBackgroundIDBSharedTypes.ipdlh', - 'PBackgroundIDBTransaction.ipdl', - 'PBackgroundIDBVersionChangeTransaction.ipdl', - 'PBackgroundIndexedDBUtils.ipdl', + "PBackgroundIDBCursor.ipdl", + "PBackgroundIDBDatabase.ipdl", + "PBackgroundIDBDatabaseFile.ipdl", + "PBackgroundIDBDatabaseRequest.ipdl", + "PBackgroundIDBFactory.ipdl", + "PBackgroundIDBFactoryRequest.ipdl", + "PBackgroundIDBRequest.ipdl", + "PBackgroundIDBSharedTypes.ipdlh", + "PBackgroundIDBTransaction.ipdl", + "PBackgroundIDBVersionChangeTransaction.ipdl", + "PBackgroundIndexedDBUtils.ipdl", ] -include('/ipc/chromium/chromium-config.mozbuild') +include("/ipc/chromium/chromium-config.mozbuild") -FINAL_LIBRARY = 'xul' +FINAL_LIBRARY = "xul" -if CONFIG['CC_TYPE'] in ('clang', 'gcc'): +if CONFIG["CC_TYPE"] in ("clang", "gcc"): # Suppress gcc warning about a comparison being always false due to the # range of the data type - SOURCES['Key.cpp'].flags += ['-Wno-error=type-limits'] - CXXFLAGS += ['-Wno-error=shadow'] + SOURCES["Key.cpp"].flags += ["-Wno-error=type-limits"] + CXXFLAGS += ["-Wno-error=shadow"] LOCAL_INCLUDES += [ - '/dom/base', - '/dom/storage', - '/ipc/glue', - '/third_party/sqlite3/src', - '/xpcom/build', + "/dom/base", + "/dom/storage", + "/ipc/glue", + "/third_party/sqlite3/src", + "/xpcom/build", ] XPIDL_SOURCES += [ - 'nsIIDBPermissionsRequest.idl', + "nsIIDBPermissionsRequest.idl", ] -XPIDL_MODULE = 'dom_indexeddb' +XPIDL_MODULE = "dom_indexeddb" diff --git a/dom/indexedDB/test/gtest/moz.build b/dom/indexedDB/test/gtest/moz.build index f8fae9c62dd91c..c6d1569c2fcf26 100644 --- a/dom/indexedDB/test/gtest/moz.build +++ b/dom/indexedDB/test/gtest/moz.build @@ -3,21 +3,21 @@ # file, you can obtain one at http://mozilla.org/MPL/2.0/. UNIFIED_SOURCES = [ - 'TestIDBResult.cpp', + "TestIDBResult.cpp", ] # not UNIFIED_SOURCES because TestKey.cpp has classes in an anonymous namespace # which result in a GCC error when used in tests, cf. 
gfx/tests/gtest/moz.build SOURCES = [ - 'TestFileInfo.cpp', - 'TestKey.cpp', - 'TestSafeRefPtr.cpp', + "TestFileInfo.cpp", + "TestKey.cpp", + "TestSafeRefPtr.cpp", ] -include('/ipc/chromium/chromium-config.mozbuild') +include("/ipc/chromium/chromium-config.mozbuild") -FINAL_LIBRARY = 'xul-gtest' +FINAL_LIBRARY = "xul-gtest" LOCAL_INCLUDES += [ - '/dom/indexedDB', + "/dom/indexedDB", ] diff --git a/dom/interfaces/base/moz.build b/dom/interfaces/base/moz.build index d6f2f19591f8ff..3749363ea1eb89 100644 --- a/dom/interfaces/base/moz.build +++ b/dom/interfaces/base/moz.build @@ -8,27 +8,26 @@ with Files("**"): BUG_COMPONENT = ("Core", "DOM: Core & HTML") XPIDL_SOURCES += [ - 'domstubs.idl', - 'nsIBrowser.idl', - 'nsIBrowserChild.idl', - 'nsIBrowserDOMWindow.idl', - 'nsIBrowserUsage.idl', - 'nsIContentPermissionPrompt.idl', - 'nsIContentPrefService2.idl', - 'nsIContentProcess.idl', - 'nsIDOMChromeWindow.idl', - 'nsIDOMGlobalPropertyInitializer.idl', - 'nsIDOMWindow.idl', - 'nsIDOMWindowUtils.idl', - 'nsIFocusManager.idl', - 'nsIPermissionDelegateHandler.idl', - 'nsIQueryContentEventResult.idl', - 'nsIRemoteTab.idl', - 'nsIServiceWorkerManager.idl', - 'nsIStructuredCloneContainer.idl', - 'nsITextInputProcessor.idl', - 'nsITextInputProcessorCallback.idl', + "domstubs.idl", + "nsIBrowser.idl", + "nsIBrowserChild.idl", + "nsIBrowserDOMWindow.idl", + "nsIBrowserUsage.idl", + "nsIContentPermissionPrompt.idl", + "nsIContentPrefService2.idl", + "nsIContentProcess.idl", + "nsIDOMChromeWindow.idl", + "nsIDOMGlobalPropertyInitializer.idl", + "nsIDOMWindow.idl", + "nsIDOMWindowUtils.idl", + "nsIFocusManager.idl", + "nsIPermissionDelegateHandler.idl", + "nsIQueryContentEventResult.idl", + "nsIRemoteTab.idl", + "nsIServiceWorkerManager.idl", + "nsIStructuredCloneContainer.idl", + "nsITextInputProcessor.idl", + "nsITextInputProcessorCallback.idl", ] -XPIDL_MODULE = 'dom_base' - +XPIDL_MODULE = "dom_base" diff --git a/dom/interfaces/events/moz.build b/dom/interfaces/events/moz.build index 82f75cebc94d46..8d0d35b064a811 100644 --- a/dom/interfaces/events/moz.build +++ b/dom/interfaces/events/moz.build @@ -8,8 +8,7 @@ with Files("**"): BUG_COMPONENT = ("Core", "DOM: Events") XPIDL_SOURCES += [ - 'nsIDOMEventListener.idl', + "nsIDOMEventListener.idl", ] -XPIDL_MODULE = 'dom_events' - +XPIDL_MODULE = "dom_events" diff --git a/dom/interfaces/geolocation/moz.build b/dom/interfaces/geolocation/moz.build index 6d2b3738fecddc..22917ef311db62 100644 --- a/dom/interfaces/geolocation/moz.build +++ b/dom/interfaces/geolocation/moz.build @@ -8,11 +8,10 @@ with Files("**"): BUG_COMPONENT = ("Core", "DOM: Geolocation") XPIDL_SOURCES += [ - 'nsIDOMGeoPosition.idl', - 'nsIDOMGeoPositionCallback.idl', - 'nsIDOMGeoPositionCoords.idl', - 'nsIDOMGeoPositionErrorCallback.idl', + "nsIDOMGeoPosition.idl", + "nsIDOMGeoPositionCallback.idl", + "nsIDOMGeoPositionCoords.idl", + "nsIDOMGeoPositionErrorCallback.idl", ] -XPIDL_MODULE = 'dom_geolocation' - +XPIDL_MODULE = "dom_geolocation" diff --git a/dom/interfaces/html/moz.build b/dom/interfaces/html/moz.build index 735ebf522f1d74..516bce30e203fa 100644 --- a/dom/interfaces/html/moz.build +++ b/dom/interfaces/html/moz.build @@ -8,9 +8,8 @@ with Files("**"): BUG_COMPONENT = ("Core", "DOM: Core & HTML") XPIDL_SOURCES += [ - 'nsIDOMMozBrowserFrame.idl', - 'nsIMozBrowserFrame.idl', + "nsIDOMMozBrowserFrame.idl", + "nsIMozBrowserFrame.idl", ] -XPIDL_MODULE = 'dom_html' - +XPIDL_MODULE = "dom_html" diff --git a/dom/interfaces/notification/moz.build b/dom/interfaces/notification/moz.build index 
07e6d2074c398f..a4e9cfc9d493f2 100644 --- a/dom/interfaces/notification/moz.build +++ b/dom/interfaces/notification/moz.build @@ -8,8 +8,7 @@ with Files("**"): BUG_COMPONENT = ("Core", "DOM: Push Notifications") XPIDL_SOURCES += [ - 'nsINotificationStorage.idl', + "nsINotificationStorage.idl", ] -XPIDL_MODULE = 'dom_notification' - +XPIDL_MODULE = "dom_notification" diff --git a/dom/interfaces/payments/moz.build b/dom/interfaces/payments/moz.build index a05864dd95cf77..f7e1229a82c827 100644 --- a/dom/interfaces/payments/moz.build +++ b/dom/interfaces/payments/moz.build @@ -5,11 +5,11 @@ # file, You can obtain one at http://mozilla.org/MPL/2.0/. XPIDL_SOURCES += [ - 'nsIPaymentActionResponse.idl', - 'nsIPaymentAddress.idl', - 'nsIPaymentRequest.idl', - 'nsIPaymentRequestService.idl', - 'nsIPaymentUIService.idl', + "nsIPaymentActionResponse.idl", + "nsIPaymentAddress.idl", + "nsIPaymentRequest.idl", + "nsIPaymentRequestService.idl", + "nsIPaymentUIService.idl", ] -XPIDL_MODULE = 'dom_payments' +XPIDL_MODULE = "dom_payments" diff --git a/dom/interfaces/push/moz.build b/dom/interfaces/push/moz.build index 4833a0d62765e4..b544cdf1955e33 100644 --- a/dom/interfaces/push/moz.build +++ b/dom/interfaces/push/moz.build @@ -8,9 +8,9 @@ with Files("**"): BUG_COMPONENT = ("Core", "DOM: Push Notifications") XPIDL_SOURCES += [ - 'nsIPushErrorReporter.idl', - 'nsIPushNotifier.idl', - 'nsIPushService.idl', + "nsIPushErrorReporter.idl", + "nsIPushNotifier.idl", + "nsIPushService.idl", ] -XPIDL_MODULE = 'dom_push' +XPIDL_MODULE = "dom_push" diff --git a/dom/interfaces/security/moz.build b/dom/interfaces/security/moz.build index 185e1b63305752..e58715b360fdf4 100644 --- a/dom/interfaces/security/moz.build +++ b/dom/interfaces/security/moz.build @@ -8,10 +8,9 @@ with Files("**"): BUG_COMPONENT = ("Core", "DOM: Security") XPIDL_SOURCES += [ - 'nsIContentSecurityManager.idl', - 'nsIContentSecurityPolicy.idl', - 'nsIReferrerInfo.idl' + "nsIContentSecurityManager.idl", + "nsIContentSecurityPolicy.idl", + "nsIReferrerInfo.idl", ] -XPIDL_MODULE = 'dom_security' - +XPIDL_MODULE = "dom_security" diff --git a/dom/interfaces/sidebar/moz.build b/dom/interfaces/sidebar/moz.build index ee9df75762295f..4bcfea139f5799 100644 --- a/dom/interfaces/sidebar/moz.build +++ b/dom/interfaces/sidebar/moz.build @@ -8,8 +8,7 @@ with Files("**"): BUG_COMPONENT = ("Core", "DOM: Core & HTML") XPIDL_SOURCES += [ - 'nsIWebProtocolHandlerRegistrar.idl', + "nsIWebProtocolHandlerRegistrar.idl", ] -XPIDL_MODULE = 'dom_sidebar' - +XPIDL_MODULE = "dom_sidebar" diff --git a/dom/interfaces/storage/moz.build b/dom/interfaces/storage/moz.build index 4178eef3de6dae..be637fadffd0f0 100644 --- a/dom/interfaces/storage/moz.build +++ b/dom/interfaces/storage/moz.build @@ -8,8 +8,8 @@ with Files("**"): BUG_COMPONENT = ("Core", "Storage: localStorage & sessionStorage") XPIDL_SOURCES += [ - 'nsIDOMStorageManager.idl', - 'nsIStorageActivityService.idl', + "nsIDOMStorageManager.idl", + "nsIStorageActivityService.idl", ] -XPIDL_MODULE = 'dom_storage' +XPIDL_MODULE = "dom_storage" diff --git a/dom/interfaces/xul/moz.build b/dom/interfaces/xul/moz.build index 25fbe4d0e353b3..5c7b5d3a9d2448 100644 --- a/dom/interfaces/xul/moz.build +++ b/dom/interfaces/xul/moz.build @@ -8,17 +8,16 @@ with Files("**"): BUG_COMPONENT = ("Core", "XUL") XPIDL_SOURCES += [ - 'nsIDOMXULButtonElement.idl', - 'nsIDOMXULCommandDispatcher.idl', - 'nsIDOMXULContainerElement.idl', - 'nsIDOMXULControlElement.idl', - 'nsIDOMXULMenuListElement.idl', - 'nsIDOMXULMultSelectCntrlEl.idl', - 
'nsIDOMXULRadioGroupElement.idl', - 'nsIDOMXULRelatedElement.idl', - 'nsIDOMXULSelectCntrlEl.idl', - 'nsIDOMXULSelectCntrlItemEl.idl', + "nsIDOMXULButtonElement.idl", + "nsIDOMXULCommandDispatcher.idl", + "nsIDOMXULContainerElement.idl", + "nsIDOMXULControlElement.idl", + "nsIDOMXULMenuListElement.idl", + "nsIDOMXULMultSelectCntrlEl.idl", + "nsIDOMXULRadioGroupElement.idl", + "nsIDOMXULRelatedElement.idl", + "nsIDOMXULSelectCntrlEl.idl", + "nsIDOMXULSelectCntrlItemEl.idl", ] -XPIDL_MODULE = 'dom_xul' - +XPIDL_MODULE = "dom_xul" diff --git a/dom/ipc/fuzztest/moz.build b/dom/ipc/fuzztest/moz.build index 472a2679293bf9..831c9fd9a9c63d 100644 --- a/dom/ipc/fuzztest/moz.build +++ b/dom/ipc/fuzztest/moz.build @@ -4,19 +4,17 @@ # License, v. 2.0. If a copy of the MPL was not distributed with this # file, You can obtain one at http://mozilla.org/MPL/2.0/. -Library('FuzzingContentParentIPC') +Library("FuzzingContentParentIPC") LOCAL_INCLUDES += [ - '/dom/base', + "/dom/base", ] -SOURCES += [ - 'content_parent_ipc_libfuzz.cpp' -] +SOURCES += ["content_parent_ipc_libfuzz.cpp"] -include('/ipc/chromium/chromium-config.mozbuild') +include("/ipc/chromium/chromium-config.mozbuild") -FINAL_LIBRARY = 'xul-gtest' +FINAL_LIBRARY = "xul-gtest" # Add libFuzzer configuration directives -include('/tools/fuzzing/libfuzzer-config.mozbuild') +include("/tools/fuzzing/libfuzzer-config.mozbuild") diff --git a/dom/ipc/jsactor/moz.build b/dom/ipc/jsactor/moz.build index cd630feea1a93d..6bc66df923a8bf 100644 --- a/dom/ipc/jsactor/moz.build +++ b/dom/ipc/jsactor/moz.build @@ -5,38 +5,38 @@ # file, You can obtain one at http://mozilla.org/MPL/2.0/. EXPORTS.mozilla.dom += [ - 'JSActor.h', - 'JSActorManager.h', - 'JSActorService.h', - 'JSProcessActorChild.h', - 'JSProcessActorParent.h', - 'JSProcessActorProtocol.h', - 'JSWindowActorChild.h', - 'JSWindowActorParent.h', - 'JSWindowActorProtocol.h', + "JSActor.h", + "JSActorManager.h", + "JSActorService.h", + "JSProcessActorChild.h", + "JSProcessActorParent.h", + "JSProcessActorProtocol.h", + "JSWindowActorChild.h", + "JSWindowActorParent.h", + "JSWindowActorProtocol.h", ] EXPORTS += [ - 'nsQueryActor.h', + "nsQueryActor.h", ] UNIFIED_SOURCES += [ - 'JSActor.cpp', - 'JSActorManager.cpp', - 'JSActorService.cpp', - 'JSProcessActorChild.cpp', - 'JSProcessActorParent.cpp', - 'JSProcessActorProtocol.cpp', - 'JSWindowActorChild.cpp', - 'JSWindowActorParent.cpp', - 'JSWindowActorProtocol.cpp', + "JSActor.cpp", + "JSActorManager.cpp", + "JSActorService.cpp", + "JSProcessActorChild.cpp", + "JSProcessActorParent.cpp", + "JSProcessActorProtocol.cpp", + "JSWindowActorChild.cpp", + "JSWindowActorParent.cpp", + "JSWindowActorProtocol.cpp", ] LOCAL_INCLUDES += [ - '/js/xpconnect/loader', - '/js/xpconnect/src', + "/js/xpconnect/loader", + "/js/xpconnect/src", ] -include('/ipc/chromium/chromium-config.mozbuild') +include("/ipc/chromium/chromium-config.mozbuild") -FINAL_LIBRARY = 'xul' +FINAL_LIBRARY = "xul" diff --git a/dom/ipc/moz.build b/dom/ipc/moz.build index 9a178c4f8783ca..57d289b658ee18 100644 --- a/dom/ipc/moz.build +++ b/dom/ipc/moz.build @@ -7,238 +7,236 @@ with Files("**"): BUG_COMPONENT = ("Core", "DOM: Content Processes") -DIRS += ['jsactor'] +DIRS += ["jsactor"] XPIDL_SOURCES += [ - 'nsIDOMProcessChild.idl', - 'nsIDOMProcessParent.idl', - 'nsIHangReport.idl', + "nsIDOMProcessChild.idl", + "nsIDOMProcessParent.idl", + "nsIHangReport.idl", ] -XPIDL_MODULE = 'dom' +XPIDL_MODULE = "dom" EXTRA_JS_MODULES += [ - 'ManifestMessagesChild.jsm', + "ManifestMessagesChild.jsm", ] 
EXPORTS.mozilla.dom.ipc += [ - 'IdType.h', - 'MemMapSnapshot.h', - 'SharedMap.h', - 'SharedMapChangeEvent.h', - 'SharedStringMap.h', - 'StringTable.h', - 'StructuredCloneData.h', + "IdType.h", + "MemMapSnapshot.h", + "SharedMap.h", + "SharedMapChangeEvent.h", + "SharedStringMap.h", + "StringTable.h", + "StructuredCloneData.h", ] EXPORTS.mozilla.dom += [ - 'BrowserBridgeChild.h', - 'BrowserBridgeHost.h', - 'BrowserBridgeParent.h', - 'BrowserChild.h', - 'BrowserHost.h', - 'BrowserParent.h', - 'ClonedErrorHolder.h', - 'CoalescedInputData.h', - 'CoalescedMouseData.h', - 'CoalescedWheelData.h', - 'ContentChild.h', - 'ContentParent.h', - 'ContentProcess.h', - 'ContentProcessManager.h', - 'CSPMessageUtils.h', - 'DocShellMessageUtils.h', - 'EffectsInfo.h', - 'FilePickerParent.h', - 'InProcessChild.h', - 'InProcessParent.h', - 'MaybeDiscarded.h', - 'MemoryReportRequest.h', - 'NativeThreadId.h', - 'PermissionMessageUtils.h', - 'ProcessActor.h', - 'PropertyBagUtils.h', - 'ReferrerInfoUtils.h', - 'RefMessageBodyService.h', - 'RemoteBrowser.h', - 'RemoteType.h', - 'RemoteWebProgress.h', - 'RemoteWebProgressRequest.h', - 'SharedMessageBody.h', - 'TabContext.h', - 'TabMessageTypes.h', - 'TabMessageUtils.h', - 'URLClassifierChild.h', - 'URLClassifierParent.h', - 'UserActivationIPCUtils.h', - 'WindowGlobalActor.h', - 'WindowGlobalChild.h', - 'WindowGlobalParent.h', + "BrowserBridgeChild.h", + "BrowserBridgeHost.h", + "BrowserBridgeParent.h", + "BrowserChild.h", + "BrowserHost.h", + "BrowserParent.h", + "ClonedErrorHolder.h", + "CoalescedInputData.h", + "CoalescedMouseData.h", + "CoalescedWheelData.h", + "ContentChild.h", + "ContentParent.h", + "ContentProcess.h", + "ContentProcessManager.h", + "CSPMessageUtils.h", + "DocShellMessageUtils.h", + "EffectsInfo.h", + "FilePickerParent.h", + "InProcessChild.h", + "InProcessParent.h", + "MaybeDiscarded.h", + "MemoryReportRequest.h", + "NativeThreadId.h", + "PermissionMessageUtils.h", + "ProcessActor.h", + "PropertyBagUtils.h", + "ReferrerInfoUtils.h", + "RefMessageBodyService.h", + "RemoteBrowser.h", + "RemoteType.h", + "RemoteWebProgress.h", + "RemoteWebProgressRequest.h", + "SharedMessageBody.h", + "TabContext.h", + "TabMessageTypes.h", + "TabMessageUtils.h", + "URLClassifierChild.h", + "URLClassifierParent.h", + "UserActivationIPCUtils.h", + "WindowGlobalActor.h", + "WindowGlobalChild.h", + "WindowGlobalParent.h", ] EXPORTS.mozilla += [ - 'PreallocatedProcessManager.h', - 'ProcessHangMonitor.h', - 'ProcessHangMonitorIPC.h', - 'ProcessPriorityManager.h', + "PreallocatedProcessManager.h", + "ProcessHangMonitor.h", + "ProcessHangMonitorIPC.h", + "ProcessPriorityManager.h", ] UNIFIED_SOURCES += [ - 'BrowserBridgeChild.cpp', - 'BrowserBridgeHost.cpp', - 'BrowserBridgeParent.cpp', - 'BrowserChild.cpp', - 'BrowserHost.cpp', - 'BrowserParent.cpp', - 'ClonedErrorHolder.cpp', - 'CoalescedMouseData.cpp', - 'CoalescedWheelData.cpp', - 'ColorPickerParent.cpp', - 'ContentParent.cpp', - 'ContentProcess.cpp', - 'ContentProcessManager.cpp', - 'CSPMessageUtils.cpp', - 'DocShellMessageUtils.cpp', - 'FilePickerParent.cpp', - 'InProcessImpl.cpp', - 'MemMapSnapshot.cpp', - 'MemoryReportRequest.cpp', - 'MMPrinter.cpp', - 'PermissionMessageUtils.cpp', - 'PreallocatedProcessManager.cpp', - 'ProcessActor.cpp', - 'ProcessPriorityManager.cpp', - 'PropertyBagUtils.cpp', - 'ReferrerInfoUtils.cpp', - 'RefMessageBodyService.cpp', - 'RemoteBrowser.cpp', - 'RemoteWebProgress.cpp', - 'RemoteWebProgressRequest.cpp', - 'SharedMap.cpp', - 'SharedMessageBody.cpp', - 'SharedStringMap.cpp', - 
'StructuredCloneData.cpp', - 'TabContext.cpp', - 'TabMessageUtils.cpp', - 'URLClassifierParent.cpp', - 'WindowGlobalActor.cpp', - 'WindowGlobalChild.cpp', - 'WindowGlobalParent.cpp', + "BrowserBridgeChild.cpp", + "BrowserBridgeHost.cpp", + "BrowserBridgeParent.cpp", + "BrowserChild.cpp", + "BrowserHost.cpp", + "BrowserParent.cpp", + "ClonedErrorHolder.cpp", + "CoalescedMouseData.cpp", + "CoalescedWheelData.cpp", + "ColorPickerParent.cpp", + "ContentParent.cpp", + "ContentProcess.cpp", + "ContentProcessManager.cpp", + "CSPMessageUtils.cpp", + "DocShellMessageUtils.cpp", + "FilePickerParent.cpp", + "InProcessImpl.cpp", + "MemMapSnapshot.cpp", + "MemoryReportRequest.cpp", + "MMPrinter.cpp", + "PermissionMessageUtils.cpp", + "PreallocatedProcessManager.cpp", + "ProcessActor.cpp", + "ProcessPriorityManager.cpp", + "PropertyBagUtils.cpp", + "ReferrerInfoUtils.cpp", + "RefMessageBodyService.cpp", + "RemoteBrowser.cpp", + "RemoteWebProgress.cpp", + "RemoteWebProgressRequest.cpp", + "SharedMap.cpp", + "SharedMessageBody.cpp", + "SharedStringMap.cpp", + "StructuredCloneData.cpp", + "TabContext.cpp", + "TabMessageUtils.cpp", + "URLClassifierParent.cpp", + "WindowGlobalActor.cpp", + "WindowGlobalChild.cpp", + "WindowGlobalParent.cpp", ] # ContentChild.cpp cannot be compiled in unified mode on linux due to Time conflict SOURCES += [ - 'ContentChild.cpp', - 'ProcessHangMonitor.cpp', + "ContentChild.cpp", + "ProcessHangMonitor.cpp", ] PREPROCESSED_IPDL_SOURCES += [ - 'PBrowser.ipdl', - 'PBrowserBridge.ipdl', - 'PContent.ipdl', + "PBrowser.ipdl", + "PBrowserBridge.ipdl", + "PContent.ipdl", ] IPDL_SOURCES += [ - 'DOMTypes.ipdlh', - 'MemoryReportTypes.ipdlh', - 'PColorPicker.ipdl', - 'PContentPermission.ipdlh', - 'PContentPermissionRequest.ipdl', - 'PCycleCollectWithLogs.ipdl', - 'PFilePicker.ipdl', - 'PInProcess.ipdl', - 'PLoginReputation.ipdl', - 'PPluginWidget.ipdl', - 'PProcessHangMonitor.ipdl', - 'PrefsTypes.ipdlh', - 'PTabContext.ipdlh', - 'PURLClassifier.ipdl', - 'PURLClassifierInfo.ipdlh', - 'PURLClassifierLocal.ipdl', - 'PWindowGlobal.ipdl', - 'ServiceWorkerConfiguration.ipdlh', - 'WindowGlobalTypes.ipdlh', + "DOMTypes.ipdlh", + "MemoryReportTypes.ipdlh", + "PColorPicker.ipdl", + "PContentPermission.ipdlh", + "PContentPermissionRequest.ipdl", + "PCycleCollectWithLogs.ipdl", + "PFilePicker.ipdl", + "PInProcess.ipdl", + "PLoginReputation.ipdl", + "PPluginWidget.ipdl", + "PProcessHangMonitor.ipdl", + "PrefsTypes.ipdlh", + "PTabContext.ipdlh", + "PURLClassifier.ipdl", + "PURLClassifierInfo.ipdlh", + "PURLClassifierLocal.ipdl", + "PWindowGlobal.ipdl", + "ServiceWorkerConfiguration.ipdlh", + "WindowGlobalTypes.ipdlh", ] -include('/ipc/chromium/chromium-config.mozbuild') +include("/ipc/chromium/chromium-config.mozbuild") -FINAL_LIBRARY = 'xul' +FINAL_LIBRARY = "xul" -if CONFIG['MOZ_SANDBOX'] and (CONFIG['OS_TARGET'] in ['Darwin', 'Linux']): +if CONFIG["MOZ_SANDBOX"] and (CONFIG["OS_TARGET"] in ["Darwin", "Linux"]): USE_LIBS += [ - 'mozsandbox', + "mozsandbox", ] LOCAL_INCLUDES += [ - '/caps', - '/chrome', - '/docshell/base', - '/dom/base', - '/dom/bindings', - '/dom/events', - '/dom/filesystem', - '/dom/geolocation', - '/dom/media/webrtc', - '/dom/media/webspeech/synth/ipc', - '/dom/security', - '/dom/storage', - '/extensions/spellcheck/src', - '/gfx/2d', - '/hal/sandbox', - '/js/xpconnect/loader', - '/js/xpconnect/src', - '/layout/base', - '/media/webrtc', - '/netwerk/base', - '/netwerk/protocol/http', - '/toolkit/components/printingui/ipc', - '/toolkit/crashreporter', - '/toolkit/xre', - 
'/uriloader/exthandler', - '/widget', - '/xpcom/base', - '/xpcom/threads', + "/caps", + "/chrome", + "/docshell/base", + "/dom/base", + "/dom/bindings", + "/dom/events", + "/dom/filesystem", + "/dom/geolocation", + "/dom/media/webrtc", + "/dom/media/webspeech/synth/ipc", + "/dom/security", + "/dom/storage", + "/extensions/spellcheck/src", + "/gfx/2d", + "/hal/sandbox", + "/js/xpconnect/loader", + "/js/xpconnect/src", + "/layout/base", + "/media/webrtc", + "/netwerk/base", + "/netwerk/protocol/http", + "/toolkit/components/printingui/ipc", + "/toolkit/crashreporter", + "/toolkit/xre", + "/uriloader/exthandler", + "/widget", + "/xpcom/base", + "/xpcom/threads", ] -if CONFIG['MOZ_SANDBOX'] and CONFIG['OS_ARCH'] == 'WINNT': +if CONFIG["MOZ_SANDBOX"] and CONFIG["OS_ARCH"] == "WINNT": LOCAL_INCLUDES += [ - '/security/sandbox/chromium', - '/security/sandbox/chromium-shim', + "/security/sandbox/chromium", + "/security/sandbox/chromium-shim", ] -if CONFIG['OS_ARCH'] != 'WINNT': +if CONFIG["OS_ARCH"] != "WINNT": LOCAL_INCLUDES += [ - '/modules/libjar', + "/modules/libjar", ] -DEFINES['BIN_SUFFIX'] = '"%s"' % CONFIG['BIN_SUFFIX'] +DEFINES["BIN_SUFFIX"] = '"%s"' % CONFIG["BIN_SUFFIX"] -DEFINES['MOZ_APP_NAME'] = '"%s"' % CONFIG['MOZ_APP_NAME'] +DEFINES["MOZ_APP_NAME"] = '"%s"' % CONFIG["MOZ_APP_NAME"] -if CONFIG['MOZ_WIDGET_TOOLKIT'] == 'android': - DEFINES['MOZ_ENABLE_FREETYPE'] = True +if CONFIG["MOZ_WIDGET_TOOLKIT"] == "android": + DEFINES["MOZ_ENABLE_FREETYPE"] = True -JAR_MANIFESTS += ['jar.mn'] +JAR_MANIFESTS += ["jar.mn"] BROWSER_CHROME_MANIFESTS += [ - 'tests/browser.ini', - 'tests/JSProcessActor/browser.ini', - 'tests/JSWindowActor/browser.ini', + "tests/browser.ini", + "tests/JSProcessActor/browser.ini", + "tests/JSWindowActor/browser.ini", ] -MOCHITEST_CHROME_MANIFESTS += ['tests/chrome.ini'] -MOCHITEST_MANIFESTS += ['tests/mochitest.ini'] -XPCSHELL_TESTS_MANIFESTS += ['tests/xpcshell.ini'] +MOCHITEST_CHROME_MANIFESTS += ["tests/chrome.ini"] +MOCHITEST_MANIFESTS += ["tests/mochitest.ini"] +XPCSHELL_TESTS_MANIFESTS += ["tests/xpcshell.ini"] -CXXFLAGS += CONFIG['TK_CFLAGS'] +CXXFLAGS += CONFIG["TK_CFLAGS"] -if CONFIG['CC_TYPE'] in ('clang', 'gcc'): - CXXFLAGS += ['-Wno-error=shadow'] +if CONFIG["CC_TYPE"] in ("clang", "gcc"): + CXXFLAGS += ["-Wno-error=shadow"] -if CONFIG['FUZZING'] and CONFIG['FUZZING_INTERFACES']: - TEST_DIRS += [ - 'fuzztest' - ] +if CONFIG["FUZZING"] and CONFIG["FUZZING_INTERFACES"]: + TEST_DIRS += ["fuzztest"] # Add libFuzzer configuration directives -include('/tools/fuzzing/libfuzzer-config.mozbuild') +include("/tools/fuzzing/libfuzzer-config.mozbuild") diff --git a/dom/jsurl/moz.build b/dom/jsurl/moz.build index 7ec6523cbaf716..261b511e9f9f29 100644 --- a/dom/jsurl/moz.build +++ b/dom/jsurl/moz.build @@ -8,11 +8,11 @@ with Files("**"): BUG_COMPONENT = ("Core", "DOM: Navigation") EXPORTS += [ - 'nsJSProtocolHandler.h', + "nsJSProtocolHandler.h", ] UNIFIED_SOURCES += [ - 'nsJSProtocolHandler.cpp', + "nsJSProtocolHandler.cpp", ] LOCAL_INCLUDES += [ @@ -20,8 +20,8 @@ LOCAL_INCLUDES += [ "/netwerk/base", ] -FINAL_LIBRARY = 'xul' +FINAL_LIBRARY = "xul" -include('/ipc/chromium/chromium-config.mozbuild') +include("/ipc/chromium/chromium-config.mozbuild") -MOCHITEST_MANIFESTS += ['test/mochitest.ini'] +MOCHITEST_MANIFESTS += ["test/mochitest.ini"] diff --git a/dom/l10n/moz.build b/dom/l10n/moz.build index 6866a4fdb6f781..0beb3d668e8e22 100644 --- a/dom/l10n/moz.build +++ b/dom/l10n/moz.build @@ -8,28 +8,28 @@ with Files("**"): BUG_COMPONENT = ("Core", "Internationalization") 
EXPORTS.mozilla.dom += [ - 'DocumentL10n.h', - 'DOMLocalization.h', - 'L10nMutations.h', - 'L10nOverlays.h', + "DocumentL10n.h", + "DOMLocalization.h", + "L10nMutations.h", + "L10nOverlays.h", ] UNIFIED_SOURCES += [ - 'DocumentL10n.cpp', - 'DOMLocalization.cpp', - 'L10nMutations.cpp', - 'L10nOverlays.cpp', + "DocumentL10n.cpp", + "DOMLocalization.cpp", + "L10nMutations.cpp", + "L10nOverlays.cpp", ] LOCAL_INCLUDES += [ - '/dom/base', + "/dom/base", ] -FINAL_LIBRARY = 'xul' +FINAL_LIBRARY = "xul" -MOCHITEST_MANIFESTS += ['tests/mochitest/mochitest.ini'] -MOCHITEST_CHROME_MANIFESTS += ['tests/mochitest/chrome.ini'] -BROWSER_CHROME_MANIFESTS += ['tests/mochitest/browser.ini'] +MOCHITEST_MANIFESTS += ["tests/mochitest/mochitest.ini"] +MOCHITEST_CHROME_MANIFESTS += ["tests/mochitest/chrome.ini"] +BROWSER_CHROME_MANIFESTS += ["tests/mochitest/browser.ini"] -if CONFIG['ENABLE_TESTS']: - DIRS += ['tests/gtest'] +if CONFIG["ENABLE_TESTS"]: + DIRS += ["tests/gtest"] diff --git a/dom/l10n/tests/gtest/moz.build b/dom/l10n/tests/gtest/moz.build index d5d288db7950c0..0e1e2173a61605 100644 --- a/dom/l10n/tests/gtest/moz.build +++ b/dom/l10n/tests/gtest/moz.build @@ -5,7 +5,7 @@ # file, You can obtain one at http://mozilla.org/MPL/2.0/. UNIFIED_SOURCES += [ - 'TestL10nOverlays.cpp', + "TestL10nOverlays.cpp", ] -FINAL_LIBRARY = 'xul-gtest' +FINAL_LIBRARY = "xul-gtest" diff --git a/dom/locales/moz.build b/dom/locales/moz.build index 9379e7d5e76ced..5065f9b0e126c6 100644 --- a/dom/locales/moz.build +++ b/dom/locales/moz.build @@ -55,14 +55,14 @@ with Files("en-US/chrome/xslt/**"): with Files("en-US/chrome/plugins.properties"): BUG_COMPONENT = ("Core", "Plug-ins") -JAR_MANIFESTS += ['jar.mn'] +JAR_MANIFESTS += ["jar.mn"] RESOURCE_FILES.locale.layout += [ - 'en-US/chrome/layout/HtmlForm.properties', - 'en-US/chrome/layout/MediaDocument.properties', - 'en-US/chrome/layout/xmlparser.properties', + "en-US/chrome/layout/HtmlForm.properties", + "en-US/chrome/layout/MediaDocument.properties", + "en-US/chrome/layout/xmlparser.properties", ] RESOURCE_FILES.locale.dom += [ - 'en-US/chrome/dom/dom.properties', + "en-US/chrome/dom/dom.properties", ] diff --git a/dom/localstorage/moz.build b/dom/localstorage/moz.build index 0c42e1d6367647..f86139ad117d19 100644 --- a/dom/localstorage/moz.build +++ b/dom/localstorage/moz.build @@ -7,74 +7,72 @@ with Files("**"): BUG_COMPONENT = ("Core", "Storage: localStorage & sessionStorage") -MOCHITEST_MANIFESTS += ['test/mochitest.ini'] +MOCHITEST_MANIFESTS += ["test/mochitest.ini"] -BROWSER_CHROME_MANIFESTS += ['test/browser.ini'] +BROWSER_CHROME_MANIFESTS += ["test/browser.ini"] -XPCSHELL_TESTS_MANIFESTS += [ - 'test/unit/xpcshell.ini' -] +XPCSHELL_TESTS_MANIFESTS += ["test/unit/xpcshell.ini"] TEST_HARNESS_FILES.xpcshell.dom.localstorage.test.unit += [ - 'test/unit/databaseShadowing-shared.js', + "test/unit/databaseShadowing-shared.js", ] -TEST_DIRS += ['test/gtest'] +TEST_DIRS += ["test/gtest"] XPIDL_SOURCES += [ - 'nsILocalStorageManager.idl', + "nsILocalStorageManager.idl", ] -XPIDL_MODULE = 'dom_localstorage' +XPIDL_MODULE = "dom_localstorage" EXPORTS.mozilla.dom.localstorage += [ - 'ActorsParent.h', - 'SerializationHelpers.h', + "ActorsParent.h", + "SerializationHelpers.h", ] EXPORTS.mozilla.dom += [ - 'LocalStorageCommon.h', - 'LocalStorageManager2.h', - 'LSObject.h', - 'LSObserver.h', - 'LSSnapshot.h', - 'LSValue.h', - 'LSWriteOptimizer.h', - 'LSWriteOptimizerImpl.h', - 'SnappyUtils.h', + "LocalStorageCommon.h", + "LocalStorageManager2.h", + "LSObject.h", + "LSObserver.h", + 
"LSSnapshot.h", + "LSValue.h", + "LSWriteOptimizer.h", + "LSWriteOptimizerImpl.h", + "SnappyUtils.h", ] UNIFIED_SOURCES += [ - 'ActorsChild.cpp', - 'ActorsParent.cpp', - 'LocalStorageCommon.cpp', - 'LocalStorageManager2.cpp', - 'LSDatabase.cpp', - 'LSObject.cpp', - 'LSObserver.cpp', - 'LSSnapshot.cpp', - 'LSValue.cpp', - 'LSWriteOptimizer.cpp', - 'ReportInternalError.cpp', - 'SnappyUtils.cpp', + "ActorsChild.cpp", + "ActorsParent.cpp", + "LocalStorageCommon.cpp", + "LocalStorageManager2.cpp", + "LSDatabase.cpp", + "LSObject.cpp", + "LSObserver.cpp", + "LSSnapshot.cpp", + "LSValue.cpp", + "LSWriteOptimizer.cpp", + "ReportInternalError.cpp", + "SnappyUtils.cpp", ] IPDL_SOURCES += [ - 'PBackgroundLSDatabase.ipdl', - 'PBackgroundLSObserver.ipdl', - 'PBackgroundLSRequest.ipdl', - 'PBackgroundLSSharedTypes.ipdlh', - 'PBackgroundLSSimpleRequest.ipdl', - 'PBackgroundLSSnapshot.ipdl', + "PBackgroundLSDatabase.ipdl", + "PBackgroundLSObserver.ipdl", + "PBackgroundLSRequest.ipdl", + "PBackgroundLSSharedTypes.ipdlh", + "PBackgroundLSSimpleRequest.ipdl", + "PBackgroundLSSnapshot.ipdl", ] -include('/ipc/chromium/chromium-config.mozbuild') +include("/ipc/chromium/chromium-config.mozbuild") -FINAL_LIBRARY = 'xul' +FINAL_LIBRARY = "xul" -if CONFIG['GNU_CXX']: - CXXFLAGS += ['-Wno-error=shadow'] +if CONFIG["GNU_CXX"]: + CXXFLAGS += ["-Wno-error=shadow"] LOCAL_INCLUDES += [ - '/dom/file/ipc', + "/dom/file/ipc", ] diff --git a/dom/localstorage/test/gtest/moz.build b/dom/localstorage/test/gtest/moz.build index 17d77e7de5a38a..f9d3c7bb0b2dc7 100644 --- a/dom/localstorage/test/gtest/moz.build +++ b/dom/localstorage/test/gtest/moz.build @@ -5,13 +5,13 @@ # file, you can obtain one at http://mozilla.org/MPL/2.0/. UNIFIED_SOURCES = [ - 'TestLocalStorage.cpp', + "TestLocalStorage.cpp", ] -include('/ipc/chromium/chromium-config.mozbuild') +include("/ipc/chromium/chromium-config.mozbuild") -FINAL_LIBRARY = 'xul-gtest' +FINAL_LIBRARY = "xul-gtest" LOCAL_INCLUDES += [ - '/dom/localstorage', + "/dom/localstorage", ] diff --git a/dom/manifest/moz.build b/dom/manifest/moz.build index f08125fdd2c6c9..edc133654dd1dc 100644 --- a/dom/manifest/moz.build +++ b/dom/manifest/moz.build @@ -8,14 +8,14 @@ with Files("**"): BUG_COMPONENT = ("Core", "DOM: Core & HTML") EXTRA_JS_MODULES += [ - 'ImageObjectProcessor.jsm', - 'Manifest.jsm', - 'ManifestFinder.jsm', - 'ManifestIcons.jsm', - 'ManifestObtainer.jsm', - 'ManifestProcessor.jsm', - 'ValueExtractor.jsm', + "ImageObjectProcessor.jsm", + "Manifest.jsm", + "ManifestFinder.jsm", + "ManifestIcons.jsm", + "ManifestObtainer.jsm", + "ManifestProcessor.jsm", + "ValueExtractor.jsm", ] -MOCHITEST_MANIFESTS += ['test/mochitest.ini'] -BROWSER_CHROME_MANIFESTS += ['test/browser.ini'] +MOCHITEST_MANIFESTS += ["test/mochitest.ini"] +BROWSER_CHROME_MANIFESTS += ["test/browser.ini"] diff --git a/dom/mathml/moz.build b/dom/mathml/moz.build index bf0016bb9ad6af..619568d9a2150b 100644 --- a/dom/mathml/moz.build +++ b/dom/mathml/moz.build @@ -8,21 +8,19 @@ with Files("**"): BUG_COMPONENT = ("Core", "MathML") EXPORTS.mozilla.dom += [ - 'MathMLElement.h', + "MathMLElement.h", ] UNIFIED_SOURCES += [ - 'MathMLElement.cpp', - 'MathMLElementFactory.cpp', + "MathMLElement.cpp", + "MathMLElementFactory.cpp", ] -include('/ipc/chromium/chromium-config.mozbuild') +include("/ipc/chromium/chromium-config.mozbuild") -FINAL_LIBRARY = 'xul' +FINAL_LIBRARY = "xul" LOCAL_INCLUDES += [ - '/dom/base', + "/dom/base", ] -MOCHITEST_MANIFESTS += [ - 'tests/mochitest/mochitest.ini' -] +MOCHITEST_MANIFESTS += 
["tests/mochitest/mochitest.ini"] diff --git a/dom/media/bridge/moz.build b/dom/media/bridge/moz.build index c9dd69ef46e2e7..b5b8188dc1aab0 100644 --- a/dom/media/bridge/moz.build +++ b/dom/media/bridge/moz.build @@ -5,34 +5,34 @@ # file, You can obtain one at http://mozilla.org/MPL/2.0/. XPIDL_SOURCES += [ - 'IPeerConnection.idl', + "IPeerConnection.idl", ] -XPIDL_MODULE = 'peerconnection' +XPIDL_MODULE = "peerconnection" SOURCES += [ - 'MediaModule.cpp', + "MediaModule.cpp", ] LOCAL_INCLUDES += [ - '/dom/media/webrtc/', - '/dom/media/webrtc/common/time_profiling', - '/dom/media/webrtc/jsapi', - '/dom/media/webrtc/libwebrtcglue', - '/dom/media/webrtc/transport', - '/dom/media/webrtc/transportbridge', - '/ipc/chromium/src', - '/media/webrtc/', - '/third_party/libwebrtc', - '/third_party/libwebrtc/webrtc', + "/dom/media/webrtc/", + "/dom/media/webrtc/common/time_profiling", + "/dom/media/webrtc/jsapi", + "/dom/media/webrtc/libwebrtcglue", + "/dom/media/webrtc/transport", + "/dom/media/webrtc/transportbridge", + "/ipc/chromium/src", + "/media/webrtc/", + "/third_party/libwebrtc", + "/third_party/libwebrtc/webrtc", ] -if CONFIG['MOZ_WEBRTC']: +if CONFIG["MOZ_WEBRTC"]: XPCOM_MANIFESTS += [ - 'components.conf', + "components.conf", ] -FINAL_LIBRARY = 'xul' +FINAL_LIBRARY = "xul" -if CONFIG['CC_TYPE'] in ('clang', 'gcc'): - CXXFLAGS += ['-Wno-error=shadow'] +if CONFIG["CC_TYPE"] in ("clang", "gcc"): + CXXFLAGS += ["-Wno-error=shadow"] diff --git a/dom/media/doctor/gtest/moz.build b/dom/media/doctor/gtest/moz.build index 359015143e460c..4e992670988570 100644 --- a/dom/media/doctor/gtest/moz.build +++ b/dom/media/doctor/gtest/moz.build @@ -4,19 +4,19 @@ # License, v. 2.0. If a copy of the MPL was not distributed with this # file, You can obtain one at http://mozilla.org/MPL/2.0/. -if CONFIG['OS_TARGET'] != 'Android': +if CONFIG["OS_TARGET"] != "Android": UNIFIED_SOURCES += [ - 'TestMultiWriterQueue.cpp', - 'TestRollingNumber.cpp', + "TestMultiWriterQueue.cpp", + "TestRollingNumber.cpp", ] -include('/ipc/chromium/chromium-config.mozbuild') +include("/ipc/chromium/chromium-config.mozbuild") LOCAL_INCLUDES += [ - '/dom/media/doctor', + "/dom/media/doctor", ] -FINAL_LIBRARY = 'xul-gtest' +FINAL_LIBRARY = "xul-gtest" -if CONFIG['CC_TYPE'] in ('clang', 'gcc'): - CXXFLAGS += ['-Wno-error=shadow'] +if CONFIG["CC_TYPE"] in ("clang", "gcc"): + CXXFLAGS += ["-Wno-error=shadow"] diff --git a/dom/media/doctor/moz.build b/dom/media/doctor/moz.build index d8a7422fe27b6d..effcea35a45d30 100644 --- a/dom/media/doctor/moz.build +++ b/dom/media/doctor/moz.build @@ -5,34 +5,34 @@ # file, You can obtain one at http://mozilla.org/MPL/2.0/. 
TEST_DIRS += [ - 'gtest', + "gtest", ] # Needed for plugin IPC types required by nsPluginHost -include('/ipc/chromium/chromium-config.mozbuild') +include("/ipc/chromium/chromium-config.mozbuild") EXPORTS += [ - 'DDLogCategory.h', - 'DDLoggedTypeTraits.h', - 'DDLogObject.h', - 'DDLogValue.h', - 'DecoderDoctorDiagnostics.h', - 'DecoderDoctorLogger.h', + "DDLogCategory.h", + "DDLoggedTypeTraits.h", + "DDLogObject.h", + "DDLogValue.h", + "DecoderDoctorDiagnostics.h", + "DecoderDoctorLogger.h", ] UNIFIED_SOURCES += [ - 'DDLifetime.cpp', - 'DDLifetimes.cpp', - 'DDLogCategory.cpp', - 'DDLogMessage.cpp', - 'DDLogObject.cpp', - 'DDLogUtils.cpp', - 'DDLogValue.cpp', - 'DDMediaLog.cpp', - 'DDMediaLogs.cpp', - 'DDTimeStamp.cpp', - 'DecoderDoctorDiagnostics.cpp', - 'DecoderDoctorLogger.cpp', + "DDLifetime.cpp", + "DDLifetimes.cpp", + "DDLogCategory.cpp", + "DDLogMessage.cpp", + "DDLogObject.cpp", + "DDLogUtils.cpp", + "DDLogValue.cpp", + "DDMediaLog.cpp", + "DDMediaLogs.cpp", + "DDTimeStamp.cpp", + "DecoderDoctorDiagnostics.cpp", + "DecoderDoctorLogger.cpp", ] -FINAL_LIBRARY = 'xul' +FINAL_LIBRARY = "xul" diff --git a/dom/media/eme/mediadrm/moz.build b/dom/media/eme/mediadrm/moz.build index c01f7a52e22891..c8f433f25f60fe 100644 --- a/dom/media/eme/mediadrm/moz.build +++ b/dom/media/eme/mediadrm/moz.build @@ -5,15 +5,15 @@ # file, You can obtain one at http://mozilla.org/MPL/2.0/. EXPORTS.mozilla += [ - 'MediaDrmCDMCallbackProxy.h', - 'MediaDrmCDMProxy.h', - 'MediaDrmProxySupport.h', + "MediaDrmCDMCallbackProxy.h", + "MediaDrmCDMProxy.h", + "MediaDrmProxySupport.h", ] UNIFIED_SOURCES += [ - 'MediaDrmCDMCallbackProxy.cpp', - 'MediaDrmCDMProxy.cpp', - 'MediaDrmProxySupport.cpp', + "MediaDrmCDMCallbackProxy.cpp", + "MediaDrmCDMProxy.cpp", + "MediaDrmProxySupport.cpp", ] -FINAL_LIBRARY = 'xul' \ No newline at end of file +FINAL_LIBRARY = "xul" diff --git a/dom/media/eme/moz.build b/dom/media/eme/moz.build index a2e35deb4fb1e3..728e60bdd18979 100644 --- a/dom/media/eme/moz.build +++ b/dom/media/eme/moz.build @@ -5,44 +5,44 @@ # file, You can obtain one at http://mozilla.org/MPL/2.0/. 
EXPORTS.mozilla.dom += [ - 'MediaEncryptedEvent.h', - 'MediaKeyError.h', - 'MediaKeyMessageEvent.h', - 'MediaKeys.h', - 'MediaKeySession.h', - 'MediaKeyStatusMap.h', - 'MediaKeySystemAccess.h', - 'MediaKeySystemAccessManager.h', - 'MediaKeySystemAccessPermissionRequest.h', + "MediaEncryptedEvent.h", + "MediaKeyError.h", + "MediaKeyMessageEvent.h", + "MediaKeys.h", + "MediaKeySession.h", + "MediaKeyStatusMap.h", + "MediaKeySystemAccess.h", + "MediaKeySystemAccessManager.h", + "MediaKeySystemAccessPermissionRequest.h", ] EXPORTS.mozilla += [ - 'CDMCaps.h', - 'CDMProxy.h', - 'DecryptorProxyCallback.h', - 'DetailedPromise.h', - 'EMEUtils.h', + "CDMCaps.h", + "CDMProxy.h", + "DecryptorProxyCallback.h", + "DetailedPromise.h", + "EMEUtils.h", ] UNIFIED_SOURCES += [ - 'CDMCaps.cpp', - 'DetailedPromise.cpp', - 'EMEUtils.cpp', - 'MediaEncryptedEvent.cpp', - 'MediaKeyError.cpp', - 'MediaKeyMessageEvent.cpp', - 'MediaKeys.cpp', - 'MediaKeySession.cpp', - 'MediaKeyStatusMap.cpp', - 'MediaKeySystemAccess.cpp', - 'MediaKeySystemAccessManager.cpp', - 'MediaKeySystemAccessPermissionRequest.cpp', + "CDMCaps.cpp", + "DetailedPromise.cpp", + "EMEUtils.cpp", + "MediaEncryptedEvent.cpp", + "MediaKeyError.cpp", + "MediaKeyMessageEvent.cpp", + "MediaKeys.cpp", + "MediaKeySession.cpp", + "MediaKeyStatusMap.cpp", + "MediaKeySystemAccess.cpp", + "MediaKeySystemAccessManager.cpp", + "MediaKeySystemAccessPermissionRequest.cpp", ] -if CONFIG['MOZ_WIDGET_TOOLKIT'] == 'android': - DIRS += ['mediadrm'] - LOCAL_INCLUDES += ['/dom/media/platforms/android'] +if CONFIG["MOZ_WIDGET_TOOLKIT"] == "android": + DIRS += ["mediadrm"] + LOCAL_INCLUDES += ["/dom/media/platforms/android"] -include('/ipc/chromium/chromium-config.mozbuild') +include("/ipc/chromium/chromium-config.mozbuild") -FINAL_LIBRARY = 'xul' +FINAL_LIBRARY = "xul" diff --git a/dom/media/encoder/moz.build b/dom/media/encoder/moz.build index dd1730553fc585..c4a8cb61552893 100644 --- a/dom/media/encoder/moz.build +++ b/dom/media/encoder/moz.build @@ -4,45 +4,47 @@ # License, v. 2.0. If a copy of the MPL was not distributed with this # file, You can obtain one at http://mozilla.org/MPL/2.0/. 
-with Files('*'): - BUG_COMPONENT = ('Core', 'Audio/Video: Recording') +with Files("*"): + BUG_COMPONENT = ("Core", "Audio/Video: Recording") EXPORTS += [ - 'ContainerWriter.h', - 'EncodedFrame.h', - 'MediaEncoder.h', - 'OpusTrackEncoder.h', - 'TrackEncoder.h', - 'TrackMetadataBase.h', + "ContainerWriter.h", + "EncodedFrame.h", + "MediaEncoder.h", + "OpusTrackEncoder.h", + "TrackEncoder.h", + "TrackMetadataBase.h", ] UNIFIED_SOURCES += [ - 'MediaEncoder.cpp', - 'Muxer.cpp', - 'OpusTrackEncoder.cpp', - 'TrackEncoder.cpp', + "MediaEncoder.cpp", + "Muxer.cpp", + "OpusTrackEncoder.cpp", + "TrackEncoder.cpp", ] -if CONFIG['MOZ_WEBM_ENCODER']: - EXPORTS += ['VP8TrackEncoder.h', +if CONFIG["MOZ_WEBM_ENCODER"]: + EXPORTS += [ + "VP8TrackEncoder.h", ] - UNIFIED_SOURCES += ['VP8TrackEncoder.cpp', + UNIFIED_SOURCES += [ + "VP8TrackEncoder.cpp", ] - LOCAL_INCLUDES += ['/media/libyuv/libyuv/include'] + LOCAL_INCLUDES += ["/media/libyuv/libyuv/include"] -DEFINES['TRACING'] = True +DEFINES["TRACING"] = True -FINAL_LIBRARY = 'xul' +FINAL_LIBRARY = "xul" LOCAL_INCLUDES += [ - '/dom/media', - '/ipc/chromium/src', + "/dom/media", + "/ipc/chromium/src", ] -include('/ipc/chromium/chromium-config.mozbuild') +include("/ipc/chromium/chromium-config.mozbuild") # Suppress some GCC warnings being treated as errors: # - about attributes on forward declarations for types that are already # defined, which complains about an important MOZ_EXPORT for android::AString -if CONFIG['CC_TYPE'] in ('clang', 'gcc'): - CXXFLAGS += ['-Wno-error=attributes'] +if CONFIG["CC_TYPE"] in ("clang", "gcc"): + CXXFLAGS += ["-Wno-error=attributes"] diff --git a/dom/media/fake-cdm/moz.build b/dom/media/fake-cdm/moz.build index ef54e912958fad..00faf01c75e9b1 100644 --- a/dom/media/fake-cdm/moz.build +++ b/dom/media/fake-cdm/moz.build @@ -4,25 +4,25 @@ # License, v. 2.0. If a copy of the MPL was not distributed with this # file, You can obtain one at http://mozilla.org/MPL/2.0/. -FINAL_TARGET = 'dist/bin/gmp-fake/1.0' +FINAL_TARGET = "dist/bin/gmp-fake/1.0" FINAL_TARGET_FILES += [ - 'manifest.json', + "manifest.json", ] SOURCES += [ - 'cdm-fake.cpp', - 'cdm-test-decryptor.cpp', - 'cdm-test-storage.cpp', + "cdm-fake.cpp", + "cdm-test-decryptor.cpp", + "cdm-test-storage.cpp", ] -DEFINES['CDM_IMPLEMENTATION'] = True +DEFINES["CDM_IMPLEMENTATION"] = True SharedLibrary("fake") -if CONFIG['OS_ARCH'] == 'WINNT': +if CONFIG["OS_ARCH"] == "WINNT": OS_LIBS += [ - 'ole32', + "ole32", ] USE_STATIC_LIBS = True diff --git a/dom/media/flac/moz.build b/dom/media/flac/moz.build index 7f1ec2bce57a11..35b73bee145bc2 100644 --- a/dom/media/flac/moz.build +++ b/dom/media/flac/moz.build @@ -5,23 +5,23 @@ # file, You can obtain one at http://mozilla.org/MPL/2.0/. 
EXPORTS += [ - 'FlacDecoder.h', - 'FlacDemuxer.h', - 'FlacFrameParser.h', + "FlacDecoder.h", + "FlacDemuxer.h", + "FlacFrameParser.h", ] UNIFIED_SOURCES += [ - 'FlacDecoder.cpp', - 'FlacDemuxer.cpp', - 'FlacFrameParser.cpp', + "FlacDecoder.cpp", + "FlacDemuxer.cpp", + "FlacFrameParser.cpp", ] -CXXFLAGS += CONFIG['MOZ_LIBVPX_CFLAGS'] +CXXFLAGS += CONFIG["MOZ_LIBVPX_CFLAGS"] -FINAL_LIBRARY = 'xul' +FINAL_LIBRARY = "xul" -if CONFIG['CC_TYPE'] in ('clang', 'gcc'): - CXXFLAGS += ['-Wno-error=shadow'] +if CONFIG["CC_TYPE"] in ("clang", "gcc"): + CXXFLAGS += ["-Wno-error=shadow"] # Add libFuzzer configuration directives -include('/tools/fuzzing/libfuzzer-config.mozbuild') +include("/tools/fuzzing/libfuzzer-config.mozbuild") diff --git a/dom/media/fuzz/moz.build b/dom/media/fuzz/moz.build index f6df831598b386..f672bbf93cf81c 100644 --- a/dom/media/fuzz/moz.build +++ b/dom/media/fuzz/moz.build @@ -4,26 +4,26 @@ # License, v. 2.0. If a copy of the MPL was not distributed with this # file, You can obtain one at http://mozilla.org/MPL/2.0/. -Library('FuzzingMedia') +Library("FuzzingMedia") SOURCES += [ - 'FuzzMedia.cpp', + "FuzzMedia.cpp", ] -include('/ipc/chromium/chromium-config.mozbuild') +include("/ipc/chromium/chromium-config.mozbuild") LOCAL_INCLUDES += [ - '/dom/media', - '/dom/media/encoder', - '/dom/media/gmp', - '/dom/media/hls', - '/dom/media/mp4', - '/dom/media/ogg', - '/dom/media/platforms', - '/dom/media/platforms/agnostic', + "/dom/media", + "/dom/media/encoder", + "/dom/media/gmp", + "/dom/media/hls", + "/dom/media/mp4", + "/dom/media/ogg", + "/dom/media/platforms", + "/dom/media/platforms/agnostic", ] -FINAL_LIBRARY = 'xul-gtest' +FINAL_LIBRARY = "xul-gtest" # Add libFuzzer configuration directives -include('/tools/fuzzing/libfuzzer-config.mozbuild') +include("/tools/fuzzing/libfuzzer-config.mozbuild") diff --git a/dom/media/gmp-plugin-openh264/moz.build b/dom/media/gmp-plugin-openh264/moz.build index 32a3d5389933b8..69c512bccab05a 100644 --- a/dom/media/gmp-plugin-openh264/moz.build +++ b/dom/media/gmp-plugin-openh264/moz.build @@ -6,21 +6,21 @@ # largely a copy of dom/media/gmp-fake/moz.build -FINAL_TARGET = 'dist/bin/gmp-fakeopenh264/1.0' +FINAL_TARGET = "dist/bin/gmp-fakeopenh264/1.0" FINAL_TARGET_FILES += [ - 'fakeopenh264.info', + "fakeopenh264.info", ] SOURCES += [ - 'gmp-fake-openh264.cpp', + "gmp-fake-openh264.cpp", ] SharedLibrary("fakeopenh264") -if CONFIG['OS_ARCH'] == 'WINNT': +if CONFIG["OS_ARCH"] == "WINNT": OS_LIBS += [ - 'ole32', + "ole32", ] USE_STATIC_LIBS = True diff --git a/dom/media/gmp/moz.build b/dom/media/gmp/moz.build index 0741e9355d5b88..a492131a09c50c 100644 --- a/dom/media/gmp/moz.build +++ b/dom/media/gmp/moz.build @@ -4,149 +4,149 @@ # License, v. 2.0. If a copy of the MPL was not distributed with this # file, You can obtain one at http://mozilla.org/MPL/2.0/. 
-XPIDL_MODULE = 'content_geckomediaplugins' +XPIDL_MODULE = "content_geckomediaplugins" XPIDL_SOURCES += [ - 'mozIGeckoMediaPluginChromeService.idl', - 'mozIGeckoMediaPluginService.idl', + "mozIGeckoMediaPluginChromeService.idl", + "mozIGeckoMediaPluginService.idl", ] EXPORTS += [ - 'ChromiumCDMCallback.h', - 'ChromiumCDMParent.h', - 'ChromiumCDMProxy.h', - 'DecryptJob.h', - 'gmp-api/gmp-entrypoints.h', - 'gmp-api/gmp-errors.h', - 'gmp-api/gmp-platform.h', - 'gmp-api/gmp-sanitized-cdm-exports.h', - 'gmp-api/gmp-storage.h', - 'gmp-api/gmp-video-codec.h', - 'gmp-api/gmp-video-decode.h', - 'gmp-api/gmp-video-encode.h', - 'gmp-api/gmp-video-frame-encoded.h', - 'gmp-api/gmp-video-frame-i420.h', - 'gmp-api/gmp-video-frame.h', - 'gmp-api/gmp-video-host.h', - 'gmp-api/gmp-video-plane.h', - 'GMPCallbackBase.h', - 'GMPChild.h', - 'GMPContentChild.h', - 'GMPContentParent.h', - 'GMPCrashHelper.h', - 'GMPCrashHelperHolder.h', - 'GMPLoader.h', - 'GMPMessageUtils.h', - 'GMPParent.h', - 'GMPPlatform.h', - 'GMPProcessChild.h', - 'GMPProcessParent.h', - 'GMPService.h', - 'GMPServiceChild.h', - 'GMPServiceParent.h', - 'GMPSharedMemManager.h', - 'GMPStorage.h', - 'GMPStorageChild.h', - 'GMPStorageParent.h', - 'GMPTimerChild.h', - 'GMPTimerParent.h', - 'GMPUtils.h', - 'GMPVideoDecoderChild.h', - 'GMPVideoDecoderParent.h', - 'GMPVideoDecoderProxy.h', - 'GMPVideoEncodedFrameImpl.h', - 'GMPVideoEncoderChild.h', - 'GMPVideoEncoderParent.h', - 'GMPVideoEncoderProxy.h', - 'GMPVideoHost.h', - 'GMPVideoi420FrameImpl.h', - 'GMPVideoPlaneImpl.h', - 'widevine-adapter/content_decryption_module.h', - 'widevine-adapter/content_decryption_module_export.h', - 'widevine-adapter/content_decryption_module_ext.h', - 'widevine-adapter/content_decryption_module_proxy.h', + "ChromiumCDMCallback.h", + "ChromiumCDMParent.h", + "ChromiumCDMProxy.h", + "DecryptJob.h", + "gmp-api/gmp-entrypoints.h", + "gmp-api/gmp-errors.h", + "gmp-api/gmp-platform.h", + "gmp-api/gmp-sanitized-cdm-exports.h", + "gmp-api/gmp-storage.h", + "gmp-api/gmp-video-codec.h", + "gmp-api/gmp-video-decode.h", + "gmp-api/gmp-video-encode.h", + "gmp-api/gmp-video-frame-encoded.h", + "gmp-api/gmp-video-frame-i420.h", + "gmp-api/gmp-video-frame.h", + "gmp-api/gmp-video-host.h", + "gmp-api/gmp-video-plane.h", + "GMPCallbackBase.h", + "GMPChild.h", + "GMPContentChild.h", + "GMPContentParent.h", + "GMPCrashHelper.h", + "GMPCrashHelperHolder.h", + "GMPLoader.h", + "GMPMessageUtils.h", + "GMPParent.h", + "GMPPlatform.h", + "GMPProcessChild.h", + "GMPProcessParent.h", + "GMPService.h", + "GMPServiceChild.h", + "GMPServiceParent.h", + "GMPSharedMemManager.h", + "GMPStorage.h", + "GMPStorageChild.h", + "GMPStorageParent.h", + "GMPTimerChild.h", + "GMPTimerParent.h", + "GMPUtils.h", + "GMPVideoDecoderChild.h", + "GMPVideoDecoderParent.h", + "GMPVideoDecoderProxy.h", + "GMPVideoEncodedFrameImpl.h", + "GMPVideoEncoderChild.h", + "GMPVideoEncoderParent.h", + "GMPVideoEncoderProxy.h", + "GMPVideoHost.h", + "GMPVideoi420FrameImpl.h", + "GMPVideoPlaneImpl.h", + "widevine-adapter/content_decryption_module.h", + "widevine-adapter/content_decryption_module_export.h", + "widevine-adapter/content_decryption_module_ext.h", + "widevine-adapter/content_decryption_module_proxy.h", ] UNIFIED_SOURCES += [ - 'CDMStorageIdProvider.cpp', - 'ChromiumCDMAdapter.cpp', - 'ChromiumCDMCallbackProxy.cpp', - 'ChromiumCDMChild.cpp', - 'ChromiumCDMParent.cpp', - 'ChromiumCDMProxy.cpp', - 'DecryptJob.cpp', - 'GMPChild.cpp', - 'GMPContentChild.cpp', - 'GMPContentParent.cpp', - 'GMPCrashHelper.cpp', - 
'GMPCrashHelperHolder.cpp', - 'GMPDiskStorage.cpp', - 'GMPLoader.cpp', - 'GMPMemoryStorage.cpp', - 'GMPParent.cpp', - 'GMPPlatform.cpp', - 'GMPProcessChild.cpp', - 'GMPProcessParent.cpp', - 'GMPService.cpp', - 'GMPServiceChild.cpp', - 'GMPServiceParent.cpp', - 'GMPSharedMemManager.cpp', - 'GMPStorageChild.cpp', - 'GMPStorageParent.cpp', - 'GMPTimerChild.cpp', - 'GMPTimerParent.cpp', - 'GMPUtils.cpp', - 'GMPVideoDecoderChild.cpp', - 'GMPVideoDecoderParent.cpp', - 'GMPVideoEncodedFrameImpl.cpp', - 'GMPVideoEncoderChild.cpp', - 'GMPVideoEncoderParent.cpp', - 'GMPVideoHost.cpp', - 'GMPVideoi420FrameImpl.cpp', - 'GMPVideoPlaneImpl.cpp' + "CDMStorageIdProvider.cpp", + "ChromiumCDMAdapter.cpp", + "ChromiumCDMCallbackProxy.cpp", + "ChromiumCDMChild.cpp", + "ChromiumCDMParent.cpp", + "ChromiumCDMProxy.cpp", + "DecryptJob.cpp", + "GMPChild.cpp", + "GMPContentChild.cpp", + "GMPContentParent.cpp", + "GMPCrashHelper.cpp", + "GMPCrashHelperHolder.cpp", + "GMPDiskStorage.cpp", + "GMPLoader.cpp", + "GMPMemoryStorage.cpp", + "GMPParent.cpp", + "GMPPlatform.cpp", + "GMPProcessChild.cpp", + "GMPProcessParent.cpp", + "GMPService.cpp", + "GMPServiceChild.cpp", + "GMPServiceParent.cpp", + "GMPSharedMemManager.cpp", + "GMPStorageChild.cpp", + "GMPStorageParent.cpp", + "GMPTimerChild.cpp", + "GMPTimerParent.cpp", + "GMPUtils.cpp", + "GMPVideoDecoderChild.cpp", + "GMPVideoDecoderParent.cpp", + "GMPVideoEncodedFrameImpl.cpp", + "GMPVideoEncoderChild.cpp", + "GMPVideoEncoderParent.cpp", + "GMPVideoHost.cpp", + "GMPVideoi420FrameImpl.cpp", + "GMPVideoPlaneImpl.cpp", ] DIRS += [ - 'rlz', - 'widevine-adapter', + "rlz", + "widevine-adapter", ] IPDL_SOURCES += [ - 'GMPTypes.ipdlh', - 'PChromiumCDM.ipdl', - 'PGMP.ipdl', - 'PGMPContent.ipdl', - 'PGMPService.ipdl', - 'PGMPStorage.ipdl', - 'PGMPTimer.ipdl', - 'PGMPVideoDecoder.ipdl', - 'PGMPVideoEncoder.ipdl', + "GMPTypes.ipdlh", + "PChromiumCDM.ipdl", + "PGMP.ipdl", + "PGMPContent.ipdl", + "PGMPService.ipdl", + "PGMPStorage.ipdl", + "PGMPTimer.ipdl", + "PGMPVideoDecoder.ipdl", + "PGMPVideoEncoder.ipdl", ] -if CONFIG['OS_TARGET'] in ['WINNT', 'Darwin']: - DEFINES['SUPPORT_STORAGE_ID'] = 1; +if CONFIG["OS_TARGET"] in ["WINNT", "Darwin"]: + DEFINES["SUPPORT_STORAGE_ID"] = 1 # comment this out to use Unsafe Shmem for more performance -DEFINES['GMP_SAFE_SHMEM'] = True +DEFINES["GMP_SAFE_SHMEM"] = True -include('/ipc/chromium/chromium-config.mozbuild') +include("/ipc/chromium/chromium-config.mozbuild") -if CONFIG['MOZ_SANDBOX']: +if CONFIG["MOZ_SANDBOX"]: # For sandbox includes and the include dependencies those have LOCAL_INCLUDES += [ - '/security/sandbox/chromium', - '/security/sandbox/chromium-shim', + "/security/sandbox/chromium", + "/security/sandbox/chromium-shim", ] -FINAL_LIBRARY = 'xul' +FINAL_LIBRARY = "xul" # dom/media/webrtc/transport so we work with --disable-webrtc LOCAL_INCLUDES += [ - '/dom/media/webrtc/transport', - '/xpcom/base', - '/xpcom/build', - '/xpcom/threads', + "/dom/media/webrtc/transport", + "/xpcom/base", + "/xpcom/build", + "/xpcom/threads", ] -if CONFIG['CC_TYPE'] in ('clang', 'gcc'): - CXXFLAGS += ['-Wno-error=shadow'] +if CONFIG["CC_TYPE"] in ("clang", "gcc"): + CXXFLAGS += ["-Wno-error=shadow"] diff --git a/dom/media/gmp/widevine-adapter/moz.build b/dom/media/gmp/widevine-adapter/moz.build index 90d78d5da9c83a..0ed0ff04aeffb3 100644 --- a/dom/media/gmp/widevine-adapter/moz.build +++ b/dom/media/gmp/widevine-adapter/moz.build @@ -5,24 +5,20 @@ # file, You can obtain one at http://mozilla.org/MPL/2.0/. 
SOURCES += [ - 'WidevineFileIO.cpp', - 'WidevineUtils.cpp', - 'WidevineVideoFrame.cpp', + "WidevineFileIO.cpp", + "WidevineUtils.cpp", + "WidevineVideoFrame.cpp", ] -EXPORTS += [ - 'WidevineFileIO.h', - 'WidevineUtils.h', - 'WidevineVideoFrame.h' -] +EXPORTS += ["WidevineFileIO.h", "WidevineUtils.h", "WidevineVideoFrame.h"] -FINAL_LIBRARY = 'xul' +FINAL_LIBRARY = "xul" LOCAL_INCLUDES += [ - '/dom/media/gmp', + "/dom/media/gmp", ] -if CONFIG['CC_TYPE'] == 'clang': - CXXFLAGS += ['-Wno-error=shadow'] +if CONFIG["CC_TYPE"] == "clang": + CXXFLAGS += ["-Wno-error=shadow"] -include('/ipc/chromium/chromium-config.mozbuild') +include("/ipc/chromium/chromium-config.mozbuild") diff --git a/dom/media/gtest/moz.build b/dom/media/gtest/moz.build index 53268713599f34..12fc70c8a0d867 100644 --- a/dom/media/gtest/moz.build +++ b/dom/media/gtest/moz.build @@ -4,125 +4,125 @@ # License, v. 2.0. If a copy of the MPL was not distributed with this # file, You can obtain one at http://mozilla.org/MPL/2.0/. -include('/dom/media/webrtc/third_party_build/webrtc.mozbuild') +include("/dom/media/webrtc/third_party_build/webrtc.mozbuild") -DEFINES['ENABLE_SET_CUBEB_BACKEND'] = True +DEFINES["ENABLE_SET_CUBEB_BACKEND"] = True LOCAL_INCLUDES += [ - '/dom/media/webrtc/common/', - '/third_party/libwebrtc', - '/third_party/libwebrtc/webrtc', + "/dom/media/webrtc/common/", + "/third_party/libwebrtc", + "/third_party/libwebrtc/webrtc", ] UNIFIED_SOURCES += [ - 'MockMediaResource.cpp', - 'TestAudioBuffers.cpp', - 'TestAudioCallbackDriver.cpp', - 'TestAudioCompactor.cpp', - 'TestAudioDriftCorrection.cpp', - 'TestAudioMixer.cpp', - 'TestAudioPacketizer.cpp', - 'TestAudioRingBuffer.cpp', - 'TestAudioSegment.cpp', - 'TestAudioTrackEncoder.cpp', - 'TestAudioTrackGraph.cpp', - 'TestBenchmarkStorage.cpp', - 'TestBitWriter.cpp', - 'TestBlankVideoDataCreator.cpp', - 'TestBufferReader.cpp', - 'TestDataMutex.cpp', - 'TestDecoderBenchmark.cpp', - 'TestDriftCompensation.cpp', - 'TestDynamicResampler.cpp', - 'TestGMPUtils.cpp', - 'TestGroupId.cpp', - 'TestIntervalSet.cpp', - 'TestKeyValueStorage.cpp', - 'TestMediaDataDecoder.cpp', - 'TestMediaDataEncoder.cpp', - 'TestMediaEventSource.cpp', - 'TestMediaMIMETypes.cpp', - 'TestMediaSpan.cpp', - 'TestMP3Demuxer.cpp', - 'TestMP4Demuxer.cpp', - 'TestMuxer.cpp', - 'TestOpusParser.cpp', - 'TestRust.cpp', - 'TestTimeUnit.cpp', - 'TestVideoSegment.cpp', - 'TestVideoUtils.cpp', - 'TestVPXDecoding.cpp', - 'TestWebMBuffered.cpp', + "MockMediaResource.cpp", + "TestAudioBuffers.cpp", + "TestAudioCallbackDriver.cpp", + "TestAudioCompactor.cpp", + "TestAudioDriftCorrection.cpp", + "TestAudioMixer.cpp", + "TestAudioPacketizer.cpp", + "TestAudioRingBuffer.cpp", + "TestAudioSegment.cpp", + "TestAudioTrackEncoder.cpp", + "TestAudioTrackGraph.cpp", + "TestBenchmarkStorage.cpp", + "TestBitWriter.cpp", + "TestBlankVideoDataCreator.cpp", + "TestBufferReader.cpp", + "TestDataMutex.cpp", + "TestDecoderBenchmark.cpp", + "TestDriftCompensation.cpp", + "TestDynamicResampler.cpp", + "TestGMPUtils.cpp", + "TestGroupId.cpp", + "TestIntervalSet.cpp", + "TestKeyValueStorage.cpp", + "TestMediaDataDecoder.cpp", + "TestMediaDataEncoder.cpp", + "TestMediaEventSource.cpp", + "TestMediaMIMETypes.cpp", + "TestMediaSpan.cpp", + "TestMP3Demuxer.cpp", + "TestMP4Demuxer.cpp", + "TestMuxer.cpp", + "TestOpusParser.cpp", + "TestRust.cpp", + "TestTimeUnit.cpp", + "TestVideoSegment.cpp", + "TestVideoUtils.cpp", + "TestVPXDecoding.cpp", + "TestWebMBuffered.cpp", ] -if CONFIG['MOZ_WEBM_ENCODER']: +if CONFIG["MOZ_WEBM_ENCODER"]: 
UNIFIED_SOURCES += [ - 'TestVideoTrackEncoder.cpp', - 'TestWebMWriter.cpp', - 'YUVBufferGenerator.cpp', + "TestVideoTrackEncoder.cpp", + "TestWebMWriter.cpp", + "YUVBufferGenerator.cpp", ] LOCAL_INCLUDES += [ - '/gfx/2d/', + "/gfx/2d/", ] -if CONFIG['OS_TARGET'] != 'Android': +if CONFIG["OS_TARGET"] != "Android": UNIFIED_SOURCES += [ - 'TestCDMStorage.cpp', - 'TestGMPCrossOrigin.cpp', - 'TestGMPRemoveAndDelete.cpp', + "TestCDMStorage.cpp", + "TestGMPCrossOrigin.cpp", + "TestGMPRemoveAndDelete.cpp", ] -if CONFIG['MOZ_WEBRTC'] and CONFIG['OS_TARGET'] != 'Android': +if CONFIG["MOZ_WEBRTC"] and CONFIG["OS_TARGET"] != "Android": UNIFIED_SOURCES += [ - 'TestAudioDeviceEnumerator.cpp', - 'TestVideoFrameConverter.cpp', + "TestAudioDeviceEnumerator.cpp", + "TestVideoFrameConverter.cpp", ] TEST_HARNESS_FILES.gtest += [ - '../test/gizmo-frag.mp4', - '../test/gizmo.mp4', - '../test/vp9cake.webm', - 'dash_dashinit.mp4', - 'id3v2header.mp3', - 'negative_duration.mp4', - 'noise.mp3', - 'noise_vbr.mp3', - 'short-zero-in-moov.mp4', - 'short-zero-inband.mov', - 'small-shot-false-positive.mp3', - 'small-shot-partial-xing.mp3', - 'small-shot.mp3', - 'test.webm', - 'test_case_1224361.vp8.ivf', - 'test_case_1224363.vp8.ivf', - 'test_case_1224369.vp8.ivf', - 'test_vbri.mp3', + "../test/gizmo-frag.mp4", + "../test/gizmo.mp4", + "../test/vp9cake.webm", + "dash_dashinit.mp4", + "id3v2header.mp3", + "negative_duration.mp4", + "noise.mp3", + "noise_vbr.mp3", + "short-zero-in-moov.mp4", + "short-zero-inband.mov", + "small-shot-false-positive.mp3", + "small-shot-partial-xing.mp3", + "small-shot.mp3", + "test.webm", + "test_case_1224361.vp8.ivf", + "test_case_1224363.vp8.ivf", + "test_case_1224369.vp8.ivf", + "test_vbri.mp3", ] TEST_DIRS += [ - 'mp4_demuxer', + "mp4_demuxer", ] -include('/ipc/chromium/chromium-config.mozbuild') +include("/ipc/chromium/chromium-config.mozbuild") LOCAL_INCLUDES += [ - '/dom/media', - '/dom/media/encoder', - '/dom/media/gmp', - '/dom/media/mp4', - '/dom/media/platforms', - '/dom/media/platforms/agnostic', - '/dom/media/webrtc', - '/security/certverifier', + "/dom/media", + "/dom/media/encoder", + "/dom/media/gmp", + "/dom/media/mp4", + "/dom/media/platforms", + "/dom/media/platforms/agnostic", + "/dom/media/webrtc", + "/security/certverifier", ] -FINAL_LIBRARY = 'xul-gtest' +FINAL_LIBRARY = "xul-gtest" -if CONFIG['CC_TYPE'] in ('clang', 'gcc'): - CXXFLAGS += ['-Wno-error=shadow'] +if CONFIG["CC_TYPE"] in ("clang", "gcc"): + CXXFLAGS += ["-Wno-error=shadow"] -if CONFIG['CC_TYPE'] in ('clang', 'clang-cl'): +if CONFIG["CC_TYPE"] in ("clang", "clang-cl"): CXXFLAGS += [ - '-Wno-inconsistent-missing-override', - '-Wno-unused-private-field', + "-Wno-inconsistent-missing-override", + "-Wno-unused-private-field", ] diff --git a/dom/media/gtest/mp4_demuxer/moz.build b/dom/media/gtest/mp4_demuxer/moz.build index a6cc77d22f4fca..5fbe4a461b31d7 100644 --- a/dom/media/gtest/mp4_demuxer/moz.build +++ b/dom/media/gtest/mp4_demuxer/moz.build @@ -4,61 +4,63 @@ # License, v. 2.0. If a copy of the MPL was not distributed with this # file, You can obtain one at http://mozilla.org/MPL/2.0/. 
-Library('mp4_demuxer_gtest') +Library("mp4_demuxer_gtest") -if CONFIG['OS_TARGET'] != 'Android': +if CONFIG["OS_TARGET"] != "Android": SOURCES += [ - 'TestParser.cpp', + "TestParser.cpp", ] SOURCES += [ - 'TestInterval.cpp', + "TestInterval.cpp", ] TEST_HARNESS_FILES.gtest += [ - 'test_case_1156505.mp4', - 'test_case_1181213.mp4', - 'test_case_1181215.mp4', - 'test_case_1181223.mp4', - 'test_case_1181719.mp4', - 'test_case_1185230.mp4', - 'test_case_1187067.mp4', - 'test_case_1200326.mp4', - 'test_case_1204580.mp4', - 'test_case_1216748.mp4', - 'test_case_1296473.mp4', - 'test_case_1296532.mp4', - 'test_case_1301065-harder.mp4', - 'test_case_1301065-i64max.mp4', - 'test_case_1301065-i64min.mp4', - 'test_case_1301065-max-ez.mp4', - 'test_case_1301065-max-ok.mp4', - 'test_case_1301065-overfl.mp4', - 'test_case_1301065-u32max.mp4', - 'test_case_1301065-u64max.mp4', - 'test_case_1301065.mp4', - 'test_case_1329061.mov', - 'test_case_1351094.mp4', - 'test_case_1380468.mp4', - 'test_case_1388991.mp4', - 'test_case_1389299.mp4', - 'test_case_1389527.mp4', - 'test_case_1395244.mp4', - 'test_case_1410565.mp4', - 'test_case_1513651-2-sample-description-entries.mp4', - 'test_case_1519617-cenc-init-with-track_id-0.mp4', - 'test_case_1519617-track2-trafs-removed.mp4', - 'test_case_1519617-video-has-track_id-0.mp4', + "test_case_1156505.mp4", + "test_case_1181213.mp4", + "test_case_1181215.mp4", + "test_case_1181223.mp4", + "test_case_1181719.mp4", + "test_case_1185230.mp4", + "test_case_1187067.mp4", + "test_case_1200326.mp4", + "test_case_1204580.mp4", + "test_case_1216748.mp4", + "test_case_1296473.mp4", + "test_case_1296532.mp4", + "test_case_1301065-harder.mp4", + "test_case_1301065-i64max.mp4", + "test_case_1301065-i64min.mp4", + "test_case_1301065-max-ez.mp4", + "test_case_1301065-max-ok.mp4", + "test_case_1301065-overfl.mp4", + "test_case_1301065-u32max.mp4", + "test_case_1301065-u64max.mp4", + "test_case_1301065.mp4", + "test_case_1329061.mov", + "test_case_1351094.mp4", + "test_case_1380468.mp4", + "test_case_1388991.mp4", + "test_case_1389299.mp4", + "test_case_1389527.mp4", + "test_case_1395244.mp4", + "test_case_1410565.mp4", + "test_case_1513651-2-sample-description-entries.mp4", + "test_case_1519617-cenc-init-with-track_id-0.mp4", + "test_case_1519617-track2-trafs-removed.mp4", + "test_case_1519617-video-has-track_id-0.mp4", ] -UNIFIED_SOURCES += ['TestMP4.cpp',] +UNIFIED_SOURCES += [ + "TestMP4.cpp", +] TEST_HARNESS_FILES.gtest += [ - '../../test/street.mp4', + "../../test/street.mp4", ] LOCAL_INCLUDES += [ - '../../mp4', - '/toolkit/components/telemetry/tests/gtest', + "../../mp4", + "/toolkit/components/telemetry/tests/gtest", ] -FINAL_LIBRARY = 'xul-gtest' +FINAL_LIBRARY = "xul-gtest" diff --git a/dom/media/hls/moz.build b/dom/media/hls/moz.build index c3dba1ebda82b5..21b717e76f7c8d 100644 --- a/dom/media/hls/moz.build +++ b/dom/media/hls/moz.build @@ -5,23 +5,23 @@ EXPORTS += [ - 'HLSDecoder.h', - 'HLSDemuxer.h', - 'HLSUtils.h', + "HLSDecoder.h", + "HLSDemuxer.h", + "HLSUtils.h", ] UNIFIED_SOURCES += [ - 'HLSDecoder.cpp', - 'HLSDemuxer.cpp', - 'HLSUtils.cpp', + "HLSDecoder.cpp", + "HLSDemuxer.cpp", + "HLSUtils.cpp", ] -include('/ipc/chromium/chromium-config.mozbuild') +include("/ipc/chromium/chromium-config.mozbuild") -FINAL_LIBRARY = 'xul' +FINAL_LIBRARY = "xul" -if CONFIG['CC_TYPE'] in ('clang', 'gcc'): - CXXFLAGS += ['-Wno-error=shadow'] +if CONFIG["CC_TYPE"] in ("clang", "gcc"): + CXXFLAGS += ["-Wno-error=shadow"] # Add libFuzzer configuration directives 
-include('/tools/fuzzing/libfuzzer-config.mozbuild') +include("/tools/fuzzing/libfuzzer-config.mozbuild") diff --git a/dom/media/imagecapture/moz.build b/dom/media/imagecapture/moz.build index 25687e07e1068a..fdbc73430df06f 100644 --- a/dom/media/imagecapture/moz.build +++ b/dom/media/imagecapture/moz.build @@ -4,15 +4,13 @@ # License, v. 2.0. If a copy of the MPL was not distributed with this # file, You can obtain one at http://mozilla.org/MPL/2.0/. -EXPORTS.mozilla.dom += [ - 'ImageCapture.h' -] +EXPORTS.mozilla.dom += ["ImageCapture.h"] UNIFIED_SOURCES += [ - 'CaptureTask.cpp', - 'ImageCapture.cpp', + "CaptureTask.cpp", + "ImageCapture.cpp", ] -include('/ipc/chromium/chromium-config.mozbuild') +include("/ipc/chromium/chromium-config.mozbuild") -FINAL_LIBRARY = 'xul' +FINAL_LIBRARY = "xul" diff --git a/dom/media/ipc/moz.build b/dom/media/ipc/moz.build index 82f228a7b30a69..019842b1032026 100644 --- a/dom/media/ipc/moz.build +++ b/dom/media/ipc/moz.build @@ -6,61 +6,61 @@ IPDL_SOURCES += [ - 'PMediaDecoderParams.ipdlh', - 'PRDD.ipdl', - 'PRemoteDecoder.ipdl', - 'PRemoteDecoderManager.ipdl', + "PMediaDecoderParams.ipdlh", + "PRDD.ipdl", + "PRemoteDecoder.ipdl", + "PRemoteDecoderManager.ipdl", ] EXPORTS.mozilla += [ - 'IRemoteDecoderChild.h', - 'RDDChild.h', - 'RDDParent.h', - 'RDDProcessHost.h', - 'RDDProcessImpl.h', - 'RDDProcessManager.h', - 'RemoteDecoderChild.h', - 'RemoteDecoderManagerChild.h', - 'RemoteDecoderManagerParent.h', - 'RemoteDecoderModule.h', - 'RemoteDecoderParent.h', - 'RemoteImageHolder.h', - 'RemoteMediaData.h', - 'RemoteMediaDataDecoder.h', - 'ShmemRecycleAllocator.h', + "IRemoteDecoderChild.h", + "RDDChild.h", + "RDDParent.h", + "RDDProcessHost.h", + "RDDProcessImpl.h", + "RDDProcessManager.h", + "RemoteDecoderChild.h", + "RemoteDecoderManagerChild.h", + "RemoteDecoderManagerParent.h", + "RemoteDecoderModule.h", + "RemoteDecoderParent.h", + "RemoteImageHolder.h", + "RemoteMediaData.h", + "RemoteMediaDataDecoder.h", + "ShmemRecycleAllocator.h", ] EXPORTS.mozilla.dom += [ - 'MediaIPCUtils.h', + "MediaIPCUtils.h", ] SOURCES += [ - 'RDDChild.cpp', - 'RDDParent.cpp', - 'RDDProcessHost.cpp', - 'RDDProcessImpl.cpp', - 'RDDProcessManager.cpp', - 'RemoteAudioDecoder.cpp', - 'RemoteDecoderChild.cpp', - 'RemoteDecoderManagerChild.cpp', - 'RemoteDecoderManagerParent.cpp', - 'RemoteDecoderModule.cpp', - 'RemoteDecoderParent.cpp', - 'RemoteImageHolder.cpp', - 'RemoteMediaData.cpp', - 'RemoteMediaDataDecoder.cpp', - 'RemoteVideoDecoder.cpp', + "RDDChild.cpp", + "RDDParent.cpp", + "RDDProcessHost.cpp", + "RDDProcessImpl.cpp", + "RDDProcessManager.cpp", + "RemoteAudioDecoder.cpp", + "RemoteDecoderChild.cpp", + "RemoteDecoderManagerChild.cpp", + "RemoteDecoderManagerParent.cpp", + "RemoteDecoderModule.cpp", + "RemoteDecoderParent.cpp", + "RemoteImageHolder.cpp", + "RemoteMediaData.cpp", + "RemoteMediaDataDecoder.cpp", + "RemoteVideoDecoder.cpp", ] # so we can include nsMacUtilsImpl.h in RDDParent.cpp for sandboxing LOCAL_INCLUDES += [ - '/xpcom/base', + "/xpcom/base", ] -include('/ipc/chromium/chromium-config.mozbuild') +include("/ipc/chromium/chromium-config.mozbuild") # Add libFuzzer configuration directives -include('/tools/fuzzing/libfuzzer-config.mozbuild') +include("/tools/fuzzing/libfuzzer-config.mozbuild") -FINAL_LIBRARY = 'xul' +FINAL_LIBRARY = "xul" diff --git a/dom/media/mediacapabilities/moz.build b/dom/media/mediacapabilities/moz.build index ce3da942c5712c..bafd3de59c14b3 100644 --- a/dom/media/mediacapabilities/moz.build +++ b/dom/media/mediacapabilities/moz.build @@ 
-4,31 +4,29 @@ # file, You can obtain one at http://mozilla.org/MPL/2.0/. EXPORTS.mozilla.dom += [ - 'MediaCapabilities.h', + "MediaCapabilities.h", ] EXPORTS.mozilla += [ - 'BenchmarkStorageChild.h', - 'BenchmarkStorageParent.h', - 'KeyValueStorage.h', + "BenchmarkStorageChild.h", + "BenchmarkStorageParent.h", + "KeyValueStorage.h", ] EXPORTS += [ - 'DecoderBenchmark.h', + "DecoderBenchmark.h", ] UNIFIED_SOURCES += [ - 'BenchmarkStorageChild.cpp', - 'BenchmarkStorageParent.cpp', - 'DecoderBenchmark.cpp', - 'KeyValueStorage.cpp', - 'MediaCapabilities.cpp', + "BenchmarkStorageChild.cpp", + "BenchmarkStorageParent.cpp", + "DecoderBenchmark.cpp", + "KeyValueStorage.cpp", + "MediaCapabilities.cpp", ] -IPDL_SOURCES += [ - 'PBenchmarkStorage.ipdl' -] +IPDL_SOURCES += ["PBenchmarkStorage.ipdl"] -include('/ipc/chromium/chromium-config.mozbuild') +include("/ipc/chromium/chromium-config.mozbuild") -FINAL_LIBRARY = 'xul' +FINAL_LIBRARY = "xul" diff --git a/dom/media/mediacontrol/moz.build b/dom/media/mediacontrol/moz.build index f0e2e4eee20ff2..54b3c40a36f9c1 100644 --- a/dom/media/mediacontrol/moz.build +++ b/dom/media/mediacontrol/moz.build @@ -4,40 +4,40 @@ # file, You can obtain one at http://mozilla.org/MPL/2.0/. EXPORTS.mozilla.dom += [ - 'AudioFocusManager.h', - 'ContentMediaController.h', - 'ContentPlaybackController.h', - 'FetchImageHelper.h', - 'MediaControlKeyManager.h', - 'MediaControlKeySource.h', - 'MediaController.h', - 'MediaControlService.h', - 'MediaControlUtils.h', - 'MediaPlaybackStatus.h', - 'MediaStatusManager.h', + "AudioFocusManager.h", + "ContentMediaController.h", + "ContentPlaybackController.h", + "FetchImageHelper.h", + "MediaControlKeyManager.h", + "MediaControlKeySource.h", + "MediaController.h", + "MediaControlService.h", + "MediaControlUtils.h", + "MediaPlaybackStatus.h", + "MediaStatusManager.h", ] EXPORTS.ipc += [ - 'MediaControlIPC.h', + "MediaControlIPC.h", ] UNIFIED_SOURCES += [ - 'AudioFocusManager.cpp', - 'ContentMediaController.cpp', - 'ContentPlaybackController.cpp', - 'FetchImageHelper.cpp', - 'MediaControlKeyManager.cpp', - 'MediaControlKeySource.cpp', - 'MediaController.cpp', - 'MediaControlService.cpp', - 'MediaControlUtils.cpp', - 'MediaPlaybackStatus.cpp', - 'MediaStatusManager.cpp', + "AudioFocusManager.cpp", + "ContentMediaController.cpp", + "ContentPlaybackController.cpp", + "FetchImageHelper.cpp", + "MediaControlKeyManager.cpp", + "MediaControlKeySource.cpp", + "MediaController.cpp", + "MediaControlService.cpp", + "MediaControlUtils.cpp", + "MediaPlaybackStatus.cpp", + "MediaStatusManager.cpp", ] -include('/ipc/chromium/chromium-config.mozbuild') +include("/ipc/chromium/chromium-config.mozbuild") -if CONFIG['ENABLE_TESTS']: - DIRS += ['tests/gtest'] +if CONFIG["ENABLE_TESTS"]: + DIRS += ["tests/gtest"] -FINAL_LIBRARY = 'xul' +FINAL_LIBRARY = "xul" diff --git a/dom/media/mediacontrol/tests/gtest/moz.build b/dom/media/mediacontrol/tests/gtest/moz.build index 663883698761b4..ec0c3ae6585c83 100644 --- a/dom/media/mediacontrol/tests/gtest/moz.build +++ b/dom/media/mediacontrol/tests/gtest/moz.build @@ -5,25 +5,22 @@ # file, You can obtain one at http://mozilla.org/MPL/2.0/. 
UNIFIED_SOURCES += [ - 'TestAudioFocusManager.cpp', - 'TestMediaController.cpp', - 'TestMediaControlService.cpp', - 'TestMediaKeysEvent.cpp', + "TestAudioFocusManager.cpp", + "TestMediaController.cpp", + "TestMediaControlService.cpp", + "TestMediaKeysEvent.cpp", ] -if CONFIG['MOZ_APPLEMEDIA']: - UNIFIED_SOURCES += [ - 'TestMediaKeysEventMac.mm', - 'TestMediaKeysEventMediaCenter.mm' - ] +if CONFIG["MOZ_APPLEMEDIA"]: + UNIFIED_SOURCES += ["TestMediaKeysEventMac.mm", "TestMediaKeysEventMediaCenter.mm"] -include('/ipc/chromium/chromium-config.mozbuild') +include("/ipc/chromium/chromium-config.mozbuild") LOCAL_INCLUDES += [ - '/dom/media/mediacontrol', + "/dom/media/mediacontrol", ] -FINAL_LIBRARY = 'xul-gtest' +FINAL_LIBRARY = "xul-gtest" -if CONFIG['CC_TYPE'] in ('clang', 'gcc'): - CXXFLAGS += ['-Wno-error=shadow'] +if CONFIG["CC_TYPE"] in ("clang", "gcc"): + CXXFLAGS += ["-Wno-error=shadow"] diff --git a/dom/media/mediasession/moz.build b/dom/media/mediasession/moz.build index 9e0155ba3252a2..3bb00442622f5c 100644 --- a/dom/media/mediasession/moz.build +++ b/dom/media/mediasession/moz.build @@ -4,19 +4,19 @@ # License, v. 2.0. If a copy of the MPL was not distributed with this # file, You can obtain one at http://mozilla.org/MPL/2.0/. -MOCHITEST_MANIFESTS += ['test/mochitest.ini'] +MOCHITEST_MANIFESTS += ["test/mochitest.ini"] EXPORTS.mozilla.dom += [ - 'MediaMetadata.h', - 'MediaSession.h', - 'MediaSessionIPCUtils.h', + "MediaMetadata.h", + "MediaSession.h", + "MediaSessionIPCUtils.h", ] UNIFIED_SOURCES += [ - 'MediaMetadata.cpp', - 'MediaSession.cpp', + "MediaMetadata.cpp", + "MediaSession.cpp", ] -include('/ipc/chromium/chromium-config.mozbuild') +include("/ipc/chromium/chromium-config.mozbuild") -FINAL_LIBRARY = 'xul' +FINAL_LIBRARY = "xul" diff --git a/dom/media/mediasink/moz.build b/dom/media/mediasink/moz.build index fba2abb269c5c7..6d5f90834eb9e0 100644 --- a/dom/media/mediasink/moz.build +++ b/dom/media/mediasink/moz.build @@ -5,19 +5,17 @@ # file, You can obtain one at http://mozilla.org/MPL/2.0/. UNIFIED_SOURCES += [ - 'AudioSink.cpp', - 'AudioSinkWrapper.cpp', - 'DecodedStream.cpp', - 'VideoSink.cpp', + "AudioSink.cpp", + "AudioSinkWrapper.cpp", + "DecodedStream.cpp", + "VideoSink.cpp", ] -EXPORTS += [ - 'MediaSink.h' -] +EXPORTS += ["MediaSink.h"] -include('/ipc/chromium/chromium-config.mozbuild') +include("/ipc/chromium/chromium-config.mozbuild") -FINAL_LIBRARY = 'xul' +FINAL_LIBRARY = "xul" -if CONFIG['CC_TYPE'] in ('clang', 'gcc'): - CXXFLAGS += ['-Wno-error=shadow'] +if CONFIG["CC_TYPE"] in ("clang", "gcc"): + CXXFLAGS += ["-Wno-error=shadow"] diff --git a/dom/media/mediasource/gtest/moz.build b/dom/media/mediasource/gtest/moz.build index 079a206aace538..474bd156e987bf 100644 --- a/dom/media/mediasource/gtest/moz.build +++ b/dom/media/mediasource/gtest/moz.build @@ -5,13 +5,13 @@ # file, You can obtain one at http://mozilla.org/MPL/2.0/. UNIFIED_SOURCES += [ - 'TestContainerParser.cpp', - 'TestExtractVPXCodecDetails.cpp', + "TestContainerParser.cpp", + "TestExtractVPXCodecDetails.cpp", ] LOCAL_INCLUDES += [ - '/dom/media', - '/dom/media/mediasource', + "/dom/media", + "/dom/media/mediasource", ] -FINAL_LIBRARY = 'xul-gtest' +FINAL_LIBRARY = "xul-gtest" diff --git a/dom/media/mediasource/moz.build b/dom/media/mediasource/moz.build index 5275a7af44c4fb..48e17fbaa681a9 100644 --- a/dom/media/mediasource/moz.build +++ b/dom/media/mediasource/moz.build @@ -3,43 +3,43 @@ # License, v. 2.0. 
If a copy of the MPL was not distributed with this # file, You can obtain one at http://mozilla.org/MPL/2.0/. -MOCHITEST_MANIFESTS += ['test/mochitest.ini'] +MOCHITEST_MANIFESTS += ["test/mochitest.ini"] EXPORTS += [ - 'AsyncEventRunner.h', - 'MediaSourceDecoder.h', - 'MediaSourceDemuxer.h', - 'SourceBufferAttributes.h', - 'SourceBufferTask.h', - 'TrackBuffersManager.h', + "AsyncEventRunner.h", + "MediaSourceDecoder.h", + "MediaSourceDemuxer.h", + "SourceBufferAttributes.h", + "SourceBufferTask.h", + "TrackBuffersManager.h", ] EXPORTS.mozilla.dom += [ - 'MediaSource.h', - 'SourceBuffer.h', - 'SourceBufferList.h', + "MediaSource.h", + "SourceBuffer.h", + "SourceBufferList.h", ] UNIFIED_SOURCES += [ - 'ContainerParser.cpp', - 'MediaSource.cpp', - 'MediaSourceDecoder.cpp', - 'MediaSourceDemuxer.cpp', - 'MediaSourceUtils.cpp', - 'ResourceQueue.cpp', - 'SourceBuffer.cpp', - 'SourceBufferList.cpp', - 'SourceBufferResource.cpp', - 'TrackBuffersManager.cpp', + "ContainerParser.cpp", + "MediaSource.cpp", + "MediaSourceDecoder.cpp", + "MediaSourceDemuxer.cpp", + "MediaSourceUtils.cpp", + "ResourceQueue.cpp", + "SourceBuffer.cpp", + "SourceBufferList.cpp", + "SourceBufferResource.cpp", + "TrackBuffersManager.cpp", ] TEST_DIRS += [ - 'gtest', + "gtest", ] -include('/ipc/chromium/chromium-config.mozbuild') +include("/ipc/chromium/chromium-config.mozbuild") -FINAL_LIBRARY = 'xul' +FINAL_LIBRARY = "xul" -if CONFIG['CC_TYPE'] in ('clang', 'gcc'): - CXXFLAGS += ['-Wno-error=shadow'] +if CONFIG["CC_TYPE"] in ("clang", "gcc"): + CXXFLAGS += ["-Wno-error=shadow"] diff --git a/dom/media/moz.build b/dom/media/moz.build index 8b0c9b340d0125..7d93eb1b0cecee 100644 --- a/dom/media/moz.build +++ b/dom/media/moz.build @@ -3,406 +3,400 @@ # This Source Code Form is subject to the terms of the Mozilla Public # License, v. 2.0. If a copy of the MPL was not distributed with this # file, You can obtain one at http://mozilla.org/MPL/2.0/. 
-include('/dom/media/webrtc/third_party_build/webrtc.mozbuild') +include("/dom/media/webrtc/third_party_build/webrtc.mozbuild") -with Files('*'): - BUG_COMPONENT = ('Core', 'Audio/Video') +with Files("*"): + BUG_COMPONENT = ("Core", "Audio/Video") -with Files('test/**'): - BUG_COMPONENT = ('Core', 'Audio/Video: Playback') +with Files("test/**"): + BUG_COMPONENT = ("Core", "Audio/Video: Playback") -with Files('gtest/TestGMP*'): - BUG_COMPONENT = ('Core', 'Audio/Video: GMP') +with Files("gtest/TestGMP*"): + BUG_COMPONENT = ("Core", "Audio/Video: GMP") -with Files('tests/**'): - BUG_COMPONENT = ('Core', 'WebRTC') +with Files("tests/**"): + BUG_COMPONENT = ("Core", "WebRTC") -component_signaling = ('Core', 'WebRTC: Signaling') -with Files('IdpSandbox.jsm'): +component_signaling = ("Core", "WebRTC: Signaling") +with Files("IdpSandbox.jsm"): BUG_COMPONENT = component_signaling -with Files('PeerConnection*'): +with Files("PeerConnection*"): BUG_COMPONENT = component_signaling -component_av = ('Core', 'WebRTC: Audio/Video') -with Files('GetUserMedia*'): +component_av = ("Core", "WebRTC: Audio/Video") +with Files("GetUserMedia*"): BUG_COMPONENT = component_av DIRS += [ - 'doctor', - 'eme', - 'encoder', - 'fake-cdm', - 'flac', - 'gmp', - 'gmp-plugin-openh264', - 'imagecapture', - 'ipc', - 'mediacapabilities', - 'mediacontrol', - 'mediasink', - 'mediasource', - 'mediasession', - 'mp3', - 'ogg', - 'platforms', - 'systemservices', - 'wave', - 'webaudio', - 'webm', - 'webrtc', - 'webspeech', - 'webvtt', + "doctor", + "eme", + "encoder", + "fake-cdm", + "flac", + "gmp", + "gmp-plugin-openh264", + "imagecapture", + "ipc", + "mediacapabilities", + "mediacontrol", + "mediasink", + "mediasource", + "mediasession", + "mp3", + "ogg", + "platforms", + "systemservices", + "wave", + "webaudio", + "webm", + "webrtc", + "webspeech", + "webvtt", ] -if CONFIG['MOZ_ANDROID_HLS_SUPPORT']: - DIRS += ['hls'] +if CONFIG["MOZ_ANDROID_HLS_SUPPORT"]: + DIRS += ["hls"] -if CONFIG['MOZ_FMP4']: - DIRS += ['mp4'] +if CONFIG["MOZ_FMP4"]: + DIRS += ["mp4"] -if CONFIG['MOZ_WEBRTC']: - DIRS += ['bridge'] +if CONFIG["MOZ_WEBRTC"]: + DIRS += ["bridge"] TEST_DIRS += [ - 'gtest', + "gtest", ] # Add libFuzzer configuration directives -include('/tools/fuzzing/libfuzzer-config.mozbuild') +include("/tools/fuzzing/libfuzzer-config.mozbuild") -if CONFIG['FUZZING_INTERFACES']: - TEST_DIRS += [ - 'fuzz' - ] +if CONFIG["FUZZING_INTERFACES"]: + TEST_DIRS += ["fuzz"] -if CONFIG['MOZ_WEBRTC_SIGNALING']: - if CONFIG['FUZZING_INTERFACES']: - TEST_DIRS += [ - 'webrtc/tests/fuzztests' - ] +if CONFIG["MOZ_WEBRTC_SIGNALING"]: + if CONFIG["FUZZING_INTERFACES"]: + TEST_DIRS += ["webrtc/tests/fuzztests"] MOCHITEST_MANIFESTS += [ - 'test/mochitest.ini', - 'webrtc/tests/mochitests/identity/mochitest.ini', + "test/mochitest.ini", + "webrtc/tests/mochitests/identity/mochitest.ini", ] BROWSER_CHROME_MANIFESTS += [ - 'mediacontrol/tests/browser.ini', - 'mediasession/test/browser.ini', + "mediacontrol/tests/browser.ini", + "mediasession/test/browser.ini", ] -if CONFIG['MOZ_WEBRTC']: - MOCHITEST_MANIFESTS += ['webrtc/tests/mochitests/mochitest.ini'] +if CONFIG["MOZ_WEBRTC"]: + MOCHITEST_MANIFESTS += ["webrtc/tests/mochitests/mochitest.ini"] XPIDL_SOURCES += [ - 'nsIAudioDeviceInfo.idl', - 'nsIAutoplay.idl', - 'nsIDOMNavigatorUserMedia.idl', - 'nsIMediaManager.idl', + "nsIAudioDeviceInfo.idl", + "nsIAutoplay.idl", + "nsIDOMNavigatorUserMedia.idl", + "nsIMediaManager.idl", ] -XPIDL_MODULE = 'dom_media' +XPIDL_MODULE = "dom_media" EXPORTS += [ - 
'../../third_party/rust/audio_thread_priority/audio_thread_priority.h', - 'ADTSDecoder.h', - 'ADTSDemuxer.h', - 'AsyncLogger.h', - 'AudibilityMonitor.h', - 'AudioBufferUtils.h', - 'AudioChannelFormat.h', - 'AudioCompactor.h', - 'AudioConfig.h', - 'AudioConverter.h', - 'AudioDeviceInfo.h', - 'AudioDriftCorrection.h', - 'AudioMixer.h', - 'AudioPacketizer.h', - 'AudioRingBuffer.h', - 'AudioSampleFormat.h', - 'AudioSegment.h', - 'AudioStream.h', - 'AudioThreadRegistry.h', - 'AutoplayPolicy.h', - 'BackgroundVideoDecodingPermissionObserver.h', - 'Benchmark.h', - 'BitReader.h', - 'BitWriter.h', - 'BufferMediaResource.h', - 'BufferReader.h', - 'ByteWriter.h', - 'ChannelMediaDecoder.h', - 'CrossGraphPort.h', - 'CubebUtils.h', - 'DecoderTraits.h', - 'DOMMediaStream.h', - 'DriftCompensation.h', - 'DynamicResampler.h', - 'FileBlockCache.h', - 'ForwardedInputTrack.h', - 'FrameStatistics.h', - 'GVAutoplayPermissionRequest.h', - 'GVAutoplayRequestStatusIPC.h', - 'GVAutoplayRequestUtils.h', - 'ImageToI420.h', - 'Intervals.h', - 'MediaCache.h', - 'MediaContainerType.h', - 'MediaData.h', - 'MediaDataDemuxer.h', - 'MediaDecoder.h', - 'MediaDecoderOwner.h', - 'MediaDecoderStateMachine.h', - 'MediaEventSource.h', - 'MediaFormatReader.h', - 'MediaInfo.h', - 'MediaMetadataManager.h', - 'MediaMIMETypes.h', - 'MediaPlaybackDelayPolicy.h', - 'MediaPromiseDefs.h', - 'MediaQueue.h', - 'MediaRecorder.h', - 'MediaResource.h', - 'MediaResourceCallback.h', - 'MediaResult.h', - 'MediaSegment.h', - 'MediaShutdownManager.h', - 'MediaSpan.h', - 'MediaStatistics.h', - 'MediaStreamWindowCapturer.h', - 'MediaTimer.h', - 'MediaTrack.h', - 'MediaTrackGraph.h', - 'MediaTrackList.h', - 'MediaTrackListener.h', - 'MemoryBlockCache.h', - 'MPSCQueue.h', - 'nsIDocumentActivity.h', - 'PrincipalChangeObserver.h', - 'PrincipalHandle.h', - 'QueueObject.h', - 'SeekJob.h', - 'SeekTarget.h', - 'SelfRef.h', - 'SharedBuffer.h', - 'ThreadPoolCOMListener.h', - 'TimeUnits.h', - 'Tracing.h', - 'VideoFrameContainer.h', - 'VideoLimits.h', - 'VideoSegment.h', - 'VideoUtils.h', - 'VorbisUtils.h', - 'WavDumper.h', - 'XiphExtradata.h', + "../../third_party/rust/audio_thread_priority/audio_thread_priority.h", + "ADTSDecoder.h", + "ADTSDemuxer.h", + "AsyncLogger.h", + "AudibilityMonitor.h", + "AudioBufferUtils.h", + "AudioChannelFormat.h", + "AudioCompactor.h", + "AudioConfig.h", + "AudioConverter.h", + "AudioDeviceInfo.h", + "AudioDriftCorrection.h", + "AudioMixer.h", + "AudioPacketizer.h", + "AudioRingBuffer.h", + "AudioSampleFormat.h", + "AudioSegment.h", + "AudioStream.h", + "AudioThreadRegistry.h", + "AutoplayPolicy.h", + "BackgroundVideoDecodingPermissionObserver.h", + "Benchmark.h", + "BitReader.h", + "BitWriter.h", + "BufferMediaResource.h", + "BufferReader.h", + "ByteWriter.h", + "ChannelMediaDecoder.h", + "CrossGraphPort.h", + "CubebUtils.h", + "DecoderTraits.h", + "DOMMediaStream.h", + "DriftCompensation.h", + "DynamicResampler.h", + "FileBlockCache.h", + "ForwardedInputTrack.h", + "FrameStatistics.h", + "GVAutoplayPermissionRequest.h", + "GVAutoplayRequestStatusIPC.h", + "GVAutoplayRequestUtils.h", + "ImageToI420.h", + "Intervals.h", + "MediaCache.h", + "MediaContainerType.h", + "MediaData.h", + "MediaDataDemuxer.h", + "MediaDecoder.h", + "MediaDecoderOwner.h", + "MediaDecoderStateMachine.h", + "MediaEventSource.h", + "MediaFormatReader.h", + "MediaInfo.h", + "MediaMetadataManager.h", + "MediaMIMETypes.h", + "MediaPlaybackDelayPolicy.h", + "MediaPromiseDefs.h", + "MediaQueue.h", + "MediaRecorder.h", + "MediaResource.h", + 
"MediaResourceCallback.h", + "MediaResult.h", + "MediaSegment.h", + "MediaShutdownManager.h", + "MediaSpan.h", + "MediaStatistics.h", + "MediaStreamWindowCapturer.h", + "MediaTimer.h", + "MediaTrack.h", + "MediaTrackGraph.h", + "MediaTrackList.h", + "MediaTrackListener.h", + "MemoryBlockCache.h", + "MPSCQueue.h", + "nsIDocumentActivity.h", + "PrincipalChangeObserver.h", + "PrincipalHandle.h", + "QueueObject.h", + "SeekJob.h", + "SeekTarget.h", + "SelfRef.h", + "SharedBuffer.h", + "ThreadPoolCOMListener.h", + "TimeUnits.h", + "Tracing.h", + "VideoFrameContainer.h", + "VideoLimits.h", + "VideoSegment.h", + "VideoUtils.h", + "VorbisUtils.h", + "WavDumper.h", + "XiphExtradata.h", ] EXPORTS.mozilla += [ - 'MediaManager.h', - 'UnderrunHandler.h', + "MediaManager.h", + "UnderrunHandler.h", ] EXPORTS.mozilla.media.webrtc += [ - 'webrtc/WebrtcGlobal.h', - 'webrtc/WebrtcIPCTraits.h', + "webrtc/WebrtcGlobal.h", + "webrtc/WebrtcIPCTraits.h", ] -if not CONFIG['MOZ_WEBRTC']: - EXPORTS.transport += [ - 'webrtc/transport/runnable_utils.h', - ] +if not CONFIG["MOZ_WEBRTC"]: + EXPORTS.transport += [ + "webrtc/transport/runnable_utils.h", + ] EXPORTS.mozilla.dom += [ - 'AudioDeviceInfo.h', - 'AudioStreamTrack.h', - 'AudioTrack.h', - 'AudioTrackList.h', - 'CanvasCaptureMediaStream.h', - 'GetUserMediaRequest.h', - 'MediaDeviceInfo.h', - 'MediaDevices.h', - 'MediaStreamError.h', - 'MediaStreamTrack.h', - 'TextTrack.h', - 'TextTrackCue.h', - 'TextTrackCueList.h', - 'TextTrackList.h', - 'TextTrackRegion.h', - 'VideoPlaybackQuality.h', - 'VideoStreamTrack.h', - 'VideoTrack.h', - 'VideoTrackList.h', - 'webrtc/MediaTransportChild.h', - 'webrtc/MediaTransportParent.h', + "AudioDeviceInfo.h", + "AudioStreamTrack.h", + "AudioTrack.h", + "AudioTrackList.h", + "CanvasCaptureMediaStream.h", + "GetUserMediaRequest.h", + "MediaDeviceInfo.h", + "MediaDevices.h", + "MediaStreamError.h", + "MediaStreamTrack.h", + "TextTrack.h", + "TextTrackCue.h", + "TextTrackCueList.h", + "TextTrackList.h", + "TextTrackRegion.h", + "VideoPlaybackQuality.h", + "VideoStreamTrack.h", + "VideoTrack.h", + "VideoTrackList.h", + "webrtc/MediaTransportChild.h", + "webrtc/MediaTransportParent.h", ] UNIFIED_SOURCES += [ - 'ADTSDecoder.cpp', - 'ADTSDemuxer.cpp', - 'AudioCaptureTrack.cpp', - 'AudioChannelFormat.cpp', - 'AudioCompactor.cpp', - 'AudioConfig.cpp', - 'AudioConverter.cpp', - 'AudioDeviceInfo.cpp', - 'AudioRingBuffer.cpp', - 'AudioSegment.cpp', - 'AudioStream.cpp', - 'AudioStreamTrack.cpp', - 'AudioTrack.cpp', - 'AudioTrackList.cpp', - 'AutoplayPolicy.cpp', - 'BackgroundVideoDecodingPermissionObserver.cpp', - 'BaseMediaResource.cpp', - 'Benchmark.cpp', - 'BitReader.cpp', - 'BitWriter.cpp', - 'CanvasCaptureMediaStream.cpp', - 'ChannelMediaDecoder.cpp', - 'ChannelMediaResource.cpp', - 'CloneableWithRangeMediaResource.cpp', - 'CrossGraphPort.cpp', - 'DOMMediaStream.cpp', - 'DynamicResampler.cpp', - 'FileBlockCache.cpp', - 'FileMediaResource.cpp', - 'ForwardedInputTrack.cpp', - 'GetUserMediaRequest.cpp', - 'GraphDriver.cpp', - 'GraphRunner.cpp', - 'GVAutoplayPermissionRequest.cpp', - 'ImageToI420.cpp', - 'MediaCache.cpp', - 'MediaContainerType.cpp', - 'MediaData.cpp', - 'MediaDecoder.cpp', - 'MediaDecoderStateMachine.cpp', - 'MediaDeviceInfo.cpp', - 'MediaDevices.cpp', - 'MediaFormatReader.cpp', - 'MediaInfo.cpp', - 'MediaManager.cpp', - 'MediaMIMETypes.cpp', - 'MediaPlaybackDelayPolicy.cpp', - 'MediaRecorder.cpp', - 'MediaResource.cpp', - 'MediaShutdownManager.cpp', - 'MediaStreamError.cpp', - 'MediaStreamTrack.cpp', - 
'MediaStreamWindowCapturer.cpp', - 'MediaTimer.cpp', - 'MediaTrack.cpp', - 'MediaTrackGraph.cpp', - 'MediaTrackList.cpp', - 'MediaTrackListener.cpp', - 'MemoryBlockCache.cpp', - 'QueueObject.cpp', - 'ReaderProxy.cpp', - 'SeekJob.cpp', - 'TextTrack.cpp', - 'TextTrackCue.cpp', - 'TextTrackCueList.cpp', - 'TextTrackList.cpp', - 'TextTrackRegion.cpp', - 'Tracing.cpp', - 'VideoFrameContainer.cpp', - 'VideoPlaybackQuality.cpp', - 'VideoSegment.cpp', - 'VideoStreamTrack.cpp', - 'VideoTrack.cpp', - 'VideoTrackList.cpp', - 'VideoUtils.cpp', - 'WebVTTListener.cpp', - 'XiphExtradata.cpp', + "ADTSDecoder.cpp", + "ADTSDemuxer.cpp", + "AudioCaptureTrack.cpp", + "AudioChannelFormat.cpp", + "AudioCompactor.cpp", + "AudioConfig.cpp", + "AudioConverter.cpp", + "AudioDeviceInfo.cpp", + "AudioRingBuffer.cpp", + "AudioSegment.cpp", + "AudioStream.cpp", + "AudioStreamTrack.cpp", + "AudioTrack.cpp", + "AudioTrackList.cpp", + "AutoplayPolicy.cpp", + "BackgroundVideoDecodingPermissionObserver.cpp", + "BaseMediaResource.cpp", + "Benchmark.cpp", + "BitReader.cpp", + "BitWriter.cpp", + "CanvasCaptureMediaStream.cpp", + "ChannelMediaDecoder.cpp", + "ChannelMediaResource.cpp", + "CloneableWithRangeMediaResource.cpp", + "CrossGraphPort.cpp", + "DOMMediaStream.cpp", + "DynamicResampler.cpp", + "FileBlockCache.cpp", + "FileMediaResource.cpp", + "ForwardedInputTrack.cpp", + "GetUserMediaRequest.cpp", + "GraphDriver.cpp", + "GraphRunner.cpp", + "GVAutoplayPermissionRequest.cpp", + "ImageToI420.cpp", + "MediaCache.cpp", + "MediaContainerType.cpp", + "MediaData.cpp", + "MediaDecoder.cpp", + "MediaDecoderStateMachine.cpp", + "MediaDeviceInfo.cpp", + "MediaDevices.cpp", + "MediaFormatReader.cpp", + "MediaInfo.cpp", + "MediaManager.cpp", + "MediaMIMETypes.cpp", + "MediaPlaybackDelayPolicy.cpp", + "MediaRecorder.cpp", + "MediaResource.cpp", + "MediaShutdownManager.cpp", + "MediaStreamError.cpp", + "MediaStreamTrack.cpp", + "MediaStreamWindowCapturer.cpp", + "MediaTimer.cpp", + "MediaTrack.cpp", + "MediaTrackGraph.cpp", + "MediaTrackList.cpp", + "MediaTrackListener.cpp", + "MemoryBlockCache.cpp", + "QueueObject.cpp", + "ReaderProxy.cpp", + "SeekJob.cpp", + "TextTrack.cpp", + "TextTrackCue.cpp", + "TextTrackCueList.cpp", + "TextTrackList.cpp", + "TextTrackRegion.cpp", + "Tracing.cpp", + "VideoFrameContainer.cpp", + "VideoPlaybackQuality.cpp", + "VideoSegment.cpp", + "VideoStreamTrack.cpp", + "VideoTrack.cpp", + "VideoTrackList.cpp", + "VideoUtils.cpp", + "WebVTTListener.cpp", + "XiphExtradata.cpp", ] -if CONFIG['OS_TARGET'] == 'Linux': - UNIFIED_SOURCES += [ 'UnderrunHandlerLinux.cpp' ] +if CONFIG["OS_TARGET"] == "Linux": + UNIFIED_SOURCES += ["UnderrunHandlerLinux.cpp"] else: - UNIFIED_SOURCES += [ 'UnderrunHandlerNoop.cpp'] - -if CONFIG['OS_TARGET'] == 'WINNT': - EXPORTS.mozilla.audio += [ - 'AudioNotificationReceiver.h', - 'AudioNotificationSender.h', - ] - SOURCES += [ - 'AudioNotificationReceiver.cpp', - 'AudioNotificationSender.cpp', - 'ThreadPoolCOMListener.cpp', - ] + UNIFIED_SOURCES += ["UnderrunHandlerNoop.cpp"] + +if CONFIG["OS_TARGET"] == "WINNT": + EXPORTS.mozilla.audio += [ + "AudioNotificationReceiver.h", + "AudioNotificationSender.h", + ] + SOURCES += [ + "AudioNotificationReceiver.cpp", + "AudioNotificationSender.cpp", + "ThreadPoolCOMListener.cpp", + ] # CubebUtils.cpp needs to be built separately due to what appears to be some kind # of compiler bug on Android 4.2 x86 opt. See bug 1408459. # DecoderTraits.cpp needs to be built separately because of Mac OS X headers. 
SOURCES += [ - 'CubebUtils.cpp', - 'DecoderTraits.cpp', + "CubebUtils.cpp", + "DecoderTraits.cpp", ] # Some codec-related code uses multi-character constants, which GCC and clang # warn about. Suppress turning this warning into an error. -SOURCES['DecoderTraits.cpp'].flags += ['-Wno-error=multichar'] +SOURCES["DecoderTraits.cpp"].flags += ["-Wno-error=multichar"] -if CONFIG['MOZ_WEBRTC']: +if CONFIG["MOZ_WEBRTC"]: XPCOM_MANIFESTS += [ - 'components.conf', + "components.conf", ] EXTRA_JS_MODULES.media += [ - 'PeerConnection.jsm', + "PeerConnection.jsm", ] EXTRA_JS_MODULES.media += [ - 'IdpSandbox.jsm', - 'PeerConnectionIdp.jsm', + "IdpSandbox.jsm", + "PeerConnectionIdp.jsm", ] LOCAL_INCLUDES += [ - '/caps', - '/docshell/base', - '/dom/base', - '/layout/generic', - '/layout/xul', - '/media/libyuv/libyuv/include', - '/netwerk/base', - '/toolkit/content/tests/browser/', + "/caps", + "/docshell/base", + "/dom/base", + "/layout/generic", + "/layout/xul", + "/media/libyuv/libyuv/include", + "/netwerk/base", + "/toolkit/content/tests/browser/", ] -if CONFIG['MOZ_WEBRTC']: +if CONFIG["MOZ_WEBRTC"]: LOCAL_INCLUDES += [ - '/dom/media/webrtc/common', - '/third_party/libwebrtc', - '/third_party/libwebrtc/webrtc', + "/dom/media/webrtc/common", + "/third_party/libwebrtc", + "/third_party/libwebrtc/webrtc", ] -DEFINES['MOZILLA_INTERNAL_API'] = True -DEFINES['TRACING'] = True +DEFINES["MOZILLA_INTERNAL_API"] = True +DEFINES["TRACING"] = True -if CONFIG['MOZ_ANDROID_HLS_SUPPORT']: - DEFINES['MOZ_ANDROID_HLS_SUPPORT'] = True +if CONFIG["MOZ_ANDROID_HLS_SUPPORT"]: + DEFINES["MOZ_ANDROID_HLS_SUPPORT"] = True -if CONFIG['COMPILE_ENVIRONMENT']: +if CONFIG["COMPILE_ENVIRONMENT"]: EXPORTS += [ - '!audioipc_client_ffi_generated.h', - '!audioipc_server_ffi_generated.h', + "!audioipc_client_ffi_generated.h", + "!audioipc_server_ffi_generated.h", ] - CbindgenHeader('audioipc_client_ffi_generated.h', - inputs=['/media/audioipc/client']) + CbindgenHeader("audioipc_client_ffi_generated.h", inputs=["/media/audioipc/client"]) - CbindgenHeader('audioipc_server_ffi_generated.h', - inputs=['/media/audioipc/server']) + CbindgenHeader("audioipc_server_ffi_generated.h", inputs=["/media/audioipc/server"]) -include('/ipc/chromium/chromium-config.mozbuild') +include("/ipc/chromium/chromium-config.mozbuild") # Suppress some GCC warnings being treated as errors: # - about attributes on forward declarations for types that are already # defined, which complains about an important MOZ_EXPORT for android::AString -if CONFIG['CC_TYPE'] in ('clang', 'gcc'): +if CONFIG["CC_TYPE"] in ("clang", "gcc"): CXXFLAGS += [ - '-Wno-error=attributes', - '-Wno-error=shadow', + "-Wno-error=attributes", + "-Wno-error=shadow", ] -FINAL_LIBRARY = 'xul' +FINAL_LIBRARY = "xul" diff --git a/dom/media/mp3/moz.build b/dom/media/mp3/moz.build index a35ed8344eba87..70031cc2b4269d 100644 --- a/dom/media/mp3/moz.build +++ b/dom/media/mp3/moz.build @@ -5,18 +5,18 @@ # file, You can obtain one at http://mozilla.org/MPL/2.0/. 
EXPORTS += [ - 'MP3Decoder.h', - 'MP3Demuxer.h', - 'MP3FrameParser.h', + "MP3Decoder.h", + "MP3Demuxer.h", + "MP3FrameParser.h", ] UNIFIED_SOURCES += [ - 'MP3Decoder.cpp', - 'MP3Demuxer.cpp', - 'MP3FrameParser.cpp', + "MP3Decoder.cpp", + "MP3Demuxer.cpp", + "MP3FrameParser.cpp", ] -FINAL_LIBRARY = 'xul' +FINAL_LIBRARY = "xul" # Add libFuzzer configuration directives -include('/tools/fuzzing/libfuzzer-config.mozbuild') +include("/tools/fuzzing/libfuzzer-config.mozbuild") diff --git a/dom/media/mp4/moz.build b/dom/media/mp4/moz.build index a1ec61bd93f3a4..cf93a172db60e5 100644 --- a/dom/media/mp4/moz.build +++ b/dom/media/mp4/moz.build @@ -5,41 +5,41 @@ # file, You can obtain one at http://mozilla.org/MPL/2.0/. EXPORTS += [ - 'Atom.h', - 'AtomType.h', - 'Box.h', - 'BufferStream.h', - 'ByteStream.h', - 'DecoderData.h', - 'Index.h', - 'MoofParser.h', - 'MP4Decoder.h', - 'MP4Demuxer.h', - 'MP4Interval.h', - 'MP4Metadata.h', - 'ResourceStream.h', - 'SinfParser.h', + "Atom.h", + "AtomType.h", + "Box.h", + "BufferStream.h", + "ByteStream.h", + "DecoderData.h", + "Index.h", + "MoofParser.h", + "MP4Decoder.h", + "MP4Demuxer.h", + "MP4Interval.h", + "MP4Metadata.h", + "ResourceStream.h", + "SinfParser.h", ] UNIFIED_SOURCES += [ - 'Box.cpp', - 'BufferStream.cpp', - 'DecoderData.cpp', - 'Index.cpp', - 'MoofParser.cpp', - 'MP4Decoder.cpp', - 'MP4Demuxer.cpp', - 'MP4Metadata.cpp', - 'ResourceStream.cpp', - 'SinfParser.cpp', + "Box.cpp", + "BufferStream.cpp", + "DecoderData.cpp", + "Index.cpp", + "MoofParser.cpp", + "MP4Decoder.cpp", + "MP4Demuxer.cpp", + "MP4Metadata.cpp", + "ResourceStream.cpp", + "SinfParser.cpp", ] -FINAL_LIBRARY = 'xul' +FINAL_LIBRARY = "xul" # Suppress warnings for now. CXXFLAGS += [ - '-Wno-sign-compare', + "-Wno-sign-compare", ] # Add libFuzzer configuration directives -include('/tools/fuzzing/libfuzzer-config.mozbuild') +include("/tools/fuzzing/libfuzzer-config.mozbuild") diff --git a/dom/media/ogg/moz.build b/dom/media/ogg/moz.build index ab430f6824c32a..6fc5421ea355a2 100644 --- a/dom/media/ogg/moz.build +++ b/dom/media/ogg/moz.build @@ -5,26 +5,26 @@ # file, You can obtain one at http://mozilla.org/MPL/2.0/. EXPORTS += [ - 'OggCodecState.h', - 'OggCodecStore.h', - 'OggDecoder.h', - 'OggDemuxer.h', - 'OggRLBox.h', - 'OggRLBoxTypes.h', - 'OggWriter.h', - 'OpusParser.h', + "OggCodecState.h", + "OggCodecStore.h", + "OggDecoder.h", + "OggDemuxer.h", + "OggRLBox.h", + "OggRLBoxTypes.h", + "OggWriter.h", + "OpusParser.h", ] UNIFIED_SOURCES += [ - 'OggCodecState.cpp', - 'OggCodecStore.cpp', - 'OggDecoder.cpp', - 'OggDemuxer.cpp', - 'OggWriter.cpp', - 'OpusParser.cpp', + "OggCodecState.cpp", + "OggCodecStore.cpp", + "OggDecoder.cpp", + "OggDemuxer.cpp", + "OggWriter.cpp", + "OpusParser.cpp", ] -FINAL_LIBRARY = 'xul' +FINAL_LIBRARY = "xul" # Add libFuzzer configuration directives -include('/tools/fuzzing/libfuzzer-config.mozbuild') +include("/tools/fuzzing/libfuzzer-config.mozbuild") diff --git a/dom/media/platforms/agnostic/bytestreams/gtest/moz.build b/dom/media/platforms/agnostic/bytestreams/gtest/moz.build index fe90948e44e28e..be0bb9b3ae4adb 100644 --- a/dom/media/platforms/agnostic/bytestreams/gtest/moz.build +++ b/dom/media/platforms/agnostic/bytestreams/gtest/moz.build @@ -5,7 +5,7 @@ # file, You can obtain one at http://mozilla.org/MPL/2.0/. 
UNIFIED_SOURCES += [ - 'TestAnnexB.cpp', + "TestAnnexB.cpp", ] -FINAL_LIBRARY = 'xul-gtest' +FINAL_LIBRARY = "xul-gtest" diff --git a/dom/media/platforms/agnostic/bytestreams/moz.build b/dom/media/platforms/agnostic/bytestreams/moz.build index dcf785c893fab0..225cb427f86815 100644 --- a/dom/media/platforms/agnostic/bytestreams/moz.build +++ b/dom/media/platforms/agnostic/bytestreams/moz.build @@ -8,28 +8,28 @@ with Files("**"): BUG_COMPONENT = ("Core", "Audio/Video: Playback") TEST_DIRS += [ - 'gtest', + "gtest", ] EXPORTS += [ - 'Adts.h', - 'AnnexB.h', - 'H264.h', + "Adts.h", + "AnnexB.h", + "H264.h", ] UNIFIED_SOURCES += [ - 'Adts.cpp', - 'AnnexB.cpp', - 'H264.cpp', + "Adts.cpp", + "AnnexB.cpp", + "H264.cpp", ] LOCAL_INCLUDES += [ - '../../../mp4/', + "../../../mp4/", ] -FINAL_LIBRARY = 'xul' +FINAL_LIBRARY = "xul" # Suppress warnings for now. CXXFLAGS += [ - '-Wno-sign-compare', + "-Wno-sign-compare", ] diff --git a/dom/media/platforms/agnostic/eme/moz.build b/dom/media/platforms/agnostic/eme/moz.build index ac62bb0d9cab05..34f0007b3bcc38 100644 --- a/dom/media/platforms/agnostic/eme/moz.build +++ b/dom/media/platforms/agnostic/eme/moz.build @@ -5,18 +5,18 @@ # file, You can obtain one at http://mozilla.org/MPL/2.0/. EXPORTS += [ - 'ChromiumCDMVideoDecoder.h', - 'DecryptThroughputLimit.h', - 'EMEDecoderModule.h', - 'SamplesWaitingForKey.h', + "ChromiumCDMVideoDecoder.h", + "DecryptThroughputLimit.h", + "EMEDecoderModule.h", + "SamplesWaitingForKey.h", ] UNIFIED_SOURCES += [ - 'ChromiumCDMVideoDecoder.cpp', - 'EMEDecoderModule.cpp', - 'SamplesWaitingForKey.cpp', + "ChromiumCDMVideoDecoder.cpp", + "EMEDecoderModule.cpp", + "SamplesWaitingForKey.cpp", ] -include('/ipc/chromium/chromium-config.mozbuild') +include("/ipc/chromium/chromium-config.mozbuild") -FINAL_LIBRARY = 'xul' +FINAL_LIBRARY = "xul" diff --git a/dom/media/platforms/agnostic/gmp/moz.build b/dom/media/platforms/agnostic/gmp/moz.build index 0a514d986b8121..2ef4a550a72d7d 100644 --- a/dom/media/platforms/agnostic/gmp/moz.build +++ b/dom/media/platforms/agnostic/gmp/moz.build @@ -5,16 +5,16 @@ # file, You can obtain one at http://mozilla.org/MPL/2.0/. EXPORTS += [ - 'GMPDecoderModule.h', - 'GMPVideoDecoder.h', + "GMPDecoderModule.h", + "GMPVideoDecoder.h", ] UNIFIED_SOURCES += [ - 'GMPDecoderModule.cpp', - 'GMPVideoDecoder.cpp', + "GMPDecoderModule.cpp", + "GMPVideoDecoder.cpp", ] # GMPVideoEncodedFrameImpl.h needs IPC -include('/ipc/chromium/chromium-config.mozbuild') +include("/ipc/chromium/chromium-config.mozbuild") -FINAL_LIBRARY = 'xul' +FINAL_LIBRARY = "xul" diff --git a/dom/media/platforms/ffmpeg/ffvpx/moz.build b/dom/media/platforms/ffmpeg/ffvpx/moz.build index 1b42a9499c434e..363a0facee9ab3 100644 --- a/dom/media/platforms/ffmpeg/ffvpx/moz.build +++ b/dom/media/platforms/ffmpeg/ffvpx/moz.build @@ -4,36 +4,36 @@ # License, v. 2.0. If a copy of the MPL was not distributed with this # file, You can obtain one at http://mozilla.org/MPL/2.0/. 
-LOCAL_INCLUDES += ['/xpcom/build'] +LOCAL_INCLUDES += ["/xpcom/build"] EXPORTS += [ - 'FFVPXRuntimeLinker.h', + "FFVPXRuntimeLinker.h", ] UNIFIED_SOURCES += [ - '../FFmpegAudioDecoder.cpp', - '../FFmpegDataDecoder.cpp', - '../FFmpegDecoderModule.cpp', - '../FFmpegVideoDecoder.cpp', + "../FFmpegAudioDecoder.cpp", + "../FFmpegDataDecoder.cpp", + "../FFmpegDecoderModule.cpp", + "../FFmpegVideoDecoder.cpp", ] SOURCES += [ - 'FFVPXRuntimeLinker.cpp', + "FFVPXRuntimeLinker.cpp", ] LOCAL_INCLUDES += [ - '..', - '../ffmpeg58/include', + "..", + "../ffmpeg58/include", ] -CXXFLAGS += [ '-Wno-deprecated-declarations' ] -if CONFIG['CC_TYPE'] == 'clang': - CXXFLAGS += [ - '-Wno-unknown-attributes', - ] -if CONFIG['CC_TYPE'] == 'gcc': - CXXFLAGS += [ - '-Wno-attributes', - ] +CXXFLAGS += ["-Wno-deprecated-declarations"] +if CONFIG["CC_TYPE"] == "clang": + CXXFLAGS += [ + "-Wno-unknown-attributes", + ] +if CONFIG["CC_TYPE"] == "gcc": + CXXFLAGS += [ + "-Wno-attributes", + ] -DEFINES['FFVPX_VERSION'] = 46465650 -DEFINES['USING_MOZFFVPX'] = True +DEFINES["FFVPX_VERSION"] = 46465650 +DEFINES["USING_MOZFFVPX"] = True -FINAL_LIBRARY = 'xul' +FINAL_LIBRARY = "xul" diff --git a/dom/media/platforms/ffmpeg/moz.build b/dom/media/platforms/ffmpeg/moz.build index 12388a12926675..6bb549f3453f44 100644 --- a/dom/media/platforms/ffmpeg/moz.build +++ b/dom/media/platforms/ffmpeg/moz.build @@ -5,22 +5,22 @@ # file, You can obtain one at http://mozilla.org/MPL/2.0/. EXPORTS += [ - 'FFmpegRuntimeLinker.h', + "FFmpegRuntimeLinker.h", ] DIRS += [ - 'libav53', - 'libav54', - 'libav55', - 'ffmpeg57', - 'ffmpeg58', + "libav53", + "libav54", + "libav55", + "ffmpeg57", + "ffmpeg58", ] UNIFIED_SOURCES += [ - 'FFmpegRuntimeLinker.cpp', + "FFmpegRuntimeLinker.cpp", ] -if CONFIG['MOZ_WAYLAND']: - include('/ipc/chromium/chromium-config.mozbuild') +if CONFIG["MOZ_WAYLAND"]: + include("/ipc/chromium/chromium-config.mozbuild") -FINAL_LIBRARY = 'xul' +FINAL_LIBRARY = "xul" diff --git a/dom/media/platforms/moz.build b/dom/media/platforms/moz.build index 097c9c6a769351..c5a9ce2029715a 100644 --- a/dom/media/platforms/moz.build +++ b/dom/media/platforms/moz.build @@ -5,142 +5,137 @@ # file, You can obtain one at http://mozilla.org/MPL/2.0/. 
EXPORTS += [ - 'agnostic/AgnosticDecoderModule.h', - 'agnostic/BlankDecoderModule.h', - 'agnostic/DummyMediaDataDecoder.h', - 'agnostic/OpusDecoder.h', - 'agnostic/TheoraDecoder.h', - 'agnostic/VorbisDecoder.h', - 'agnostic/VPXDecoder.h', - 'agnostic/WAVDecoder.h', - 'AllocationPolicy.h', - 'MediaTelemetryConstants.h', - 'PDMFactory.h', - 'PEMFactory.h', - 'PlatformDecoderModule.h', - 'PlatformEncoderModule.h', - 'ReorderQueue.h', - 'SimpleMap.h', - 'wrappers/AudioTrimmer.h', - 'wrappers/MediaChangeMonitor.h', - 'wrappers/MediaDataDecoderProxy.h' + "agnostic/AgnosticDecoderModule.h", + "agnostic/BlankDecoderModule.h", + "agnostic/DummyMediaDataDecoder.h", + "agnostic/OpusDecoder.h", + "agnostic/TheoraDecoder.h", + "agnostic/VorbisDecoder.h", + "agnostic/VPXDecoder.h", + "agnostic/WAVDecoder.h", + "AllocationPolicy.h", + "MediaTelemetryConstants.h", + "PDMFactory.h", + "PEMFactory.h", + "PlatformDecoderModule.h", + "PlatformEncoderModule.h", + "ReorderQueue.h", + "SimpleMap.h", + "wrappers/AudioTrimmer.h", + "wrappers/MediaChangeMonitor.h", + "wrappers/MediaDataDecoderProxy.h", ] UNIFIED_SOURCES += [ - 'agnostic/AgnosticDecoderModule.cpp', - 'agnostic/BlankDecoderModule.cpp', - 'agnostic/DummyMediaDataDecoder.cpp', - 'agnostic/NullDecoderModule.cpp', - 'agnostic/OpusDecoder.cpp', - 'agnostic/TheoraDecoder.cpp', - 'agnostic/VorbisDecoder.cpp', - 'agnostic/VPXDecoder.cpp', - 'agnostic/WAVDecoder.cpp', - 'AllocationPolicy.cpp', - 'PDMFactory.cpp', - 'PEMFactory.cpp', - 'wrappers/AudioTrimmer.cpp', - 'wrappers/MediaChangeMonitor.cpp', - 'wrappers/MediaDataDecoderProxy.cpp' + "agnostic/AgnosticDecoderModule.cpp", + "agnostic/BlankDecoderModule.cpp", + "agnostic/DummyMediaDataDecoder.cpp", + "agnostic/NullDecoderModule.cpp", + "agnostic/OpusDecoder.cpp", + "agnostic/TheoraDecoder.cpp", + "agnostic/VorbisDecoder.cpp", + "agnostic/VPXDecoder.cpp", + "agnostic/WAVDecoder.cpp", + "AllocationPolicy.cpp", + "PDMFactory.cpp", + "PEMFactory.cpp", + "wrappers/AudioTrimmer.cpp", + "wrappers/MediaChangeMonitor.cpp", + "wrappers/MediaDataDecoderProxy.cpp", ] -DIRS += [ - 'agnostic/bytestreams', - 'agnostic/eme', - 'agnostic/gmp', - 'omx' -] +DIRS += ["agnostic/bytestreams", "agnostic/eme", "agnostic/gmp", "omx"] -if CONFIG['MOZ_WMF']: - DIRS += [ 'wmf' ]; +if CONFIG["MOZ_WMF"]: + DIRS += ["wmf"] -if CONFIG['MOZ_FFVPX'] or CONFIG['MOZ_FFMPEG']: +if CONFIG["MOZ_FFVPX"] or CONFIG["MOZ_FFMPEG"]: # common code to either FFmpeg or FFVPX EXPORTS += [ - 'ffmpeg/FFmpegRDFTTypes.h', + "ffmpeg/FFmpegRDFTTypes.h", ] UNIFIED_SOURCES += [ - 'ffmpeg/FFmpegLibWrapper.cpp', + "ffmpeg/FFmpegLibWrapper.cpp", ] -if CONFIG['MOZ_FFVPX']: +if CONFIG["MOZ_FFVPX"]: DIRS += [ - 'ffmpeg/ffvpx', + "ffmpeg/ffvpx", ] -if CONFIG['MOZ_FFMPEG']: +if CONFIG["MOZ_FFMPEG"]: DIRS += [ - 'ffmpeg', + "ffmpeg", ] -if CONFIG['MOZ_AV1']: +if CONFIG["MOZ_AV1"]: EXPORTS += [ - 'agnostic/AOMDecoder.h', - 'agnostic/DAV1DDecoder.h', + "agnostic/AOMDecoder.h", + "agnostic/DAV1DDecoder.h", ] UNIFIED_SOURCES += [ - 'agnostic/AOMDecoder.cpp', - 'agnostic/DAV1DDecoder.cpp', + "agnostic/AOMDecoder.cpp", + "agnostic/DAV1DDecoder.cpp", ] -if CONFIG['MOZ_OMX']: +if CONFIG["MOZ_OMX"]: EXPORTS += [ - 'omx/OmxCoreLibLinker.h', + "omx/OmxCoreLibLinker.h", ] UNIFIED_SOURCES += [ - 'omx/OmxCoreLibLinker.cpp', + "omx/OmxCoreLibLinker.cpp", ] -if CONFIG['MOZ_APPLEMEDIA']: - EXPORTS += [ - 'apple/AppleDecoderModule.h', - 'apple/AppleEncoderModule.h', - ] - UNIFIED_SOURCES += [ - 'apple/AppleATDecoder.cpp', - 'apple/AppleDecoderModule.cpp', - 
'apple/AppleEncoderModule.cpp', - 'apple/AppleVTDecoder.cpp', - 'apple/AppleVTEncoder.cpp', - ] - LOCAL_INCLUDES += [ - '/media/libyuv/libyuv/include', - ] - OS_LIBS += [ - '-framework AudioToolbox', - '-framework CoreMedia', - '-framework VideoToolbox', - # For some unknown reason, the documented method of using weak_import - # attribute doesn't work with VideoToolbox's functions. - # We want to lazily load _VTRegisterSupplementalVideoDecoderIfAvailable - # symbol as it's only available in macOS 11 and later. - '-Wl,-U,_VTRegisterSupplementalVideoDecoderIfAvailable', - # Same for VTIsHardwareDecodeSupported available from macOS 10.13. - '-Wl,-U,_VTIsHardwareDecodeSupported', - ] +if CONFIG["MOZ_APPLEMEDIA"]: + EXPORTS += [ + "apple/AppleDecoderModule.h", + "apple/AppleEncoderModule.h", + ] + UNIFIED_SOURCES += [ + "apple/AppleATDecoder.cpp", + "apple/AppleDecoderModule.cpp", + "apple/AppleEncoderModule.cpp", + "apple/AppleVTDecoder.cpp", + "apple/AppleVTEncoder.cpp", + ] + LOCAL_INCLUDES += [ + "/media/libyuv/libyuv/include", + ] + OS_LIBS += [ + "-framework AudioToolbox", + "-framework CoreMedia", + "-framework VideoToolbox", + # For some unknown reason, the documented method of using weak_import + # attribute doesn't work with VideoToolbox's functions. + # We want to lazily load _VTRegisterSupplementalVideoDecoderIfAvailable + # symbol as it's only available in macOS 11 and later. + "-Wl,-U,_VTRegisterSupplementalVideoDecoderIfAvailable", + # Same for VTIsHardwareDecodeSupported available from macOS 10.13. + "-Wl,-U,_VTIsHardwareDecodeSupported", + ] -include('/ipc/chromium/chromium-config.mozbuild') +include("/ipc/chromium/chromium-config.mozbuild") -if CONFIG['MOZ_WIDGET_TOOLKIT'] == 'android': +if CONFIG["MOZ_WIDGET_TOOLKIT"] == "android": EXPORTS += [ - 'android/AndroidDecoderModule.h', - 'android/AndroidEncoderModule.h', - 'android/JavaCallbacksSupport.h', + "android/AndroidDecoderModule.h", + "android/AndroidEncoderModule.h", + "android/JavaCallbacksSupport.h", ] UNIFIED_SOURCES += [ - 'android/AndroidDataEncoder.cpp', - 'android/AndroidDecoderModule.cpp', - 'android/AndroidEncoderModule.cpp', - 'android/RemoteDataDecoder.cpp', + "android/AndroidDataEncoder.cpp", + "android/AndroidDecoderModule.cpp", + "android/AndroidEncoderModule.cpp", + "android/RemoteDataDecoder.cpp", ] LOCAL_INCLUDES += [ - '/media/libyuv/libyuv/include', + "/media/libyuv/libyuv/include", ] -FINAL_LIBRARY = 'xul' +FINAL_LIBRARY = "xul" -if CONFIG['CC_TYPE'] in ('clang', 'gcc'): - CXXFLAGS += ['-Wno-error=shadow'] +if CONFIG["CC_TYPE"] in ("clang", "gcc"): + CXXFLAGS += ["-Wno-error=shadow"] # Add libFuzzer configuration directives -include('/tools/fuzzing/libfuzzer-config.mozbuild') +include("/tools/fuzzing/libfuzzer-config.mozbuild") diff --git a/dom/media/platforms/omx/moz.build b/dom/media/platforms/omx/moz.build index 971a8567f147ab..260903c1ad897a 100644 --- a/dom/media/platforms/omx/moz.build +++ b/dom/media/platforms/omx/moz.build @@ -5,35 +5,35 @@ # file, You can obtain one at http://mozilla.org/MPL/2.0/. 
EXPORTS += [ - 'OmxDecoderModule.h', + "OmxDecoderModule.h", ] UNIFIED_SOURCES += [ - 'OmxDataDecoder.cpp', - 'OmxDecoderModule.cpp', - 'OmxPlatformLayer.cpp', - 'OmxPromiseLayer.cpp', + "OmxDataDecoder.cpp", + "OmxDecoderModule.cpp", + "OmxPlatformLayer.cpp", + "OmxPromiseLayer.cpp", ] LOCAL_INCLUDES += [ - '/media/openmax_il/il112', + "/media/openmax_il/il112", ] -include('/ipc/chromium/chromium-config.mozbuild') +include("/ipc/chromium/chromium-config.mozbuild") -if CONFIG['MOZ_OMX']: +if CONFIG["MOZ_OMX"]: UNIFIED_SOURCES += [ - 'PureOmxPlatformLayer.cpp', + "PureOmxPlatformLayer.cpp", ] -FINAL_LIBRARY = 'xul' +FINAL_LIBRARY = "xul" -if CONFIG['CC_TYPE'] in ('clang', 'gcc'): - CXXFLAGS += ['-Wno-error=shadow'] +if CONFIG["CC_TYPE"] in ("clang", "gcc"): + CXXFLAGS += ["-Wno-error=shadow"] # Avoid warnings from third-party code that we can not modify. -if CONFIG['CC_TYPE'] == 'clang-cl': - CXXFLAGS += ['-Wno-invalid-source-encoding'] +if CONFIG["CC_TYPE"] == "clang-cl": + CXXFLAGS += ["-Wno-invalid-source-encoding"] # Add libFuzzer configuration directives -include('/tools/fuzzing/libfuzzer-config.mozbuild') +include("/tools/fuzzing/libfuzzer-config.mozbuild") diff --git a/dom/media/platforms/wmf/moz.build b/dom/media/platforms/wmf/moz.build index 49c33f56feb9de..bd9c0e03be1ed3 100644 --- a/dom/media/platforms/wmf/moz.build +++ b/dom/media/platforms/wmf/moz.build @@ -5,33 +5,33 @@ # file, You can obtain one at http://mozilla.org/MPL/2.0/. EXPORTS += [ - 'DXVA2Manager.h', - 'MFTDecoder.h', - 'WMF.h', - 'WMFAudioMFTManager.h', - 'WMFDecoderModule.h', - 'WMFMediaDataDecoder.h', - 'WMFUtils.h', - 'WMFVideoMFTManager.h', + "DXVA2Manager.h", + "MFTDecoder.h", + "WMF.h", + "WMFAudioMFTManager.h", + "WMFDecoderModule.h", + "WMFMediaDataDecoder.h", + "WMFUtils.h", + "WMFVideoMFTManager.h", ] UNIFIED_SOURCES += [ - 'DXVA2Manager.cpp', - 'MFTDecoder.cpp', - 'WMFAudioMFTManager.cpp', - 'WMFDecoderModule.cpp', - 'WMFMediaDataDecoder.cpp', - 'WMFVideoMFTManager.cpp', + "DXVA2Manager.cpp", + "MFTDecoder.cpp", + "WMFAudioMFTManager.cpp", + "WMFDecoderModule.cpp", + "WMFMediaDataDecoder.cpp", + "WMFVideoMFTManager.cpp", ] SOURCES += [ - 'WMFUtils.cpp', + "WMFUtils.cpp", ] -include('/ipc/chromium/chromium-config.mozbuild') +include("/ipc/chromium/chromium-config.mozbuild") -FINAL_LIBRARY = 'xul' +FINAL_LIBRARY = "xul" -CXXFLAGS += CONFIG['MOZ_CAIRO_CFLAGS'] +CXXFLAGS += CONFIG["MOZ_CAIRO_CFLAGS"] # Add libFuzzer configuration directives -include('/tools/fuzzing/libfuzzer-config.mozbuild') +include("/tools/fuzzing/libfuzzer-config.mozbuild") diff --git a/dom/media/systemservices/moz.build b/dom/media/systemservices/moz.build index 3bc07f42d361c2..fa1dbe4d4ba81a 100644 --- a/dom/media/systemservices/moz.build +++ b/dom/media/systemservices/moz.build @@ -3,110 +3,111 @@ # This Source Code Form is subject to the terms of the Mozilla Public # License, v. 2.0. If a copy of the MPL was not distributed with this # file, You can obtain one at http://mozilla.org/MPL/2.0/. 
-include('/dom/media/webrtc/third_party_build/webrtc.mozbuild') +include("/dom/media/webrtc/third_party_build/webrtc.mozbuild") -if CONFIG['MOZ_WEBRTC']: +if CONFIG["MOZ_WEBRTC"]: EXPORTS += [ - 'CamerasChild.h', - 'CamerasParent.h', - 'VideoEngine.h', - 'VideoFrameUtils.h' + "CamerasChild.h", + "CamerasParent.h", + "VideoEngine.h", + "VideoFrameUtils.h", ] UNIFIED_SOURCES += [ - 'CamerasChild.cpp', - 'CamerasParent.cpp', - 'VideoEngine.cpp', - 'VideoFrameUtils.cpp' + "CamerasChild.cpp", + "CamerasParent.cpp", + "VideoEngine.cpp", + "VideoFrameUtils.cpp", ] LOCAL_INCLUDES += [ - '/dom/media/webrtc', - '/media/libyuv/libyuv/include', - '/third_party/libwebrtc', - '/third_party/libwebrtc/webrtc', + "/dom/media/webrtc", + "/media/libyuv/libyuv/include", + "/third_party/libwebrtc", + "/third_party/libwebrtc/webrtc", ] - if CONFIG['OS_TARGET'] == 'Android': + if CONFIG["OS_TARGET"] == "Android": UNIFIED_SOURCES += [ - 'android_video_capture/device_info_android.cc', - 'android_video_capture/video_capture_android.cc', + "android_video_capture/device_info_android.cc", + "android_video_capture/video_capture_android.cc", ] - elif CONFIG['OS_TARGET'] == 'Darwin': + elif CONFIG["OS_TARGET"] == "Darwin": UNIFIED_SOURCES += [ - 'objc_video_capture/device_info.mm', - 'objc_video_capture/device_info_objc.mm', - 'objc_video_capture/rtc_video_capture_objc.mm', - 'objc_video_capture/video_capture.mm', + "objc_video_capture/device_info.mm", + "objc_video_capture/device_info_objc.mm", + "objc_video_capture/rtc_video_capture_objc.mm", + "objc_video_capture/video_capture.mm", ] CMMFLAGS += [ - '-fobjc-arc', + "-fobjc-arc", ] - if CONFIG['OS_TARGET'] != 'Android': + if CONFIG["OS_TARGET"] != "Android": UNIFIED_SOURCES += [ - 'video_engine/desktop_capture_impl.cc', - 'video_engine/platform_uithread.cc', - 'video_engine/tab_capturer.cc', + "video_engine/desktop_capture_impl.cc", + "video_engine/platform_uithread.cc", + "video_engine/tab_capturer.cc", ] -if CONFIG['OS_TARGET'] == 'Android': - DEFINES['WEBRTC_ANDROID'] = True +if CONFIG["OS_TARGET"] == "Android": + DEFINES["WEBRTC_ANDROID"] = True -if CONFIG['OS_TARGET'] == 'Android': - EXPORTS += [ - 'OpenSLESProvider.h' - ] +if CONFIG["OS_TARGET"] == "Android": + EXPORTS += ["OpenSLESProvider.h"] UNIFIED_SOURCES += [ - 'OpenSLESProvider.cpp', + "OpenSLESProvider.cpp", ] -if CONFIG['MOZ_WIDGET_TOOLKIT'] == 'cocoa': - UNIFIED_SOURCES += ['OSXRunLoopSingleton.cpp'] - EXPORTS += ['OSXRunLoopSingleton.h'] +if CONFIG["MOZ_WIDGET_TOOLKIT"] == "cocoa": + UNIFIED_SOURCES += ["OSXRunLoopSingleton.cpp"] + EXPORTS += ["OSXRunLoopSingleton.h"] -EXPORTS.mozilla += ['ShmemPool.h',] +EXPORTS.mozilla += [ + "ShmemPool.h", +] -EXPORTS.mozilla.media += ['CamerasTypes.h', - 'MediaChild.h', - 'MediaParent.h', - 'MediaSystemResourceClient.h', - 'MediaSystemResourceManager.h', - 'MediaSystemResourceManagerChild.h', - 'MediaSystemResourceManagerParent.h', - 'MediaSystemResourceMessageUtils.h', - 'MediaSystemResourceService.h', - 'MediaSystemResourceTypes.h', - 'MediaTaskUtils.h', - 'MediaUtils.h', +EXPORTS.mozilla.media += [ + "CamerasTypes.h", + "MediaChild.h", + "MediaParent.h", + "MediaSystemResourceClient.h", + "MediaSystemResourceManager.h", + "MediaSystemResourceManagerChild.h", + "MediaSystemResourceManagerParent.h", + "MediaSystemResourceMessageUtils.h", + "MediaSystemResourceService.h", + "MediaSystemResourceTypes.h", + "MediaTaskUtils.h", + "MediaUtils.h", ] UNIFIED_SOURCES += [ - 'MediaChild.cpp', - 'MediaParent.cpp', - 'MediaSystemResourceClient.cpp', - 
'MediaSystemResourceManager.cpp', - 'MediaSystemResourceManagerChild.cpp', - 'MediaSystemResourceManagerParent.cpp', - 'MediaSystemResourceService.cpp', - 'MediaUtils.cpp', - 'ShmemPool.cpp', + "MediaChild.cpp", + "MediaParent.cpp", + "MediaSystemResourceClient.cpp", + "MediaSystemResourceManager.cpp", + "MediaSystemResourceManagerChild.cpp", + "MediaSystemResourceManagerParent.cpp", + "MediaSystemResourceService.cpp", + "MediaUtils.cpp", + "ShmemPool.cpp", ] IPDL_SOURCES += [ - 'PCameras.ipdl', - 'PMedia.ipdl', - 'PMediaSystemResourceManager.ipdl', + "PCameras.ipdl", + "PMedia.ipdl", + "PMediaSystemResourceManager.ipdl", ] # /dom/base needed for nsGlobalWindow.h in MediaChild.cpp LOCAL_INCLUDES += [ - '/dom/base', + "/dom/base", ] -include('/ipc/chromium/chromium-config.mozbuild') +include("/ipc/chromium/chromium-config.mozbuild") -FINAL_LIBRARY = 'xul' +FINAL_LIBRARY = "xul" -if CONFIG['CC_TYPE'] in ('clang', 'gcc'): - CXXFLAGS += ['-Wno-error=shadow'] +if CONFIG["CC_TYPE"] in ("clang", "gcc"): + CXXFLAGS += ["-Wno-error=shadow"] -with Files('android_video_capture/**'): - SCHEDULES.exclusive = ['android'] +with Files("android_video_capture/**"): + SCHEDULES.exclusive = ["android"] diff --git a/dom/media/wave/moz.build b/dom/media/wave/moz.build index fae2093dbc13c0..0c217763cd2482 100644 --- a/dom/media/wave/moz.build +++ b/dom/media/wave/moz.build @@ -5,19 +5,19 @@ # file, You can obtain one at http://mozilla.org/MPL/2.0/. EXPORTS += [ - 'WaveDecoder.h', - 'WaveDemuxer.h', + "WaveDecoder.h", + "WaveDemuxer.h", ] UNIFIED_SOURCES += [ - 'WaveDecoder.cpp', - 'WaveDemuxer.cpp', + "WaveDecoder.cpp", + "WaveDemuxer.cpp", ] -FINAL_LIBRARY = 'xul' +FINAL_LIBRARY = "xul" -if CONFIG['CC_TYPE'] in ('clang', 'gcc'): - CXXFLAGS += ['-Wno-error=shadow'] +if CONFIG["CC_TYPE"] in ("clang", "gcc"): + CXXFLAGS += ["-Wno-error=shadow"] # Add libFuzzer configuration directives -include('/tools/fuzzing/libfuzzer-config.mozbuild') +include("/tools/fuzzing/libfuzzer-config.mozbuild") diff --git a/dom/media/webaudio/blink/moz.build b/dom/media/webaudio/blink/moz.build index 87d5de8cec6aa8..af698131c0612e 100644 --- a/dom/media/webaudio/blink/moz.build +++ b/dom/media/webaudio/blink/moz.build @@ -5,35 +5,35 @@ # file, You can obtain one at http://mozilla.org/MPL/2.0/. UNIFIED_SOURCES += [ - 'Biquad.cpp', - 'DynamicsCompressor.cpp', - 'DynamicsCompressorKernel.cpp', - 'FFTConvolver.cpp', - 'HRTFDatabase.cpp', - 'HRTFDatabaseLoader.cpp', - 'HRTFElevation.cpp', - 'HRTFKernel.cpp', - 'HRTFPanner.cpp', - 'IIRFilter.cpp', - 'PeriodicWave.cpp', - 'Reverb.cpp', - 'ReverbAccumulationBuffer.cpp', - 'ReverbConvolver.cpp', - 'ReverbConvolverStage.cpp', - 'ReverbInputBuffer.cpp', - 'ZeroPole.cpp', + "Biquad.cpp", + "DynamicsCompressor.cpp", + "DynamicsCompressorKernel.cpp", + "FFTConvolver.cpp", + "HRTFDatabase.cpp", + "HRTFDatabaseLoader.cpp", + "HRTFElevation.cpp", + "HRTFKernel.cpp", + "HRTFPanner.cpp", + "IIRFilter.cpp", + "PeriodicWave.cpp", + "Reverb.cpp", + "ReverbAccumulationBuffer.cpp", + "ReverbConvolver.cpp", + "ReverbConvolverStage.cpp", + "ReverbInputBuffer.cpp", + "ZeroPole.cpp", ] # Are we targeting x86 or x64? If so, build SSE2 files. 
-if CONFIG['INTEL_ARCHITECTURE']: - DEFINES['USE_SSE2'] = True +if CONFIG["INTEL_ARCHITECTURE"]: + DEFINES["USE_SSE2"] = True -include('/ipc/chromium/chromium-config.mozbuild') +include("/ipc/chromium/chromium-config.mozbuild") -FINAL_LIBRARY = 'xul' +FINAL_LIBRARY = "xul" LOCAL_INCLUDES += [ - '/dom/media/webaudio', + "/dom/media/webaudio", ] -if CONFIG['CC_TYPE'] in ('clang', 'gcc'): - CXXFLAGS += ['-Wno-shadow'] +if CONFIG["CC_TYPE"] in ("clang", "gcc"): + CXXFLAGS += ["-Wno-shadow"] diff --git a/dom/media/webaudio/moz.build b/dom/media/webaudio/moz.build index c162e637792d4e..8d04ecb7cb0959 100644 --- a/dom/media/webaudio/moz.build +++ b/dom/media/webaudio/moz.build @@ -4,148 +4,144 @@ # License, v. 2.0. If a copy of the MPL was not distributed with this # file, You can obtain one at http://mozilla.org/MPL/2.0/. -with Files('*'): - BUG_COMPONENT = ('Core', 'Web Audio') +with Files("*"): + BUG_COMPONENT = ("Core", "Web Audio") -DIRS += ['blink'] +DIRS += ["blink"] MOCHITEST_MANIFESTS += [ - 'test/blink/mochitest.ini', - 'test/mochitest.ini', + "test/blink/mochitest.ini", + "test/mochitest.ini", ] TEST_HARNESS_FILES.testing.mochitest.tests.dom.media.webaudio.test.blink += [ - 'test/blink/audio-testing.js', - 'test/blink/convolution-testing.js', - 'test/blink/panner-model-testing.js', + "test/blink/audio-testing.js", + "test/blink/convolution-testing.js", + "test/blink/panner-model-testing.js", ] EXPORTS += [ - 'AlignedTArray.h', - 'AudioBlock.h', - 'AudioEventTimeline.h', - 'AudioNodeEngine.h', - 'AudioNodeExternalInputTrack.h', - 'AudioNodeTrack.h', - 'AudioParamTimeline.h', - 'MediaBufferDecoder.h', - 'ThreeDPoint.h', - 'WebAudioUtils.h', + "AlignedTArray.h", + "AudioBlock.h", + "AudioEventTimeline.h", + "AudioNodeEngine.h", + "AudioNodeExternalInputTrack.h", + "AudioNodeTrack.h", + "AudioParamTimeline.h", + "MediaBufferDecoder.h", + "ThreeDPoint.h", + "WebAudioUtils.h", ] EXPORTS.mozilla += [ - 'FFTBlock.h', - 'MediaStreamAudioDestinationNode.h', + "FFTBlock.h", + "MediaStreamAudioDestinationNode.h", ] EXPORTS.mozilla.dom += [ - 'AnalyserNode.h', - 'AudioBuffer.h', - 'AudioBufferSourceNode.h', - 'AudioContext.h', - 'AudioDestinationNode.h', - 'AudioListener.h', - 'AudioNode.h', - 'AudioParam.h', - 'AudioParamDescriptorMap.h', - 'AudioParamMap.h', - 'AudioProcessingEvent.h', - 'AudioScheduledSourceNode.h', - 'AudioWorkletGlobalScope.h', - 'AudioWorkletNode.h', - 'AudioWorkletProcessor.h', - 'BiquadFilterNode.h', - 'ChannelMergerNode.h', - 'ChannelSplitterNode.h', - 'ConstantSourceNode.h', - 'ConvolverNode.h', - 'DelayNode.h', - 'DynamicsCompressorNode.h', - 'GainNode.h', - 'IIRFilterNode.h', - 'MediaElementAudioSourceNode.h', - 'MediaStreamAudioDestinationNode.h', - 'MediaStreamAudioSourceNode.h', - 'MediaStreamTrackAudioSourceNode.h', - 'OscillatorNode.h', - 'PannerNode.h', - 'PeriodicWave.h', - 'ScriptProcessorNode.h', - 'StereoPannerNode.h', - 'WaveShaperNode.h', + "AnalyserNode.h", + "AudioBuffer.h", + "AudioBufferSourceNode.h", + "AudioContext.h", + "AudioDestinationNode.h", + "AudioListener.h", + "AudioNode.h", + "AudioParam.h", + "AudioParamDescriptorMap.h", + "AudioParamMap.h", + "AudioProcessingEvent.h", + "AudioScheduledSourceNode.h", + "AudioWorkletGlobalScope.h", + "AudioWorkletNode.h", + "AudioWorkletProcessor.h", + "BiquadFilterNode.h", + "ChannelMergerNode.h", + "ChannelSplitterNode.h", + "ConstantSourceNode.h", + "ConvolverNode.h", + "DelayNode.h", + "DynamicsCompressorNode.h", + "GainNode.h", + "IIRFilterNode.h", + "MediaElementAudioSourceNode.h", + 
"MediaStreamAudioDestinationNode.h", + "MediaStreamAudioSourceNode.h", + "MediaStreamTrackAudioSourceNode.h", + "OscillatorNode.h", + "PannerNode.h", + "PeriodicWave.h", + "ScriptProcessorNode.h", + "StereoPannerNode.h", + "WaveShaperNode.h", ] UNIFIED_SOURCES += [ - 'AnalyserNode.cpp', - 'AudioBlock.cpp', - 'AudioBuffer.cpp', - 'AudioBufferSourceNode.cpp', - 'AudioContext.cpp', - 'AudioDestinationNode.cpp', - 'AudioEventTimeline.cpp', - 'AudioListener.cpp', - 'AudioNode.cpp', - 'AudioNodeEngine.cpp', - 'AudioNodeExternalInputTrack.cpp', - 'AudioNodeTrack.cpp', - 'AudioParam.cpp', - 'AudioParamMap.cpp', - 'AudioProcessingEvent.cpp', - 'AudioScheduledSourceNode.cpp', - 'AudioWorkletGlobalScope.cpp', - 'AudioWorkletImpl.cpp', - 'AudioWorkletNode.cpp', - 'AudioWorkletProcessor.cpp', - 'BiquadFilterNode.cpp', - 'ChannelMergerNode.cpp', - 'ChannelSplitterNode.cpp', - 'ConstantSourceNode.cpp', - 'ConvolverNode.cpp', - 'DelayBuffer.cpp', - 'DelayNode.cpp', - 'DynamicsCompressorNode.cpp', - 'FFTBlock.cpp', - 'GainNode.cpp', - 'IIRFilterNode.cpp', - 'MediaBufferDecoder.cpp', - 'MediaElementAudioSourceNode.cpp', - 'MediaStreamAudioDestinationNode.cpp', - 'MediaStreamAudioSourceNode.cpp', - 'MediaStreamTrackAudioSourceNode.cpp', - 'OscillatorNode.cpp', - 'PannerNode.cpp', - 'PeriodicWave.cpp', - 'ScriptProcessorNode.cpp', - 'StereoPannerNode.cpp', - 'ThreeDPoint.cpp', - 'WaveShaperNode.cpp', - 'WebAudioUtils.cpp', + "AnalyserNode.cpp", + "AudioBlock.cpp", + "AudioBuffer.cpp", + "AudioBufferSourceNode.cpp", + "AudioContext.cpp", + "AudioDestinationNode.cpp", + "AudioEventTimeline.cpp", + "AudioListener.cpp", + "AudioNode.cpp", + "AudioNodeEngine.cpp", + "AudioNodeExternalInputTrack.cpp", + "AudioNodeTrack.cpp", + "AudioParam.cpp", + "AudioParamMap.cpp", + "AudioProcessingEvent.cpp", + "AudioScheduledSourceNode.cpp", + "AudioWorkletGlobalScope.cpp", + "AudioWorkletImpl.cpp", + "AudioWorkletNode.cpp", + "AudioWorkletProcessor.cpp", + "BiquadFilterNode.cpp", + "ChannelMergerNode.cpp", + "ChannelSplitterNode.cpp", + "ConstantSourceNode.cpp", + "ConvolverNode.cpp", + "DelayBuffer.cpp", + "DelayNode.cpp", + "DynamicsCompressorNode.cpp", + "FFTBlock.cpp", + "GainNode.cpp", + "IIRFilterNode.cpp", + "MediaBufferDecoder.cpp", + "MediaElementAudioSourceNode.cpp", + "MediaStreamAudioDestinationNode.cpp", + "MediaStreamAudioSourceNode.cpp", + "MediaStreamTrackAudioSourceNode.cpp", + "OscillatorNode.cpp", + "PannerNode.cpp", + "PeriodicWave.cpp", + "ScriptProcessorNode.cpp", + "StereoPannerNode.cpp", + "ThreeDPoint.cpp", + "WaveShaperNode.cpp", + "WebAudioUtils.cpp", ] -if CONFIG['CPU_ARCH'] == 'aarch64' or CONFIG['BUILD_ARM_NEON']: - DEFINES['USE_NEON'] = True - SOURCES += ['AudioNodeEngineNEON.cpp'] - SOURCES['AudioNodeEngineNEON.cpp'].flags += CONFIG['NEON_FLAGS'] - if CONFIG['BUILD_ARM_NEON']: - LOCAL_INCLUDES += [ - '/media/openmax_dl/dl/api/' - ] +if CONFIG["CPU_ARCH"] == "aarch64" or CONFIG["BUILD_ARM_NEON"]: + DEFINES["USE_NEON"] = True + SOURCES += ["AudioNodeEngineNEON.cpp"] + SOURCES["AudioNodeEngineNEON.cpp"].flags += CONFIG["NEON_FLAGS"] + if CONFIG["BUILD_ARM_NEON"]: + LOCAL_INCLUDES += ["/media/openmax_dl/dl/api/"] # Are we targeting x86 or x64? If so, build SSE2 files. 
-if CONFIG['INTEL_ARCHITECTURE']: - SOURCES += ['AudioNodeEngineSSE2.cpp'] - DEFINES['USE_SSE2'] = True - SOURCES['AudioNodeEngineSSE2.cpp'].flags += CONFIG['SSE2_FLAGS'] +if CONFIG["INTEL_ARCHITECTURE"]: + SOURCES += ["AudioNodeEngineSSE2.cpp"] + DEFINES["USE_SSE2"] = True + SOURCES["AudioNodeEngineSSE2.cpp"].flags += CONFIG["SSE2_FLAGS"] # Allow outputing trace points from Web Audio API code -DEFINES['TRACING'] = True +DEFINES["TRACING"] = True -include('/ipc/chromium/chromium-config.mozbuild') +include("/ipc/chromium/chromium-config.mozbuild") -FINAL_LIBRARY = 'xul' -LOCAL_INCLUDES += [ - '..' -] +FINAL_LIBRARY = "xul" +LOCAL_INCLUDES += [".."] -if CONFIG['CC_TYPE'] in ('clang', 'gcc'): - CXXFLAGS += ['-Wno-error=shadow'] +if CONFIG["CC_TYPE"] in ("clang", "gcc"): + CXXFLAGS += ["-Wno-error=shadow"] diff --git a/dom/media/webm/moz.build b/dom/media/webm/moz.build index a3d0215ab8e015..24bf5d911673b2 100644 --- a/dom/media/webm/moz.build +++ b/dom/media/webm/moz.build @@ -5,30 +5,31 @@ # file, You can obtain one at http://mozilla.org/MPL/2.0/. EXPORTS += [ - 'NesteggPacketHolder.h', - 'WebMBufferedParser.h', - 'WebMDecoder.h', - 'WebMDemuxer.h', + "NesteggPacketHolder.h", + "WebMBufferedParser.h", + "WebMDecoder.h", + "WebMDemuxer.h", ] UNIFIED_SOURCES += [ - 'WebMBufferedParser.cpp', - 'WebMDecoder.cpp', - 'WebMDemuxer.cpp', + "WebMBufferedParser.cpp", + "WebMDecoder.cpp", + "WebMDemuxer.cpp", ] -if CONFIG['MOZ_WEBM_ENCODER']: - EXPORTS += ['WebMWriter.h'] - UNIFIED_SOURCES += ['EbmlComposer.cpp', - 'WebMWriter.cpp', +if CONFIG["MOZ_WEBM_ENCODER"]: + EXPORTS += ["WebMWriter.h"] + UNIFIED_SOURCES += [ + "EbmlComposer.cpp", + "WebMWriter.cpp", ] -CXXFLAGS += CONFIG['MOZ_LIBVPX_CFLAGS'] +CXXFLAGS += CONFIG["MOZ_LIBVPX_CFLAGS"] -FINAL_LIBRARY = 'xul' +FINAL_LIBRARY = "xul" -if CONFIG['CC_TYPE'] in ('clang', 'gcc'): - CXXFLAGS += ['-Wno-error=shadow'] +if CONFIG["CC_TYPE"] in ("clang", "gcc"): + CXXFLAGS += ["-Wno-error=shadow"] # Add libFuzzer configuration directives -include('/tools/fuzzing/libfuzzer-config.mozbuild') +include("/tools/fuzzing/libfuzzer-config.mozbuild") diff --git a/dom/media/webrtc/common/moz.build b/dom/media/webrtc/common/moz.build index 6e5f5baf99d008..01f2334eb3362c 100644 --- a/dom/media/webrtc/common/moz.build +++ b/dom/media/webrtc/common/moz.build @@ -3,22 +3,20 @@ # This Source Code Form is subject to the terms of the Mozilla Public # License, v. 2.0. If a copy of the MPL was not distributed with this # file, You can obtain one at http://mozilla.org/MPL/2.0/. 
-include('/dom/media/webrtc/third_party_build/webrtc.mozbuild') +include("/dom/media/webrtc/third_party_build/webrtc.mozbuild") -EXPORTS.mozilla.dom += [ - 'CandidateInfo.h' -] +EXPORTS.mozilla.dom += ["CandidateInfo.h"] LOCAL_INCLUDES += [ - '/dom/media/webrtc/transport/third_party/nrappkit/src/util/libekr', - '/third_party/libwebrtc/webrtc', + "/dom/media/webrtc/transport/third_party/nrappkit/src/util/libekr", + "/third_party/libwebrtc/webrtc", ] UNIFIED_SOURCES += [ - 'browser_logging/CSFLog.cpp', - 'browser_logging/WebRtcLog.cpp', - 'time_profiling/timecard.c', - 'YuvStamper.cpp', + "browser_logging/CSFLog.cpp", + "browser_logging/WebRtcLog.cpp", + "time_profiling/timecard.c", + "YuvStamper.cpp", ] -FINAL_LIBRARY = 'xul' +FINAL_LIBRARY = "xul" diff --git a/dom/media/webrtc/jsapi/moz.build b/dom/media/webrtc/jsapi/moz.build index 17d4cda8f5d68d..978df17d291bcd 100644 --- a/dom/media/webrtc/jsapi/moz.build +++ b/dom/media/webrtc/jsapi/moz.build @@ -3,35 +3,35 @@ # This Source Code Form is subject to the terms of the Mozilla Public # License, v. 2.0. If a copy of the MPL was not distributed with this # file, You can obtain one at http://mozilla.org/MPL/2.0/. -include('/dom/media/webrtc/third_party_build/webrtc.mozbuild') +include("/dom/media/webrtc/third_party_build/webrtc.mozbuild") LOCAL_INCLUDES += [ - '!/ipc/ipdl/_ipdlheaders', - '/dom/base', - '/dom/media', - '/dom/media/webrtc', - '/ipc/chromium/src', - '/media/webrtc', - '/netwerk/srtp/src/include', - '/third_party/libwebrtc', - '/third_party/libwebrtc/webrtc', + "!/ipc/ipdl/_ipdlheaders", + "/dom/base", + "/dom/media", + "/dom/media/webrtc", + "/ipc/chromium/src", + "/media/webrtc", + "/netwerk/srtp/src/include", + "/third_party/libwebrtc", + "/third_party/libwebrtc/webrtc", ] UNIFIED_SOURCES += [ - 'MediaTransportHandler.cpp', - 'MediaTransportHandlerIPC.cpp', - 'MediaTransportParent.cpp', - 'PacketDumper.cpp', - 'PeerConnectionCtx.cpp', - 'PeerConnectionImpl.cpp', - 'PeerConnectionMedia.cpp', - 'RTCDtlsTransport.cpp', - 'RTCDTMFSender.cpp', - 'RTCRtpReceiver.cpp', - 'RTCStatsIdGenerator.cpp', - 'RTCStatsReport.cpp', - 'TransceiverImpl.cpp', - 'WebrtcGlobalInformation.cpp', + "MediaTransportHandler.cpp", + "MediaTransportHandlerIPC.cpp", + "MediaTransportParent.cpp", + "PacketDumper.cpp", + "PeerConnectionCtx.cpp", + "PeerConnectionImpl.cpp", + "PeerConnectionMedia.cpp", + "RTCDtlsTransport.cpp", + "RTCDTMFSender.cpp", + "RTCRtpReceiver.cpp", + "RTCStatsIdGenerator.cpp", + "RTCStatsReport.cpp", + "TransceiverImpl.cpp", + "WebrtcGlobalInformation.cpp", ] -FINAL_LIBRARY = 'xul' +FINAL_LIBRARY = "xul" diff --git a/dom/media/webrtc/jsep/moz.build b/dom/media/webrtc/jsep/moz.build index 862187155bba6b..4ef0d90da423f1 100644 --- a/dom/media/webrtc/jsep/moz.build +++ b/dom/media/webrtc/jsep/moz.build @@ -3,20 +3,16 @@ # This Source Code Form is subject to the terms of the Mozilla Public # License, v. 2.0. If a copy of the MPL was not distributed with this # file, You can obtain one at http://mozilla.org/MPL/2.0/. 
-include('/dom/media/webrtc/third_party_build/webrtc.mozbuild') +include("/dom/media/webrtc/third_party_build/webrtc.mozbuild") LOCAL_INCLUDES += [ - '/dom/media/webrtc', - '/media/webrtc', - '/third_party/libwebrtc', - '/third_party/libwebrtc/webrtc', - '/third_party/sipcc', + "/dom/media/webrtc", + "/media/webrtc", + "/third_party/libwebrtc", + "/third_party/libwebrtc/webrtc", + "/third_party/sipcc", ] -UNIFIED_SOURCES += [ - 'JsepSessionImpl.cpp', - 'JsepTrack.cpp', - 'SsrcGenerator.cpp' -] +UNIFIED_SOURCES += ["JsepSessionImpl.cpp", "JsepTrack.cpp", "SsrcGenerator.cpp"] -FINAL_LIBRARY = 'xul' +FINAL_LIBRARY = "xul" diff --git a/dom/media/webrtc/libwebrtcglue/moz.build b/dom/media/webrtc/libwebrtcglue/moz.build index af45a4ec1a2494..9ba8c5d40e26e6 100644 --- a/dom/media/webrtc/libwebrtcglue/moz.build +++ b/dom/media/webrtc/libwebrtcglue/moz.build @@ -3,35 +3,35 @@ # This Source Code Form is subject to the terms of the Mozilla Public # License, v. 2.0. If a copy of the MPL was not distributed with this # file, You can obtain one at http://mozilla.org/MPL/2.0/. -include('/dom/media/webrtc/third_party_build/webrtc.mozbuild') +include("/dom/media/webrtc/third_party_build/webrtc.mozbuild") LOCAL_INCLUDES += [ - '!/ipc/ipdl/_ipdlheaders', - '/dom/media/gmp', # for GMPLog.h, - '/dom/media/webrtc', - '/ipc/chromium/src', - '/media/libyuv/libyuv/include', - '/media/webrtc', - '/third_party/libwebrtc', - '/third_party/libwebrtc/webrtc', + "!/ipc/ipdl/_ipdlheaders", + "/dom/media/gmp", # for GMPLog.h, + "/dom/media/webrtc", + "/ipc/chromium/src", + "/media/libyuv/libyuv/include", + "/media/webrtc", + "/third_party/libwebrtc", + "/third_party/libwebrtc/webrtc", ] UNIFIED_SOURCES += [ - 'AudioConduit.cpp', - 'GmpVideoCodec.cpp', - 'MediaDataCodec.cpp', - 'RtpSourceObserver.cpp', - 'VideoConduit.cpp', - 'VideoStreamFactory.cpp', - 'WebrtcGmpVideoCodec.cpp', - 'WebrtcMediaDataDecoderCodec.cpp', - 'WebrtcMediaDataEncoderCodec.cpp', + "AudioConduit.cpp", + "GmpVideoCodec.cpp", + "MediaDataCodec.cpp", + "RtpSourceObserver.cpp", + "VideoConduit.cpp", + "VideoStreamFactory.cpp", + "WebrtcGmpVideoCodec.cpp", + "WebrtcMediaDataDecoderCodec.cpp", + "WebrtcMediaDataEncoderCodec.cpp", ] -if CONFIG['OS_TARGET'] == 'Android': +if CONFIG["OS_TARGET"] == "Android": UNIFIED_SOURCES += [ - 'MediaCodecVideoCodec.cpp', - 'WebrtcMediaCodecVP8VideoCodec.cpp', + "MediaCodecVideoCodec.cpp", + "WebrtcMediaCodecVP8VideoCodec.cpp", ] -FINAL_LIBRARY = 'xul' +FINAL_LIBRARY = "xul" diff --git a/dom/media/webrtc/moz.build b/dom/media/webrtc/moz.build index af943a97eb0ea2..bc95fa080390b5 100644 --- a/dom/media/webrtc/moz.build +++ b/dom/media/webrtc/moz.build @@ -3,114 +3,114 @@ # This Source Code Form is subject to the terms of the Mozilla Public # License, v. 2.0. If a copy of the MPL was not distributed with this # file, You can obtain one at http://mozilla.org/MPL/2.0/. 
-include('/dom/media/webrtc/third_party_build/webrtc.mozbuild') +include("/dom/media/webrtc/third_party_build/webrtc.mozbuild") -with Files('**'): - BUG_COMPONENT = ('Core', 'WebRTC: Audio/Video') +with Files("**"): + BUG_COMPONENT = ("Core", "WebRTC: Audio/Video") -with Files('PeerIdentity.*'): - BUG_COMPONENT = ('Core', 'WebRTC: Signaling') +with Files("PeerIdentity.*"): + BUG_COMPONENT = ("Core", "WebRTC: Signaling") -with Files('common/**'): - BUG_COMPONENT = ('CORE', 'WebRTC: Signaling') -with Files('jsep/**'): - BUG_COMPONENT = ('CORE', 'WebRTC: Signaling') -with Files('libwebrtcglue/**'): - BUG_COMPONENT = ('CORE', 'WebRTC: Signaling') -with Files('transportbridge/**'): - BUG_COMPONENT = ('CORE', 'WebRTC: Signaling') -with Files('jsapi/**'): - BUG_COMPONENT = ('CORE', 'WebRTC: Signaling') -with Files('sdp/**'): - BUG_COMPONENT = ('CORE', 'WebRTC: Signaling') +with Files("common/**"): + BUG_COMPONENT = ("CORE", "WebRTC: Signaling") +with Files("jsep/**"): + BUG_COMPONENT = ("CORE", "WebRTC: Signaling") +with Files("libwebrtcglue/**"): + BUG_COMPONENT = ("CORE", "WebRTC: Signaling") +with Files("transportbridge/**"): + BUG_COMPONENT = ("CORE", "WebRTC: Signaling") +with Files("jsapi/**"): + BUG_COMPONENT = ("CORE", "WebRTC: Signaling") +with Files("sdp/**"): + BUG_COMPONENT = ("CORE", "WebRTC: Signaling") EXPORTS += [ - 'CubebDeviceEnumerator.h', - 'MediaEngine.h', - 'MediaEngineDefault.h', - 'MediaEnginePrefs.h', - 'MediaEngineSource.h', - 'MediaTrackConstraints.h', - 'SineWaveGenerator.h', + "CubebDeviceEnumerator.h", + "MediaEngine.h", + "MediaEngineDefault.h", + "MediaEnginePrefs.h", + "MediaEngineSource.h", + "MediaTrackConstraints.h", + "SineWaveGenerator.h", ] SOURCES += [ - 'CubebDeviceEnumerator.cpp', + "CubebDeviceEnumerator.cpp", ] -if CONFIG['MOZ_WEBRTC']: +if CONFIG["MOZ_WEBRTC"]: EXPORTS += [ - 'MediaEngineRemoteVideoSource.h', - 'MediaEngineWebRTC.h', - 'MediaEngineWebRTCAudio.h', + "MediaEngineRemoteVideoSource.h", + "MediaEngineWebRTC.h", + "MediaEngineWebRTCAudio.h", ] - EXPORTS.mozilla.dom += [ 'RTCIdentityProviderRegistrar.h' ] + EXPORTS.mozilla.dom += ["RTCIdentityProviderRegistrar.h"] UNIFIED_SOURCES += [ - 'MediaEngineRemoteVideoSource.cpp', - 'MediaEngineWebRTCAudio.cpp', - 'RTCCertificate.cpp', - 'RTCIdentityProviderRegistrar.cpp', + "MediaEngineRemoteVideoSource.cpp", + "MediaEngineWebRTCAudio.cpp", + "RTCCertificate.cpp", + "RTCIdentityProviderRegistrar.cpp", ] # MediaEngineWebRTC.cpp needs to be built separately. SOURCES += [ - 'MediaEngineWebRTC.cpp', + "MediaEngineWebRTC.cpp", ] LOCAL_INCLUDES += [ - '..', - '/dom/base', - '/dom/media', - '/dom/media/webrtc/common', - '/dom/media/webrtc/common/browser_logging', - '/media/libyuv/libyuv/include', - '/third_party/libwebrtc', - '/third_party/libwebrtc/webrtc', + "..", + "/dom/base", + "/dom/media", + "/dom/media/webrtc/common", + "/dom/media/webrtc/common/browser_logging", + "/media/libyuv/libyuv/include", + "/third_party/libwebrtc", + "/third_party/libwebrtc/webrtc", ] -if CONFIG['MOZ_WEBRTC_SIGNALING']: +if CONFIG["MOZ_WEBRTC_SIGNALING"]: DIRS += [ - 'common', - 'jsapi', - 'jsep', - 'libwebrtcglue', - 'sdp', - 'third_party_build', - 'transportbridge', + "common", + "jsapi", + "jsep", + "libwebrtcglue", + "sdp", + "third_party_build", + "transportbridge", ] # Avoid warnings from third-party code that we can not modify. 
- if CONFIG['CC_TYPE'] == 'clang-cl': - CXXFLAGS += ['-Wno-invalid-source-encoding'] + if CONFIG["CC_TYPE"] == "clang-cl": + CXXFLAGS += ["-Wno-invalid-source-encoding"] PREPROCESSED_IPDL_SOURCES += [ - 'PMediaTransport.ipdl', - 'PWebrtcGlobal.ipdl', + "PMediaTransport.ipdl", + "PWebrtcGlobal.ipdl", ] UNIFIED_SOURCES += [ - 'MediaEngineDefault.cpp', - 'MediaEngineSource.cpp', - 'MediaTrackConstraints.cpp', - 'PeerIdentity.cpp', + "MediaEngineDefault.cpp", + "MediaEngineSource.cpp", + "MediaTrackConstraints.cpp", + "PeerIdentity.cpp", ] EXPORTS.mozilla += [ - 'PeerIdentity.h', + "PeerIdentity.h", ] EXPORTS.mozilla.dom += [ - 'RTCCertificate.h', + "RTCCertificate.h", ] -include('/ipc/chromium/chromium-config.mozbuild') +include("/ipc/chromium/chromium-config.mozbuild") # Suppress some GCC/clang warnings being treated as errors: # - about attributes on forward declarations for types that are already # defined, which complains about important MOZ_EXPORT attributes for # android API types CXXFLAGS += [ - '-Wno-error=attributes', - '-Wno-error=shadow', + "-Wno-error=attributes", + "-Wno-error=shadow", ] -DEFINES['TRACING'] = True -FINAL_LIBRARY = 'xul' +DEFINES["TRACING"] = True +FINAL_LIBRARY = "xul" diff --git a/dom/media/webrtc/sdp/moz.build b/dom/media/webrtc/sdp/moz.build index 5141add23a6e6e..675009acd07a1a 100644 --- a/dom/media/webrtc/sdp/moz.build +++ b/dom/media/webrtc/sdp/moz.build @@ -4,45 +4,45 @@ # License, v. 2.0. If a copy of the MPL was not distributed with this # file, You can obtain one at http://mozilla.org/MPL/2.0/. -if CONFIG['OS_TARGET'] == 'WINNT': - DEFINES['SIP_OS_WINDOWS'] = True -elif CONFIG['OS_TARGET'] == 'Darwin': - DEFINES['SIP_OS_OSX'] = True +if CONFIG["OS_TARGET"] == "WINNT": + DEFINES["SIP_OS_WINDOWS"] = True +elif CONFIG["OS_TARGET"] == "Darwin": + DEFINES["SIP_OS_OSX"] = True else: - DEFINES['SIP_OS_LINUX'] = True + DEFINES["SIP_OS_LINUX"] = True # Add libFuzzer configuration directives -include('/tools/fuzzing/libfuzzer-config.mozbuild') +include("/tools/fuzzing/libfuzzer-config.mozbuild") LOCAL_INCLUDES += [ - '/dom/media/webrtc', - '/media/webrtc', - '/third_party/sipcc', + "/dom/media/webrtc", + "/media/webrtc", + "/third_party/sipcc", ] UNIFIED_SOURCES += [ - 'HybridSdpParser.cpp', - 'ParsingResultComparer.cpp', - 'SdpAttribute.cpp', - 'SdpHelper.cpp', - 'SdpLog.cpp', - 'SdpMediaSection.cpp', - 'SdpPref.cpp', - 'SdpTelemetry.cpp', - 'SipccSdp.cpp', - 'SipccSdpAttributeList.cpp', - 'SipccSdpMediaSection.cpp', - 'SipccSdpParser.cpp', + "HybridSdpParser.cpp", + "ParsingResultComparer.cpp", + "SdpAttribute.cpp", + "SdpHelper.cpp", + "SdpLog.cpp", + "SdpMediaSection.cpp", + "SdpPref.cpp", + "SdpTelemetry.cpp", + "SipccSdp.cpp", + "SipccSdpAttributeList.cpp", + "SipccSdpMediaSection.cpp", + "SipccSdpParser.cpp", ] SOURCES += [ # Building these as part of the unified build leads to multiply defined # symbols on windows. - 'RsdparsaSdp.cpp', - 'RsdparsaSdpAttributeList.cpp', - 'RsdparsaSdpGlue.cpp', - 'RsdparsaSdpMediaSection.cpp', - 'RsdparsaSdpParser.cpp', + "RsdparsaSdp.cpp", + "RsdparsaSdpAttributeList.cpp", + "RsdparsaSdpGlue.cpp", + "RsdparsaSdpMediaSection.cpp", + "RsdparsaSdpParser.cpp", ] -FINAL_LIBRARY = 'xul' +FINAL_LIBRARY = "xul" diff --git a/dom/media/webrtc/tests/fuzztests/moz.build b/dom/media/webrtc/tests/fuzztests/moz.build index 239b4a6fcd5ac7..fef388e6c90fe1 100644 --- a/dom/media/webrtc/tests/fuzztests/moz.build +++ b/dom/media/webrtc/tests/fuzztests/moz.build @@ -4,19 +4,19 @@ # License, v. 2.0. 
If a copy of the MPL was not distributed with this # file, You can obtain one at http://mozilla.org/MPL/2.0/. -Library('FuzzingSdp') +Library("FuzzingSdp") LOCAL_INCLUDES += [ - '/dom/media/webrtc', - '/ipc/chromium/src', - '/media/webrtc', + "/dom/media/webrtc", + "/ipc/chromium/src", + "/media/webrtc", ] # Add libFuzzer configuration directives -include('/tools/fuzzing/libfuzzer-config.mozbuild') +include("/tools/fuzzing/libfuzzer-config.mozbuild") SOURCES += [ - 'sdp_parser_libfuzz.cpp', + "sdp_parser_libfuzz.cpp", ] -FINAL_LIBRARY = 'xul-gtest' +FINAL_LIBRARY = "xul-gtest" diff --git a/dom/media/webrtc/third_party_build/extract-for-git.py b/dom/media/webrtc/third_party_build/extract-for-git.py index 0e53c3681d8e4c..67bbeee8f53af3 100644 --- a/dom/media/webrtc/third_party_build/extract-for-git.py +++ b/dom/media/webrtc/third_party_build/extract-for-git.py @@ -10,88 +10,108 @@ # be imported into Git. It filters out commits that are not part of upstream # code and rewrites the paths to match upstream. Finally, the commits are # combined into a mailbox file that can be applied with `git am`. -LIBWEBRTC_DIR = 'third_party/libwebrtc/webrtc' +LIBWEBRTC_DIR = "third_party/libwebrtc/webrtc" def build_commit_list(revset): """Build commit list from the specified revset. - The revset can be a single revision, e.g. 52bb9bb94661, or a range, - e.g. 8c08a5bb8a99::52bb9bb94661, or any other valid revset - (check hg help revset). Only commits that touch libwebrtc are included. + The revset can be a single revision, e.g. 52bb9bb94661, or a range, + e.g. 8c08a5bb8a99::52bb9bb94661, or any other valid revset + (check hg help revset). Only commits that touch libwebrtc are included. """ - res = subprocess.run(['hg', 'log', '-r', revset, '-M', '--template', '{node}\n', - LIBWEBRTC_DIR], - capture_output=True, text=True, cwd='../../../../') - return [line.strip() for line in res.stdout.strip().split('\n')] + res = subprocess.run( + ["hg", "log", "-r", revset, "-M", "--template", "{node}\n", LIBWEBRTC_DIR], + capture_output=True, + text=True, + cwd="../../../../", + ) + return [line.strip() for line in res.stdout.strip().split("\n")] def extract_author_date(sha1): - res = subprocess.run(['hg', 'log', '-r', sha1, '--template', '{author}|{date|isodate}'], - capture_output=True, text=True) - return res.stdout.split('|') + res = subprocess.run( + ["hg", "log", "-r", sha1, "--template", "{author}|{date|isodate}"], + capture_output=True, + text=True, + ) + return res.stdout.split("|") def extract_description(sha1): - res = subprocess.run(['hg', 'log', '-r', sha1, '--template', '{desc}'], - capture_output=True, text=True) + res = subprocess.run( + ["hg", "log", "-r", sha1, "--template", "{desc}"], + capture_output=True, + text=True, + ) return res.stdout def extract_commit(sha1): - res = subprocess.run(['hg', 'log', '-r', sha1, '-pg', '--template', '\n'], - capture_output=True, text=True) + res = subprocess.run( + ["hg", "log", "-r", sha1, "-pg", "--template", "\n"], + capture_output=True, + text=True, + ) return res.stdout def filter_nonwebrtc(commit): filtered = [] skipping = False - for line in commit.split('\n'): + for line in commit.split("\n"): # Extract only patches affecting libwebrtc, but avoid commits that # touch build, which is tracked by a separate repo, or that affect # moz.build files which are code generated. 
- if (line.startswith('diff --git a/' + LIBWEBRTC_DIR) - and not line.startswith('diff --git a/' + LIBWEBRTC_DIR + '/build') - and not line.endswith('moz.build')): + if ( + line.startswith("diff --git a/" + LIBWEBRTC_DIR) + and not line.startswith("diff --git a/" + LIBWEBRTC_DIR + "/build") + and not line.endswith("moz.build") + ): skipping = False - elif line.startswith('diff --git'): + elif line.startswith("diff --git"): skipping = True if not skipping: filtered.append(line) - return '\n'.join(filtered) + return "\n".join(filtered) def fixup_paths(commit): - return re.sub('third_party/libwebrtc/webrtc/', '', commit) + return re.sub("third_party/libwebrtc/webrtc/", "", commit) def write_as_mbox(sha1, author, date, description, commit, ofile): # Use same magic date as git format-patch - ofile.write('From {} Mon Sep 17 00:00:00 2001\n'.format(sha1)) - ofile.write('From: {}\n'.format(author)) - ofile.write('Date: {}\n'.format(date)) - description = description.split('\n') - ofile.write('Subject: {}\n'.format(description[0])) - ofile.write('\n'.join(description[1:])) - ofile.write('\nMercurial Revision: https://hg.mozilla.org/mozilla-central/rev/{}\n' - .format(sha1)) + ofile.write("From {} Mon Sep 17 00:00:00 2001\n".format(sha1)) + ofile.write("From: {}\n".format(author)) + ofile.write("Date: {}\n".format(date)) + description = description.split("\n") + ofile.write("Subject: {}\n".format(description[0])) + ofile.write("\n".join(description[1:])) + ofile.write( + "\nMercurial Revision: https://hg.mozilla.org/mozilla-central/rev/{}\n".format( + sha1 + ) + ) ofile.write(commit) - ofile.write('\n') - ofile.write('\n') + ofile.write("\n") + ofile.write("\n") -if __name__ == '__main__': +if __name__ == "__main__": commits = [] - parser = argparse.ArgumentParser(description='Format commits for upstream libwebrtc') - parser.add_argument('revsets', metavar='revset', type=str, nargs='+', - help='A revset to process') + parser = argparse.ArgumentParser( + description="Format commits for upstream libwebrtc" + ) + parser.add_argument( + "revsets", metavar="revset", type=str, nargs="+", help="A revset to process" + ) args = parser.parse_args() for revset in args.revsets: commits.extend(build_commit_list(revset)) - with open('mailbox.patch', 'w') as ofile: + with open("mailbox.patch", "w") as ofile: for sha1 in commits: author, date = extract_author_date(sha1) description = extract_description(sha1) diff --git a/dom/media/webrtc/third_party_build/moz.build b/dom/media/webrtc/third_party_build/moz.build index dea7cd0d750857..248b91388d9766 100644 --- a/dom/media/webrtc/third_party_build/moz.build +++ b/dom/media/webrtc/third_party_build/moz.build @@ -7,67 +7,73 @@ with Files("**"): BUG_COMPONENT = ("Core", "WebRTC") -include('/build/gn.mozbuild') +include("/build/gn.mozbuild") webrtc_non_unified_sources = [ - '../../../../third_party/libwebrtc/webrtc/common_audio/vad/vad_core.c', # Because of name clash in the kInitCheck variable - '../../../../third_party/libwebrtc/webrtc/common_audio/vad/webrtc_vad.c', # Because of name clash in the kInitCheck variable - '../../../../third_party/libwebrtc/webrtc/modules/audio_coding/acm2/codec_manager.cc', # Because of duplicate IsCodecRED/etc - '../../../../third_party/libwebrtc/webrtc/modules/audio_coding/codecs/g722/g722_decode.c', # Because of name clash in the saturate function - '../../../../third_party/libwebrtc/webrtc/modules/audio_coding/codecs/g722/g722_encode.c', # Because of name clash in the saturate function - 
'../../../../third_party/libwebrtc/webrtc/modules/audio_coding/codecs/isac/fix/source/decode_plc.c', # Because of name clash in the exp2_Q10_T function - '../../../../third_party/libwebrtc/webrtc/modules/audio_coding/codecs/isac/fix/source/lpc_masking_model.c', # Because of name clash in the exp2_Q10_T function - '../../../../third_party/libwebrtc/webrtc/modules/audio_coding/codecs/isac/fix/source/pitch_filter.c', # Because of name clash in the kDampFilter variable - '../../../../third_party/libwebrtc/webrtc/modules/audio_coding/codecs/isac/fix/source/pitch_filter_c.c', # Because of name clash in the kDampFilter variable - '../../../../third_party/libwebrtc/webrtc/modules/audio_coding/neteq/audio_vector.cc', # Because of explicit template specializations - '../../../../third_party/libwebrtc/webrtc/modules/audio_device/android/audio_manager.cc', # Because of TAG redefinition - '../../../../third_party/libwebrtc/webrtc/modules/audio_device/android/audio_record_jni.cc', # Becuse of commonly named module static vars - '../../../../third_party/libwebrtc/webrtc/modules/audio_device/android/audio_track_jni.cc', # Becuse of commonly named module static vars - '../../../../third_party/libwebrtc/webrtc/modules/audio_device/android/opensles_player.cc', # Because of TAG redefinition - '../../../../third_party/libwebrtc/webrtc/modules/audio_device/linux/audio_device_pulse_linux.cc', # Because of LATE() - '../../../../third_party/libwebrtc/webrtc/modules/audio_device/linux/audio_mixer_manager_pulse_linux.cc',# Because of LATE() - '../../../../third_party/libwebrtc/webrtc/modules/audio_device/win/audio_device_core_win.cc', # Because of ordering assumptions in strsafe.h - '../../../../third_party/libwebrtc/webrtc/modules/audio_processing/aec/echo_cancellation.cc', # Because of conflicts over 'near' on windows - '../../../../third_party/libwebrtc/webrtc/modules/audio_processing/aecm/aecm_core.cc', # Because of the PART_LEN2 define - '../../../../third_party/libwebrtc/webrtc/modules/audio_processing/aecm/aecm_core_c.cc', # Because of the PART_LEN2 define - '../../../../third_party/libwebrtc/webrtc/modules/audio_processing/aecm/aecm_core_mips.cc', # Because of the PART_LEN2 define - '../../../../third_party/libwebrtc/webrtc/modules/audio_processing/aecm/aecm_core_neon.cc', # Because of the PART_LEN2 define - '../../../../third_party/libwebrtc/webrtc/modules/audio_processing/aecm/echo_control_mobile.cc', # Because of the PART_LEN2 define - '../../../../third_party/libwebrtc/webrtc/modules/audio_processing/agc/legacy/analog_agc.c', # Because of name clash in the kInitCheck variable - '../../../../third_party/libwebrtc/webrtc/modules/audio_processing/beamformer/covariance_matrix_generator.cc', # Because of needing to define _USE_MATH_DEFINES before including - '../../../../third_party/libwebrtc/webrtc/modules/audio_processing/beamformer/covariance_matrix_generator.cc', # Because of needing to define _USE_MATH_DEFINES before including - '../../../../third_party/libwebrtc/webrtc/modules/audio_processing/beamformer/nonlinear_beamformer.cc', # Because of needing to define _USE_MATH_DEFINES before including - '../../../../third_party/libwebrtc/webrtc/modules/audio_processing/echo_cancellation_impl.cc', # Because of name clash in the MapError function - '../../../../third_party/libwebrtc/webrtc/modules/audio_processing/echo_control_mobile_impl.cc', # Because of name clash in the MapError function - '../../../../third_party/libwebrtc/webrtc/modules/audio_processing/echo_detector/normalized_covariance_estimator.cc', # 
Because of kAlpha - '../../../../third_party/libwebrtc/webrtc/modules/audio_processing/gain_control_impl.cc', # Because of name clash in the Handle typedef - '../../../../third_party/libwebrtc/webrtc/modules/audio_processing/noise_suppression_impl.cc', # Because of name clash in the Handle typedef - '../../../../third_party/libwebrtc/webrtc/modules/audio_processing/rms_level.cc', # Because of name clash in the kMinLevel variable - '../../../../third_party/libwebrtc/webrtc/modules/congestion_controller/trendline_estimator.cc', # Because of name clash in kDeltaCounterMax - '../../../../third_party/libwebrtc/webrtc/modules/desktop_capture/win/screen_capturer_win_gdi.cc', # Because base/logging.h uses #ifndef LOG before defining anything - '../../../../third_party/libwebrtc/webrtc/modules/remote_bitrate_estimator/remote_bitrate_estimator_single_stream.cc', # Because of duplicate definitions of static consts against remote_bitrate_estimator_abs_send_time.cc - '../../../../third_party/libwebrtc/webrtc/modules/rtp_rtcp/source/flexfec_receiver.cc', # Because of identically named functions and vars between flexfec_receiver.cc and flexfec_sender.cc in an anonymous namespaces - '../../../../third_party/libwebrtc/webrtc/modules/rtp_rtcp/source/rtcp_packet/tmmbn.cc', # Because of identically named functions and vars between tmmbr.cc and tmmbn.cc in an anonymous namespaces - '../../../../third_party/libwebrtc/webrtc/modules/rtp_rtcp/source/rtcp_packet/tmmbr.cc', # Because of identically named functions and vars between tmmbr.cc and tmmbn.cc in an anonymous namespaces - '../../../../third_party/libwebrtc/webrtc/modules/rtp_rtcp/source/ulpfec_generator.cc', # Because of identically named constant kRedForFecHeaderLength in an anonymous namespace - '../../../../third_party/libwebrtc/webrtc/modules/video_capture/windows/device_info_ds.cc', # Because of the MEDIASUBTYPE_HDYC variable - '../../../../third_party/libwebrtc/webrtc/modules/video_capture/windows/help_functions_ds.cc', # Because of initguid.h - '../../../../third_party/libwebrtc/webrtc/modules/video_capture/windows/sink_filter_ds.cc', # Because of the MEDIASUBTYPE_HDYC variable and initguid.h - '../../../../third_party/libwebrtc/webrtc/video/overuse_frame_detector.cc', # Because of name clash with call_stats.cc on kWeightFactor + "../../../../third_party/libwebrtc/webrtc/common_audio/vad/vad_core.c", # Because of name clash in the kInitCheck variable + "../../../../third_party/libwebrtc/webrtc/common_audio/vad/webrtc_vad.c", # Because of name clash in the kInitCheck variable + "../../../../third_party/libwebrtc/webrtc/modules/audio_coding/acm2/codec_manager.cc", # Because of duplicate IsCodecRED/etc + "../../../../third_party/libwebrtc/webrtc/modules/audio_coding/codecs/g722/g722_decode.c", # Because of name clash in the saturate function + "../../../../third_party/libwebrtc/webrtc/modules/audio_coding/codecs/g722/g722_encode.c", # Because of name clash in the saturate function + "../../../../third_party/libwebrtc/webrtc/modules/audio_coding/codecs/isac/fix/source/decode_plc.c", # Because of name clash in the exp2_Q10_T function + "../../../../third_party/libwebrtc/webrtc/modules/audio_coding/codecs/isac/fix/source/lpc_masking_model.c", # Because of name clash in the exp2_Q10_T function + "../../../../third_party/libwebrtc/webrtc/modules/audio_coding/codecs/isac/fix/source/pitch_filter.c", # Because of name clash in the kDampFilter variable + "../../../../third_party/libwebrtc/webrtc/modules/audio_coding/codecs/isac/fix/source/pitch_filter_c.c", # 
Because of name clash in the kDampFilter variable + "../../../../third_party/libwebrtc/webrtc/modules/audio_coding/neteq/audio_vector.cc", # Because of explicit template specializations + "../../../../third_party/libwebrtc/webrtc/modules/audio_device/android/audio_manager.cc", # Because of TAG redefinition + "../../../../third_party/libwebrtc/webrtc/modules/audio_device/android/audio_record_jni.cc", # Becuse of commonly named module static vars + "../../../../third_party/libwebrtc/webrtc/modules/audio_device/android/audio_track_jni.cc", # Becuse of commonly named module static vars + "../../../../third_party/libwebrtc/webrtc/modules/audio_device/android/opensles_player.cc", # Because of TAG redefinition + "../../../../third_party/libwebrtc/webrtc/modules/audio_device/linux/audio_device_pulse_linux.cc", # Because of LATE() + "../../../../third_party/libwebrtc/webrtc/modules/audio_device/linux/audio_mixer_manager_pulse_linux.cc", # Because of LATE() + "../../../../third_party/libwebrtc/webrtc/modules/audio_device/win/audio_device_core_win.cc", # Because of ordering assumptions in strsafe.h + "../../../../third_party/libwebrtc/webrtc/modules/audio_processing/aec/echo_cancellation.cc", # Because of conflicts over 'near' on windows + "../../../../third_party/libwebrtc/webrtc/modules/audio_processing/aecm/aecm_core.cc", # Because of the PART_LEN2 define + "../../../../third_party/libwebrtc/webrtc/modules/audio_processing/aecm/aecm_core_c.cc", # Because of the PART_LEN2 define + "../../../../third_party/libwebrtc/webrtc/modules/audio_processing/aecm/aecm_core_mips.cc", # Because of the PART_LEN2 define + "../../../../third_party/libwebrtc/webrtc/modules/audio_processing/aecm/aecm_core_neon.cc", # Because of the PART_LEN2 define + "../../../../third_party/libwebrtc/webrtc/modules/audio_processing/aecm/echo_control_mobile.cc", # Because of the PART_LEN2 define + "../../../../third_party/libwebrtc/webrtc/modules/audio_processing/agc/legacy/analog_agc.c", # Because of name clash in the kInitCheck variable + "../../../../third_party/libwebrtc/webrtc/modules/audio_processing/beamformer/covariance_matrix_generator.cc", # Because of needing to define _USE_MATH_DEFINES before including + "../../../../third_party/libwebrtc/webrtc/modules/audio_processing/beamformer/covariance_matrix_generator.cc", # Because of needing to define _USE_MATH_DEFINES before including + "../../../../third_party/libwebrtc/webrtc/modules/audio_processing/beamformer/nonlinear_beamformer.cc", # Because of needing to define _USE_MATH_DEFINES before including + "../../../../third_party/libwebrtc/webrtc/modules/audio_processing/echo_cancellation_impl.cc", # Because of name clash in the MapError function + "../../../../third_party/libwebrtc/webrtc/modules/audio_processing/echo_control_mobile_impl.cc", # Because of name clash in the MapError function + "../../../../third_party/libwebrtc/webrtc/modules/audio_processing/echo_detector/normalized_covariance_estimator.cc", # Because of kAlpha + "../../../../third_party/libwebrtc/webrtc/modules/audio_processing/gain_control_impl.cc", # Because of name clash in the Handle typedef + "../../../../third_party/libwebrtc/webrtc/modules/audio_processing/noise_suppression_impl.cc", # Because of name clash in the Handle typedef + "../../../../third_party/libwebrtc/webrtc/modules/audio_processing/rms_level.cc", # Because of name clash in the kMinLevel variable + "../../../../third_party/libwebrtc/webrtc/modules/congestion_controller/trendline_estimator.cc", # Because of name clash in kDeltaCounterMax + 
"../../../../third_party/libwebrtc/webrtc/modules/desktop_capture/win/screen_capturer_win_gdi.cc", # Because base/logging.h uses #ifndef LOG before defining anything + "../../../../third_party/libwebrtc/webrtc/modules/remote_bitrate_estimator/remote_bitrate_estimator_single_stream.cc", # Because of duplicate definitions of static consts against remote_bitrate_estimator_abs_send_time.cc + "../../../../third_party/libwebrtc/webrtc/modules/rtp_rtcp/source/flexfec_receiver.cc", # Because of identically named functions and vars between flexfec_receiver.cc and flexfec_sender.cc in an anonymous namespaces + "../../../../third_party/libwebrtc/webrtc/modules/rtp_rtcp/source/rtcp_packet/tmmbn.cc", # Because of identically named functions and vars between tmmbr.cc and tmmbn.cc in an anonymous namespaces + "../../../../third_party/libwebrtc/webrtc/modules/rtp_rtcp/source/rtcp_packet/tmmbr.cc", # Because of identically named functions and vars between tmmbr.cc and tmmbn.cc in an anonymous namespaces + "../../../../third_party/libwebrtc/webrtc/modules/rtp_rtcp/source/ulpfec_generator.cc", # Because of identically named constant kRedForFecHeaderLength in an anonymous namespace + "../../../../third_party/libwebrtc/webrtc/modules/video_capture/windows/device_info_ds.cc", # Because of the MEDIASUBTYPE_HDYC variable + "../../../../third_party/libwebrtc/webrtc/modules/video_capture/windows/help_functions_ds.cc", # Because of initguid.h + "../../../../third_party/libwebrtc/webrtc/modules/video_capture/windows/sink_filter_ds.cc", # Because of the MEDIASUBTYPE_HDYC variable and initguid.h + "../../../../third_party/libwebrtc/webrtc/video/overuse_frame_detector.cc", # Because of name clash with call_stats.cc on kWeightFactor ] -GN_DIRS += ['../../../../third_party/libwebrtc/webrtc'] +GN_DIRS += ["../../../../third_party/libwebrtc/webrtc"] gn_vars_copy = gn_vars.copy() -GN_DIRS['../../../../third_party/libwebrtc/webrtc'].variables = gn_vars_copy -GN_DIRS['../../../../third_party/libwebrtc/webrtc'].mozilla_flags = [ - '-fobjc-arc', - '-mfpu=neon', - '-msse2', +GN_DIRS["../../../../third_party/libwebrtc/webrtc"].variables = gn_vars_copy +GN_DIRS["../../../../third_party/libwebrtc/webrtc"].mozilla_flags = [ + "-fobjc-arc", + "-mfpu=neon", + "-msse2", ] -GN_DIRS['../../../../third_party/libwebrtc/webrtc'].gn_target = '//:webrtc' +GN_DIRS["../../../../third_party/libwebrtc/webrtc"].gn_target = "//:webrtc" # We allow warnings for third-party code that can be updated from upstream. 
-GN_DIRS['../../../../third_party/libwebrtc/webrtc'].sandbox_vars['COMPILE_FLAGS'] = {'WARNINGS_AS_ERRORS': []} -GN_DIRS['../../../../third_party/libwebrtc/webrtc'].sandbox_vars['FINAL_LIBRARY'] = 'webrtc' -GN_DIRS['../../../../third_party/libwebrtc/webrtc'].non_unified_sources += webrtc_non_unified_sources +GN_DIRS["../../../../third_party/libwebrtc/webrtc"].sandbox_vars["COMPILE_FLAGS"] = { + "WARNINGS_AS_ERRORS": [] +} +GN_DIRS["../../../../third_party/libwebrtc/webrtc"].sandbox_vars[ + "FINAL_LIBRARY" +] = "webrtc" +GN_DIRS[ + "../../../../third_party/libwebrtc/webrtc" +].non_unified_sources += webrtc_non_unified_sources diff --git a/dom/media/webrtc/third_party_build/vendor-libwebrtc.py b/dom/media/webrtc/third_party_build/vendor-libwebrtc.py index e93601e8f5af18..55a2efe268ac14 100644 --- a/dom/media/webrtc/third_party_build/vendor-libwebrtc.py +++ b/dom/media/webrtc/third_party_build/vendor-libwebrtc.py @@ -12,81 +12,94 @@ LIBWEBRTC_USED_IN_FIREFOX = [ - 'AUTHORS', - 'LICENSE', - 'OWNERS', - 'PATENTS', - 'api', - 'audio', - 'call', - 'common_audio', - 'common_types.h', - 'common_video', - 'logging', - 'media', - 'modules', - 'rtc_base', - 'sdk/android', - 'system_wrappers', - 'video', + "AUTHORS", + "LICENSE", + "OWNERS", + "PATENTS", + "api", + "audio", + "call", + "common_audio", + "common_types.h", + "common_video", + "logging", + "media", + "modules", + "rtc_base", + "sdk/android", + "system_wrappers", + "video", ] -LIBWEBRTC_DIR = os.path.normpath('../../../../third_party/libwebrtc/webrtc') +LIBWEBRTC_DIR = os.path.normpath("../../../../third_party/libwebrtc/webrtc") def make_github_url(repo, commit): - if not repo.endswith('/'): - repo += '/' - return repo + 'archive/' + commit + '.tar.gz' + if not repo.endswith("/"): + repo += "/" + return repo + "archive/" + commit + ".tar.gz" def make_googlesource_url(target, commit): - if target == 'libwebrtc': - return 'https://webrtc.googlesource.com/src.git/+archive/' + commit + '.tar.gz' + if target == "libwebrtc": + return "https://webrtc.googlesource.com/src.git/+archive/" + commit + ".tar.gz" else: - return ('https://chromium.googlesource.com/chromium/src/build/+archive/' + commit + - '.tar.gz') + return ( + "https://chromium.googlesource.com/chromium/src/build/+archive/" + + commit + + ".tar.gz" + ) def fetch(target, url): - print('Fetching commit from {}'.format(url)) + print("Fetching commit from {}".format(url)) req = requests.get(url) if req.status_code == 200: - with open(target + '.tar.gz', 'wb') as f: + with open(target + ".tar.gz", "wb") as f: f.write(req.content) else: - print('Hit status code {} fetching commit. Aborting.'.format(req.status_code), - file=sys.stderr) + print( + "Hit status code {} fetching commit. Aborting.".format(req.status_code), + file=sys.stderr, + ) sys.exit(1) - with open(os.path.join(LIBWEBRTC_DIR, 'README.mozilla'), 'w') as f: - f.write('libwebrtc updated from commit {} on {}.'.format( - url, datetime.datetime.utcnow().isoformat())) + with open(os.path.join(LIBWEBRTC_DIR, "README.mozilla"), "w") as f: + f.write( + "libwebrtc updated from commit {} on {}.".format( + url, datetime.datetime.utcnow().isoformat() + ) + ) def fetch_local(target, path, commit): - target_archive = target + '.tar.gz' - cp = subprocess.run(['git', 'archive', '-o', target_archive, commit], cwd=path) + target_archive = target + ".tar.gz" + cp = subprocess.run(["git", "archive", "-o", target_archive, commit], cwd=path) if cp.returncode != 0: - print('Hit return code {} fetching commit. 
Aborting.'.format(cp.returncode), - file=sys.stderr) + print( + "Hit return code {} fetching commit. Aborting.".format(cp.returncode), + file=sys.stderr, + ) sys.exit(1) - with open(os.path.join(LIBWEBRTC_DIR, 'README.mozilla'), 'w') as f: - f.write('libwebrtc updated from {} commit {} on {}.'.format( - path, commit, datetime.datetime.utcnow().isoformat())) + with open(os.path.join(LIBWEBRTC_DIR, "README.mozilla"), "w") as f: + f.write( + "libwebrtc updated from {} commit {} on {}.".format( + path, commit, datetime.datetime.utcnow().isoformat() + ) + ) shutil.move(os.path.join(path, target_archive), target_archive) def unpack(target): - target_archive = target + '.tar.gz' - target_path = 'tmp-' + target + target_archive = target + ".tar.gz" + target_path = "tmp-" + target try: shutil.rmtree(target_path) except FileNotFoundError: pass tarfile.open(target_archive).extractall(path=target_path) - if target == 'libwebrtc': + if target == "libwebrtc": for path in LIBWEBRTC_USED_IN_FIREFOX: try: shutil.rmtree(os.path.join(LIBWEBRTC_DIR, path)) @@ -97,48 +110,54 @@ def unpack(target): if os.path.exists(os.path.join(target_path, LIBWEBRTC_USED_IN_FIREFOX[0])): for path in LIBWEBRTC_USED_IN_FIREFOX: - shutil.move(os.path.join(target_path, path), - os.path.join(LIBWEBRTC_DIR, path)) + shutil.move( + os.path.join(target_path, path), os.path.join(LIBWEBRTC_DIR, path) + ) else: # GitHub packs everything inside a separate directory target_path = os.path.join(target_path, os.listdir(target_path)[0]) for path in LIBWEBRTC_USED_IN_FIREFOX: - shutil.move(os.path.join(target_path, path), - os.path.join(LIBWEBRTC_DIR, path)) + shutil.move( + os.path.join(target_path, path), os.path.join(LIBWEBRTC_DIR, path) + ) else: try: - shutil.rmtree(os.path.join(LIBWEBRTC_DIR, 'build')) + shutil.rmtree(os.path.join(LIBWEBRTC_DIR, "build")) except FileNotFoundError: pass - os.makedirs(os.path.join(LIBWEBRTC_DIR, 'build')) + os.makedirs(os.path.join(LIBWEBRTC_DIR, "build")) - if os.path.exists(os.path.join(target_path, 'linux')): + if os.path.exists(os.path.join(target_path, "linux")): for path in os.listdir(target_path): - shutil.move(os.path.join(target_path, path), - os.path.join(LIBWEBRTC_DIR, 'build', path)) + shutil.move( + os.path.join(target_path, path), + os.path.join(LIBWEBRTC_DIR, "build", path), + ) else: # GitHub packs everything inside a separate directory target_path = os.path.join(target_path, os.listdir(target_path)[0]) for path in os.listdir(target_path): - shutil.move(os.path.join(target_path, path), - os.path.join(LIBWEBRTC_DIR, 'build', path)) + shutil.move( + os.path.join(target_path, path), + os.path.join(LIBWEBRTC_DIR, "build", path), + ) def cleanup(target): - os.remove(target + '.tar.gz') - shutil.rmtree('tmp-' + target) + os.remove(target + ".tar.gz") + shutil.rmtree("tmp-" + target) -if __name__ == '__main__': - parser = argparse.ArgumentParser(description='Update libwebrtc') - parser.add_argument('target', choices=('libwebrtc', 'build')) +if __name__ == "__main__": + parser = argparse.ArgumentParser(description="Update libwebrtc") + parser.add_argument("target", choices=("libwebrtc", "build")) group = parser.add_mutually_exclusive_group(required=True) - group.add_argument('--from-github', type=str) - group.add_argument('--from-googlesource', action='store_true', default=False) - group.add_argument('--from-local', type=str) - parser.add_argument('--commit', type=str, default='master') - parser.add_argument('--skip-fetch', action='store_true', default=False) - parser.add_argument('--skip-cleanup', 
action='store_true', default=False) + group.add_argument("--from-github", type=str) + group.add_argument("--from-googlesource", action="store_true", default=False) + group.add_argument("--from-local", type=str) + parser.add_argument("--commit", type=str, default="master") + parser.add_argument("--skip-fetch", action="store_true", default=False) + parser.add_argument("--skip-cleanup", action="store_true", default=False) args = parser.parse_args() os.makedirs(LIBWEBRTC_DIR, exist_ok=True) diff --git a/dom/media/webrtc/transport/build/moz.build b/dom/media/webrtc/transport/build/moz.build index 72b93ceec68b22..7349ee5d71b4a3 100644 --- a/dom/media/webrtc/transport/build/moz.build +++ b/dom/media/webrtc/transport/build/moz.build @@ -7,33 +7,33 @@ include("/ipc/chromium/chromium-config.mozbuild") EXPORTS.transport += [ - '../dtlsidentity.h', - '../m_cpp_utils.h', - '../mediapacket.h', - '../nr_socket_proxy_config.h', - '../nricectx.h', - '../nricemediastream.h', - '../nriceresolverfake.h', - '../nricestunaddr.h', - '../rlogconnector.h', - '../runnable_utils.h', - '../sigslot.h', - '../simpletokenbucket.h', - '../SrtpFlow.h', - '../stun_socket_filter.h', - '../transportflow.h', - '../transportlayer.h', - '../transportlayerdtls.h', - '../transportlayerice.h', - '../transportlayerlog.h', - '../transportlayerloopback.h', - '../transportlayersrtp.h', + "../dtlsidentity.h", + "../m_cpp_utils.h", + "../mediapacket.h", + "../nr_socket_proxy_config.h", + "../nricectx.h", + "../nricemediastream.h", + "../nriceresolverfake.h", + "../nricestunaddr.h", + "../rlogconnector.h", + "../runnable_utils.h", + "../sigslot.h", + "../simpletokenbucket.h", + "../SrtpFlow.h", + "../stun_socket_filter.h", + "../transportflow.h", + "../transportlayer.h", + "../transportlayerdtls.h", + "../transportlayerice.h", + "../transportlayerlog.h", + "../transportlayerloopback.h", + "../transportlayersrtp.h", ] -include('../common.build') +include("../common.build") # Add libFuzzer configuration directives -include('/tools/fuzzing/libfuzzer-config.mozbuild') +include("/tools/fuzzing/libfuzzer-config.mozbuild") # These files cannot be built in unified mode because of the redefinition of # getLogModule, UNIMPLEMENTED, nr_socket_long_term_violation_time, @@ -41,4 +41,4 @@ include('/tools/fuzzing/libfuzzer-config.mozbuild') # PBrowserOrId and WebrtcTCPSocketChild. SOURCES += transport_cppsrcs -FINAL_LIBRARY = 'xul' +FINAL_LIBRARY = "xul" diff --git a/dom/media/webrtc/transport/common.build b/dom/media/webrtc/transport/common.build index 28351219b3ecf6..fecf14913598d3 100644 --- a/dom/media/webrtc/transport/common.build +++ b/dom/media/webrtc/transport/common.build @@ -5,93 +5,93 @@ # file, You can obtain one at http://mozilla.org/MPL/2.0/. 
transport_lcppsrcs = [ - 'dtlsidentity.cpp', - 'mediapacket.cpp', - 'nr_socket_proxy_config.cpp', - 'nr_socket_prsock.cpp', - 'nr_socket_tcp.cpp', - 'nr_timer.cpp', - 'nricectx.cpp', - 'nricemediastream.cpp', - 'nriceresolver.cpp', - 'nriceresolverfake.cpp', - 'nricestunaddr.cpp', - 'nrinterfaceprioritizer.cpp', - 'rlogconnector.cpp', - 'simpletokenbucket.cpp', - 'SrtpFlow.cpp', - 'stun_socket_filter.cpp', - 'test_nr_socket.cpp', - 'transportflow.cpp', - 'transportlayer.cpp', - 'transportlayerdtls.cpp', - 'transportlayerice.cpp', - 'transportlayerlog.cpp', - 'transportlayerloopback.cpp', - 'transportlayersrtp.cpp', - 'WebrtcTCPSocketWrapper.cpp', + "dtlsidentity.cpp", + "mediapacket.cpp", + "nr_socket_proxy_config.cpp", + "nr_socket_prsock.cpp", + "nr_socket_tcp.cpp", + "nr_timer.cpp", + "nricectx.cpp", + "nricemediastream.cpp", + "nriceresolver.cpp", + "nriceresolverfake.cpp", + "nricestunaddr.cpp", + "nrinterfaceprioritizer.cpp", + "rlogconnector.cpp", + "simpletokenbucket.cpp", + "SrtpFlow.cpp", + "stun_socket_filter.cpp", + "test_nr_socket.cpp", + "transportflow.cpp", + "transportlayer.cpp", + "transportlayerdtls.cpp", + "transportlayerice.cpp", + "transportlayerlog.cpp", + "transportlayerloopback.cpp", + "transportlayersrtp.cpp", + "WebrtcTCPSocketWrapper.cpp", ] transport_cppsrcs = [ - '/dom/media/webrtc/transport/%s' % s for s in sorted(transport_lcppsrcs) + "/dom/media/webrtc/transport/%s" % s for s in sorted(transport_lcppsrcs) ] LOCAL_INCLUDES += [ - '/dom/media/webrtc/transport/', - '/dom/media/webrtc/transport/third_party/', - '/dom/media/webrtc/transport/third_party/nICEr/src/crypto', - '/dom/media/webrtc/transport/third_party/nICEr/src/ice', - '/dom/media/webrtc/transport/third_party/nICEr/src/net', - '/dom/media/webrtc/transport/third_party/nICEr/src/stun', - '/dom/media/webrtc/transport/third_party/nICEr/src/util', - '/dom/media/webrtc/transport/third_party/nrappkit/src/event', - '/dom/media/webrtc/transport/third_party/nrappkit/src/log', - '/dom/media/webrtc/transport/third_party/nrappkit/src/plugin', - '/dom/media/webrtc/transport/third_party/nrappkit/src/port/generic/include', - '/dom/media/webrtc/transport/third_party/nrappkit/src/registry', - '/dom/media/webrtc/transport/third_party/nrappkit/src/share', - '/dom/media/webrtc/transport/third_party/nrappkit/src/stats', - '/dom/media/webrtc/transport/third_party/nrappkit/src/util/libekr', - '/netwerk/srtp/src/crypto/include', - '/netwerk/srtp/src/include', + "/dom/media/webrtc/transport/", + "/dom/media/webrtc/transport/third_party/", + "/dom/media/webrtc/transport/third_party/nICEr/src/crypto", + "/dom/media/webrtc/transport/third_party/nICEr/src/ice", + "/dom/media/webrtc/transport/third_party/nICEr/src/net", + "/dom/media/webrtc/transport/third_party/nICEr/src/stun", + "/dom/media/webrtc/transport/third_party/nICEr/src/util", + "/dom/media/webrtc/transport/third_party/nrappkit/src/event", + "/dom/media/webrtc/transport/third_party/nrappkit/src/log", + "/dom/media/webrtc/transport/third_party/nrappkit/src/plugin", + "/dom/media/webrtc/transport/third_party/nrappkit/src/port/generic/include", + "/dom/media/webrtc/transport/third_party/nrappkit/src/registry", + "/dom/media/webrtc/transport/third_party/nrappkit/src/share", + "/dom/media/webrtc/transport/third_party/nrappkit/src/stats", + "/dom/media/webrtc/transport/third_party/nrappkit/src/util/libekr", + "/netwerk/srtp/src/crypto/include", + "/netwerk/srtp/src/include", ] -if CONFIG['OS_TARGET'] in ['Darwin', 'DragonFly', 'FreeBSD', 'NetBSD', 'OpenBSD']: - if 
CONFIG['OS_TARGET'] == 'Darwin': - DEFINES['DARWIN'] = True +if CONFIG["OS_TARGET"] in ["Darwin", "DragonFly", "FreeBSD", "NetBSD", "OpenBSD"]: + if CONFIG["OS_TARGET"] == "Darwin": + DEFINES["DARWIN"] = True else: - DEFINES['BSD'] = True + DEFINES["BSD"] = True LOCAL_INCLUDES += [ - '/dom/media/webrtc/transport/third_party/nrappkit/src/port/darwin/include', + "/dom/media/webrtc/transport/third_party/nrappkit/src/port/darwin/include", ] -elif CONFIG['OS_TARGET'] == 'Linux': - DEFINES['LINUX'] = True +elif CONFIG["OS_TARGET"] == "Linux": + DEFINES["LINUX"] = True LOCAL_INCLUDES += [ - '/dom/media/webrtc/transport/third_party/nrappkit/src/port/linux/include', + "/dom/media/webrtc/transport/third_party/nrappkit/src/port/linux/include", ] -elif CONFIG['OS_TARGET'] == 'Android': - DEFINES['LINUX'] = True - DEFINES['ANDROID'] = True +elif CONFIG["OS_TARGET"] == "Android": + DEFINES["LINUX"] = True + DEFINES["ANDROID"] = True LOCAL_INCLUDES += [ - '/dom/media/webrtc/transport/third_party/nrappkit/src/port/android/include', + "/dom/media/webrtc/transport/third_party/nrappkit/src/port/android/include", ] -elif CONFIG['OS_TARGET'] == 'WINNT': - DEFINES['WIN'] = True +elif CONFIG["OS_TARGET"] == "WINNT": + DEFINES["WIN"] = True # for stun.h - DEFINES['WIN32'] = True + DEFINES["WIN32"] = True LOCAL_INCLUDES += [ - '/dom/media/webrtc/transport/third_party/nrappkit/src/port/win32/include', + "/dom/media/webrtc/transport/third_party/nrappkit/src/port/win32/include", ] -for var in ('HAVE_STRDUP', 'NR_SOCKET_IS_VOID_PTR'): +for var in ("HAVE_STRDUP", "NR_SOCKET_IS_VOID_PTR"): DEFINES[var] = True -DEFINES['R_DEFINED_INT2'] = 'int16_t' -DEFINES['R_DEFINED_UINT2'] = 'uint16_t' -DEFINES['R_DEFINED_INT4'] = 'int32_t' -DEFINES['R_DEFINED_UINT4'] = 'uint32_t' -DEFINES['R_DEFINED_INT8'] = 'int64_t' -DEFINES['R_DEFINED_UINT8'] = 'uint64_t' +DEFINES["R_DEFINED_INT2"] = "int16_t" +DEFINES["R_DEFINED_UINT2"] = "uint16_t" +DEFINES["R_DEFINED_INT4"] = "int32_t" +DEFINES["R_DEFINED_UINT4"] = "uint32_t" +DEFINES["R_DEFINED_INT8"] = "int64_t" +DEFINES["R_DEFINED_UINT8"] = "uint64_t" -if CONFIG['CC_TYPE'] in ('clang', 'gcc'): - CXXFLAGS += ['-Wno-error=shadow'] +if CONFIG["CC_TYPE"] in ("clang", "gcc"): + CXXFLAGS += ["-Wno-error=shadow"] diff --git a/dom/media/webrtc/transport/fuzztest/moz.build b/dom/media/webrtc/transport/fuzztest/moz.build index f6c4b95774690d..f22a5a702bed26 100644 --- a/dom/media/webrtc/transport/fuzztest/moz.build +++ b/dom/media/webrtc/transport/fuzztest/moz.build @@ -4,28 +4,28 @@ # License, v. 2.0. If a copy of the MPL was not distributed with this # file, You can obtain one at http://mozilla.org/MPL/2.0/. 
-Library('FuzzingStun') +Library("FuzzingStun") -DEFINES['HAVE_STRDUP'] = True +DEFINES["HAVE_STRDUP"] = True LOCAL_INCLUDES += [ - '/dom/media/webrtc/transport/third_party/nICEr/src/net', - '/dom/media/webrtc/transport/third_party/nICEr/src/stun', - '/dom/media/webrtc/transport/third_party/nrappkit/src/event', - '/dom/media/webrtc/transport/third_party/nrappkit/src/log', - '/dom/media/webrtc/transport/third_party/nrappkit/src/plugin', - '/dom/media/webrtc/transport/third_party/nrappkit/src/port/darwin/include', - '/dom/media/webrtc/transport/third_party/nrappkit/src/share', - '/dom/media/webrtc/transport/third_party/nrappkit/src/stats', - '/dom/media/webrtc/transport/third_party/nrappkit/src/util/libekr', - '/ipc/chromium/src', + "/dom/media/webrtc/transport/third_party/nICEr/src/net", + "/dom/media/webrtc/transport/third_party/nICEr/src/stun", + "/dom/media/webrtc/transport/third_party/nrappkit/src/event", + "/dom/media/webrtc/transport/third_party/nrappkit/src/log", + "/dom/media/webrtc/transport/third_party/nrappkit/src/plugin", + "/dom/media/webrtc/transport/third_party/nrappkit/src/port/darwin/include", + "/dom/media/webrtc/transport/third_party/nrappkit/src/share", + "/dom/media/webrtc/transport/third_party/nrappkit/src/stats", + "/dom/media/webrtc/transport/third_party/nrappkit/src/util/libekr", + "/ipc/chromium/src", ] # Add libFuzzer configuration directives -include('/tools/fuzzing/libfuzzer-config.mozbuild') +include("/tools/fuzzing/libfuzzer-config.mozbuild") SOURCES += [ - 'stun_parser_libfuzz.cpp', + "stun_parser_libfuzz.cpp", ] -FINAL_LIBRARY = 'xul-gtest' +FINAL_LIBRARY = "xul-gtest" diff --git a/dom/media/webrtc/transport/ipc/moz.build b/dom/media/webrtc/transport/ipc/moz.build index 8e0bebc949935a..a2a72bb624601c 100644 --- a/dom/media/webrtc/transport/ipc/moz.build +++ b/dom/media/webrtc/transport/ipc/moz.build @@ -3,52 +3,52 @@ # file, You can obtain one at http://mozilla.org/MPL/2.0/. EXPORTS.mozilla.net += [ - 'NrIceStunAddrMessageUtils.h', - 'PStunAddrsParams.h', - 'StunAddrsRequestChild.h', - 'StunAddrsRequestParent.h', - 'WebrtcTCPSocket.h', - 'WebrtcTCPSocketCallback.h', - 'WebrtcTCPSocketChild.h', - 'WebrtcTCPSocketParent.h', + "NrIceStunAddrMessageUtils.h", + "PStunAddrsParams.h", + "StunAddrsRequestChild.h", + "StunAddrsRequestParent.h", + "WebrtcTCPSocket.h", + "WebrtcTCPSocketCallback.h", + "WebrtcTCPSocketChild.h", + "WebrtcTCPSocketParent.h", ] UNIFIED_SOURCES += [ - 'StunAddrsRequestChild.cpp', - 'StunAddrsRequestParent.cpp', - 'WebrtcTCPSocket.cpp', - 'WebrtcTCPSocketChild.cpp', - 'WebrtcTCPSocketLog.cpp', - 'WebrtcTCPSocketParent.cpp', + "StunAddrsRequestChild.cpp", + "StunAddrsRequestParent.cpp", + "WebrtcTCPSocket.cpp", + "WebrtcTCPSocketChild.cpp", + "WebrtcTCPSocketLog.cpp", + "WebrtcTCPSocketParent.cpp", ] IPDL_SOURCES += [ - 'PStunAddrsRequest.ipdl', - 'PWebrtcTCPSocket.ipdl', - 'WebrtcProxyConfig.ipdlh', + "PStunAddrsRequest.ipdl", + "PWebrtcTCPSocket.ipdl", + "WebrtcProxyConfig.ipdlh", ] include("/ipc/chromium/chromium-config.mozbuild") -FINAL_LIBRARY = 'xul' +FINAL_LIBRARY = "xul" -DEFINES['R_DEFINED_INT2'] = 'int16_t' -DEFINES['R_DEFINED_UINT2'] = 'uint16_t' -DEFINES['R_DEFINED_INT4'] = 'int32_t' -DEFINES['R_DEFINED_UINT4'] = 'uint32_t' +DEFINES["R_DEFINED_INT2"] = "int16_t" +DEFINES["R_DEFINED_UINT2"] = "uint16_t" +DEFINES["R_DEFINED_INT4"] = "int32_t" +DEFINES["R_DEFINED_UINT4"] = "uint32_t" # These are defined to avoid a conflict between typedefs in winsock2.h and # r_types.h. 
This is safe because these types are unused by the code here, # but still deeply unfortunate. There is similar code in the win32 version of # csi_platform.h, but that trick does not work here, even if that file is # directly included. -DEFINES['R_DEFINED_INT8'] = 'int8_t' -DEFINES['R_DEFINED_UINT8'] = 'uint8_t' +DEFINES["R_DEFINED_INT8"] = "int8_t" +DEFINES["R_DEFINED_UINT8"] = "uint8_t" LOCAL_INCLUDES += [ - '/dom/media/webrtc/jsapi', - '/dom/media/webrtc/transport/third_party/nICEr/src/net', - '/dom/media/webrtc/transport/third_party/nrappkit/src/util/libekr', - '/media/webrtc', - '/netwerk/base', - '/netwerk/protocol/http', + "/dom/media/webrtc/jsapi", + "/dom/media/webrtc/transport/third_party/nICEr/src/net", + "/dom/media/webrtc/transport/third_party/nrappkit/src/util/libekr", + "/media/webrtc", + "/netwerk/base", + "/netwerk/protocol/http", ] diff --git a/dom/media/webrtc/transport/moz.build b/dom/media/webrtc/transport/moz.build index 82ff399a43fcc8..4fefe2c062d751 100644 --- a/dom/media/webrtc/transport/moz.build +++ b/dom/media/webrtc/transport/moz.build @@ -10,15 +10,13 @@ with Files("**"): include("/ipc/chromium/chromium-config.mozbuild") DIRS += [ - '/dom/media/webrtc/transport/third_party', - '/dom/media/webrtc/transport/build', - '/dom/media/webrtc/transport/ipc', + "/dom/media/webrtc/transport/third_party", + "/dom/media/webrtc/transport/build", + "/dom/media/webrtc/transport/ipc", ] # Add libFuzzer configuration directives -include('/tools/fuzzing/libfuzzer-config.mozbuild') +include("/tools/fuzzing/libfuzzer-config.mozbuild") -if CONFIG['FUZZING_INTERFACES']: - TEST_DIRS += [ - 'fuzztest' - ] +if CONFIG["FUZZING_INTERFACES"]: + TEST_DIRS += ["fuzztest"] diff --git a/dom/media/webrtc/transport/test/moz.build b/dom/media/webrtc/transport/test/moz.build index 7fab8c1815a485..b9818705c2892b 100644 --- a/dom/media/webrtc/transport/test/moz.build +++ b/dom/media/webrtc/transport/test/moz.build @@ -6,102 +6,102 @@ include("/ipc/chromium/chromium-config.mozbuild") -if CONFIG['OS_TARGET'] != 'WINNT': +if CONFIG["OS_TARGET"] != "WINNT": - if CONFIG['OS_TARGET'] != 'Android': + if CONFIG["OS_TARGET"] != "Android": SOURCES += [ - 'ice_unittest.cpp', + "ice_unittest.cpp", ] SOURCES += [ - 'buffered_stun_socket_unittest.cpp', - 'multi_tcp_socket_unittest.cpp', - 'nrappkit_unittest.cpp', - 'proxy_tunnel_socket_unittest.cpp', - 'rlogconnector_unittest.cpp', - 'runnable_utils_unittest.cpp', - 'simpletokenbucket_unittest.cpp', - 'sockettransportservice_unittest.cpp', - 'stunserver.cpp', - 'test_nr_socket_ice_unittest.cpp', - 'test_nr_socket_unittest.cpp', - 'TestSyncRunnable.cpp', - 'transport_unittests.cpp', - 'turn_unittest.cpp', - 'webrtcproxychannel_unittest.cpp', + "buffered_stun_socket_unittest.cpp", + "multi_tcp_socket_unittest.cpp", + "nrappkit_unittest.cpp", + "proxy_tunnel_socket_unittest.cpp", + "rlogconnector_unittest.cpp", + "runnable_utils_unittest.cpp", + "simpletokenbucket_unittest.cpp", + "sockettransportservice_unittest.cpp", + "stunserver.cpp", + "test_nr_socket_ice_unittest.cpp", + "test_nr_socket_unittest.cpp", + "TestSyncRunnable.cpp", + "transport_unittests.cpp", + "turn_unittest.cpp", + "webrtcproxychannel_unittest.cpp", ] - if CONFIG['MOZ_SCTP']: + if CONFIG["MOZ_SCTP"]: SOURCES += [ - 'sctp_unittest.cpp', + "sctp_unittest.cpp", ] -for var in ('HAVE_STRDUP', 'NR_SOCKET_IS_VOID_PTR', 'SCTP_DEBUG'): +for var in ("HAVE_STRDUP", "NR_SOCKET_IS_VOID_PTR", "SCTP_DEBUG"): DEFINES[var] = True -if CONFIG['OS_TARGET'] == 'Android': - DEFINES['LINUX'] = True - 
DEFINES['ANDROID'] = True +if CONFIG["OS_TARGET"] == "Android": + DEFINES["LINUX"] = True + DEFINES["ANDROID"] = True LOCAL_INCLUDES += [ - '/dom/media/webrtc/transport/third_party/nrappkit/src/port/android/include', + "/dom/media/webrtc/transport/third_party/nrappkit/src/port/android/include", ] -if CONFIG['OS_TARGET'] == 'Linux': - DEFINES['LINUX'] = True +if CONFIG["OS_TARGET"] == "Linux": + DEFINES["LINUX"] = True LOCAL_INCLUDES += [ - '/dom/media/webrtc/transport/third_party/nrappkit/src/port/linux/include', + "/dom/media/webrtc/transport/third_party/nrappkit/src/port/linux/include", ] -if CONFIG['OS_TARGET'] == 'Darwin': +if CONFIG["OS_TARGET"] == "Darwin": LOCAL_INCLUDES += [ - '/dom/media/webrtc/transport/third_party/nrappkit/src/port/darwin/include', + "/dom/media/webrtc/transport/third_party/nrappkit/src/port/darwin/include", ] -if CONFIG['OS_TARGET'] in ('DragonFly', 'FreeBSD', 'NetBSD', 'OpenBSD'): - if CONFIG['OS_TARGET'] == 'Darwin': - DEFINES['DARWIN'] = True +if CONFIG["OS_TARGET"] in ("DragonFly", "FreeBSD", "NetBSD", "OpenBSD"): + if CONFIG["OS_TARGET"] == "Darwin": + DEFINES["DARWIN"] = True else: - DEFINES['BSD'] = True + DEFINES["BSD"] = True LOCAL_INCLUDES += [ - '/dom/media/webrtc/transport/third_party/nrappkit/src/port/darwin/include', + "/dom/media/webrtc/transport/third_party/nrappkit/src/port/darwin/include", ] # SCTP DEFINES -if CONFIG['OS_TARGET'] == 'WINNT': - DEFINES['WIN'] = True +if CONFIG["OS_TARGET"] == "WINNT": + DEFINES["WIN"] = True # for stun.h - DEFINES['WIN32'] = True - DEFINES['__Userspace_os_Windows'] = 1 + DEFINES["WIN32"] = True + DEFINES["__Userspace_os_Windows"] = 1 else: # Works for Darwin, Linux, Android. Probably doesn't work for others. - DEFINES['__Userspace_os_%s' % CONFIG['OS_TARGET']] = 1 + DEFINES["__Userspace_os_%s" % CONFIG["OS_TARGET"]] = 1 -if CONFIG['OS_TARGET'] in ('Darwin', 'Android'): - DEFINES['GTEST_USE_OWN_TR1_TUPLE'] = 1 +if CONFIG["OS_TARGET"] in ("Darwin", "Android"): + DEFINES["GTEST_USE_OWN_TR1_TUPLE"] = 1 LOCAL_INCLUDES += [ - '/dom/media/webrtc/transport/', - '/dom/media/webrtc/transport/third_party/', - '/dom/media/webrtc/transport/third_party/nICEr/src/crypto', - '/dom/media/webrtc/transport/third_party/nICEr/src/ice', - '/dom/media/webrtc/transport/third_party/nICEr/src/net', - '/dom/media/webrtc/transport/third_party/nICEr/src/stun', - '/dom/media/webrtc/transport/third_party/nICEr/src/util', - '/dom/media/webrtc/transport/third_party/nrappkit/src/event', - '/dom/media/webrtc/transport/third_party/nrappkit/src/log', - '/dom/media/webrtc/transport/third_party/nrappkit/src/plugin', - '/dom/media/webrtc/transport/third_party/nrappkit/src/port/generic/include', - '/dom/media/webrtc/transport/third_party/nrappkit/src/registry', - '/dom/media/webrtc/transport/third_party/nrappkit/src/share', - '/dom/media/webrtc/transport/third_party/nrappkit/src/stats', - '/dom/media/webrtc/transport/third_party/nrappkit/src/util/', - '/dom/media/webrtc/transport/third_party/nrappkit/src/util/libekr', - '/netwerk/sctp/src/', - '/xpcom/tests/' + "/dom/media/webrtc/transport/", + "/dom/media/webrtc/transport/third_party/", + "/dom/media/webrtc/transport/third_party/nICEr/src/crypto", + "/dom/media/webrtc/transport/third_party/nICEr/src/ice", + "/dom/media/webrtc/transport/third_party/nICEr/src/net", + "/dom/media/webrtc/transport/third_party/nICEr/src/stun", + "/dom/media/webrtc/transport/third_party/nICEr/src/util", + "/dom/media/webrtc/transport/third_party/nrappkit/src/event", + 
"/dom/media/webrtc/transport/third_party/nrappkit/src/log", + "/dom/media/webrtc/transport/third_party/nrappkit/src/plugin", + "/dom/media/webrtc/transport/third_party/nrappkit/src/port/generic/include", + "/dom/media/webrtc/transport/third_party/nrappkit/src/registry", + "/dom/media/webrtc/transport/third_party/nrappkit/src/share", + "/dom/media/webrtc/transport/third_party/nrappkit/src/stats", + "/dom/media/webrtc/transport/third_party/nrappkit/src/util/", + "/dom/media/webrtc/transport/third_party/nrappkit/src/util/libekr", + "/netwerk/sctp/src/", + "/xpcom/tests/", ] -FINAL_LIBRARY = 'xul-gtest' +FINAL_LIBRARY = "xul-gtest" -if CONFIG['CC_TYPE'] in ('clang', 'gcc'): - CXXFLAGS += ['-Wno-error=shadow'] +if CONFIG["CC_TYPE"] in ("clang", "gcc"): + CXXFLAGS += ["-Wno-error=shadow"] diff --git a/dom/media/webrtc/transportbridge/moz.build b/dom/media/webrtc/transportbridge/moz.build index b0aa509ec839c7..65ce9100a3f358 100644 --- a/dom/media/webrtc/transportbridge/moz.build +++ b/dom/media/webrtc/transportbridge/moz.build @@ -3,27 +3,27 @@ # This Source Code Form is subject to the terms of the Mozilla Public # License, v. 2.0. If a copy of the MPL was not distributed with this # file, You can obtain one at http://mozilla.org/MPL/2.0/. -include('/dom/media/webrtc/third_party_build/webrtc.mozbuild') +include("/dom/media/webrtc/third_party_build/webrtc.mozbuild") LOCAL_INCLUDES += [ - '!/ipc/ipdl/_ipdlheaders', - '/dom/media', - '/dom/media/webrtc', - '/ipc/chromium/src', - '/media/libyuv/libyuv/include', - '/media/webrtc', - '/netwerk/srtp/src/crypto/include', - '/netwerk/srtp/src/include', - '/third_party/libwebrtc', - '/third_party/libwebrtc/webrtc', + "!/ipc/ipdl/_ipdlheaders", + "/dom/media", + "/dom/media/webrtc", + "/ipc/chromium/src", + "/media/libyuv/libyuv/include", + "/media/webrtc", + "/netwerk/srtp/src/crypto/include", + "/netwerk/srtp/src/include", + "/third_party/libwebrtc", + "/third_party/libwebrtc/webrtc", ] UNIFIED_SOURCES += [ - 'MediaPipeline.cpp', - 'MediaPipelineFilter.cpp', - 'RtpLogger.cpp', + "MediaPipeline.cpp", + "MediaPipelineFilter.cpp", + "RtpLogger.cpp", ] -DEFINES['TRACING'] = True +DEFINES["TRACING"] = True -FINAL_LIBRARY = 'xul' +FINAL_LIBRARY = "xul" diff --git a/dom/media/webspeech/moz.build b/dom/media/webspeech/moz.build index 85e2277b388bcc..ed3a203aaf325a 100644 --- a/dom/media/webspeech/moz.build +++ b/dom/media/webspeech/moz.build @@ -3,7 +3,7 @@ # License, v. 2.0. If a copy of the MPL was not distributed with this # file, You can obtain one at http://mozilla.org/MPL/2.0/. -DIRS = ['synth'] +DIRS = ["synth"] -if CONFIG['MOZ_WEBSPEECH']: - DIRS += ['recognition'] +if CONFIG["MOZ_WEBSPEECH"]: + DIRS += ["recognition"] diff --git a/dom/media/webspeech/recognition/moz.build b/dom/media/webspeech/recognition/moz.build index 23dae6699765d0..cfdf30bf569cbf 100644 --- a/dom/media/webspeech/recognition/moz.build +++ b/dom/media/webspeech/recognition/moz.build @@ -3,67 +3,65 @@ # License, v. 2.0. If a copy of the MPL was not distributed with this # file, You can obtain one at http://mozilla.org/MPL/2.0/. 
-MOCHITEST_MANIFESTS += ['test/mochitest.ini'] +MOCHITEST_MANIFESTS += ["test/mochitest.ini"] -XPIDL_MODULE = 'dom_webspeechrecognition' +XPIDL_MODULE = "dom_webspeechrecognition" -XPIDL_SOURCES = [ - 'nsISpeechRecognitionService.idl' -] +XPIDL_SOURCES = ["nsISpeechRecognitionService.idl"] EXPORTS.mozilla.dom += [ - 'OnlineSpeechRecognitionService.h', - 'SpeechGrammar.h', - 'SpeechGrammarList.h', - 'SpeechRecognition.h', - 'SpeechRecognitionAlternative.h', - 'SpeechRecognitionResult.h', - 'SpeechRecognitionResultList.h', - 'SpeechTrackListener.h', + "OnlineSpeechRecognitionService.h", + "SpeechGrammar.h", + "SpeechGrammarList.h", + "SpeechRecognition.h", + "SpeechRecognitionAlternative.h", + "SpeechRecognitionResult.h", + "SpeechRecognitionResultList.h", + "SpeechTrackListener.h", ] EXPORTS += [ - 'endpointer.h', - 'energy_endpointer.h', - 'energy_endpointer_params.h', + "endpointer.h", + "energy_endpointer.h", + "energy_endpointer_params.h", ] -if CONFIG['MOZ_WEBSPEECH_TEST_BACKEND']: +if CONFIG["MOZ_WEBSPEECH_TEST_BACKEND"]: EXPORTS.mozilla.dom += [ - 'test/FakeSpeechRecognitionService.h', + "test/FakeSpeechRecognitionService.h", ] UNIFIED_SOURCES += [ - 'endpointer.cc', - 'energy_endpointer.cc', - 'energy_endpointer_params.cc', - 'OnlineSpeechRecognitionService.cpp', - 'SpeechGrammar.cpp', - 'SpeechGrammarList.cpp', - 'SpeechRecognition.cpp', - 'SpeechRecognitionAlternative.cpp', - 'SpeechRecognitionResult.cpp', - 'SpeechRecognitionResultList.cpp', - 'SpeechTrackListener.cpp', + "endpointer.cc", + "energy_endpointer.cc", + "energy_endpointer_params.cc", + "OnlineSpeechRecognitionService.cpp", + "SpeechGrammar.cpp", + "SpeechGrammarList.cpp", + "SpeechRecognition.cpp", + "SpeechRecognitionAlternative.cpp", + "SpeechRecognitionResult.cpp", + "SpeechRecognitionResultList.cpp", + "SpeechTrackListener.cpp", ] -if CONFIG['MOZ_WEBSPEECH_TEST_BACKEND']: +if CONFIG["MOZ_WEBSPEECH_TEST_BACKEND"]: UNIFIED_SOURCES += [ - 'test/FakeSpeechRecognitionService.cpp', + "test/FakeSpeechRecognitionService.cpp", ] USE_LIBS += [ - 'jsoncpp', + "jsoncpp", ] LOCAL_INCLUDES += [ - '/dom/base', - '/toolkit/components/jsoncpp/include', + "/dom/base", + "/toolkit/components/jsoncpp/include", ] -include('/ipc/chromium/chromium-config.mozbuild') +include("/ipc/chromium/chromium-config.mozbuild") -FINAL_LIBRARY = 'xul' +FINAL_LIBRARY = "xul" -if CONFIG['CC_TYPE'] in ('clang', 'gcc'): - CXXFLAGS += ['-Wno-error=shadow'] +if CONFIG["CC_TYPE"] in ("clang", "gcc"): + CXXFLAGS += ["-Wno-error=shadow"] diff --git a/dom/media/webspeech/synth/android/moz.build b/dom/media/webspeech/synth/android/moz.build index 88f59e3d97f671..348c157f3c4790 100644 --- a/dom/media/webspeech/synth/android/moz.build +++ b/dom/media/webspeech/synth/android/moz.build @@ -4,16 +4,16 @@ # License, v. 2.0. If a copy of the MPL was not distributed with this # file, You can obtain one at http://mozilla.org/MPL/2.0/. 
-EXPORTS.mozilla.dom += ['SpeechSynthesisService.h'] +EXPORTS.mozilla.dom += ["SpeechSynthesisService.h"] UNIFIED_SOURCES += [ - 'SpeechSynthesisService.cpp', + "SpeechSynthesisService.cpp", ] XPCOM_MANIFESTS += [ - 'components.conf', + "components.conf", ] -include('/ipc/chromium/chromium-config.mozbuild') +include("/ipc/chromium/chromium-config.mozbuild") -FINAL_LIBRARY = 'xul' +FINAL_LIBRARY = "xul" diff --git a/dom/media/webspeech/synth/cocoa/moz.build b/dom/media/webspeech/synth/cocoa/moz.build index 229a9db4c61f53..4d59f7a389d3e9 100644 --- a/dom/media/webspeech/synth/cocoa/moz.build +++ b/dom/media/webspeech/synth/cocoa/moz.build @@ -5,11 +5,11 @@ # file, You can obtain one at http://mozilla.org/MPL/2.0/. SOURCES += [ - 'OSXSpeechSynthesizerService.mm', + "OSXSpeechSynthesizerService.mm", ] XPCOM_MANIFESTS += [ - 'components.conf', + "components.conf", ] -FINAL_LIBRARY = 'xul' +FINAL_LIBRARY = "xul" diff --git a/dom/media/webspeech/synth/moz.build b/dom/media/webspeech/synth/moz.build index 8a1a529ac21c5b..e6963e955e2eb7 100644 --- a/dom/media/webspeech/synth/moz.build +++ b/dom/media/webspeech/synth/moz.build @@ -3,71 +3,66 @@ # License, v. 2.0. If a copy of the MPL was not distributed with this # file, You can obtain one at http://mozilla.org/MPL/2.0/. -if CONFIG['MOZ_WEBSPEECH']: +if CONFIG["MOZ_WEBSPEECH"]: MOCHITEST_MANIFESTS += [ - 'test/mochitest.ini', - 'test/startup/mochitest.ini', + "test/mochitest.ini", + "test/startup/mochitest.ini", ] - XPIDL_MODULE = 'dom_webspeechsynth' + XPIDL_MODULE = "dom_webspeechsynth" - XPIDL_SOURCES += [ - 'nsISpeechService.idl', - 'nsISynthVoiceRegistry.idl' - ] + XPIDL_SOURCES += ["nsISpeechService.idl", "nsISynthVoiceRegistry.idl"] EXPORTS.mozilla.dom += [ - 'ipc/SpeechSynthesisChild.h', - 'ipc/SpeechSynthesisParent.h', - 'nsSpeechTask.h', - 'nsSynthVoiceRegistry.h', - 'SpeechSynthesis.h', - 'SpeechSynthesisUtterance.h', - 'SpeechSynthesisVoice.h', + "ipc/SpeechSynthesisChild.h", + "ipc/SpeechSynthesisParent.h", + "nsSpeechTask.h", + "nsSynthVoiceRegistry.h", + "SpeechSynthesis.h", + "SpeechSynthesisUtterance.h", + "SpeechSynthesisVoice.h", ] UNIFIED_SOURCES += [ - 'ipc/SpeechSynthesisChild.cpp', - 'ipc/SpeechSynthesisParent.cpp', - 'nsSpeechTask.cpp', - 'nsSynthVoiceRegistry.cpp', - 'SpeechSynthesis.cpp', - 'SpeechSynthesisUtterance.cpp', - 'SpeechSynthesisVoice.cpp', + "ipc/SpeechSynthesisChild.cpp", + "ipc/SpeechSynthesisParent.cpp", + "nsSpeechTask.cpp", + "nsSynthVoiceRegistry.cpp", + "SpeechSynthesis.cpp", + "SpeechSynthesisUtterance.cpp", + "SpeechSynthesisVoice.cpp", ] - if CONFIG['MOZ_WEBSPEECH_TEST_BACKEND']: - UNIFIED_SOURCES += [ - 'test/nsFakeSynthServices.cpp' - ] + if CONFIG["MOZ_WEBSPEECH_TEST_BACKEND"]: + UNIFIED_SOURCES += ["test/nsFakeSynthServices.cpp"] XPCOM_MANIFESTS += [ - 'test/components.conf', + "test/components.conf", ] - if CONFIG['MOZ_WIDGET_TOOLKIT'] == 'windows': - DIRS += ['windows'] + if CONFIG["MOZ_WIDGET_TOOLKIT"] == "windows": + DIRS += ["windows"] - if CONFIG['MOZ_WIDGET_TOOLKIT'] == 'cocoa': - DIRS += ['cocoa'] + if CONFIG["MOZ_WIDGET_TOOLKIT"] == "cocoa": + DIRS += ["cocoa"] - if CONFIG['MOZ_WIDGET_TOOLKIT'] == 'android': - DIRS += ['android'] + if CONFIG["MOZ_WIDGET_TOOLKIT"] == "android": + DIRS += ["android"] - if CONFIG['MOZ_SYNTH_SPEECHD']: - DIRS += ['speechd'] + if CONFIG["MOZ_SYNTH_SPEECHD"]: + DIRS += ["speechd"] IPDL_SOURCES += [ - 'ipc/PSpeechSynthesis.ipdl', - 'ipc/PSpeechSynthesisRequest.ipdl', + "ipc/PSpeechSynthesis.ipdl", + "ipc/PSpeechSynthesisRequest.ipdl", ] 
-include('/ipc/chromium/chromium-config.mozbuild') +include("/ipc/chromium/chromium-config.mozbuild") -FINAL_LIBRARY = 'xul' +FINAL_LIBRARY = "xul" LOCAL_INCLUDES += [ - 'ipc', + "ipc", ] -if CONFIG['CC_TYPE'] in ('clang', 'gcc'): - CXXFLAGS += ['-Wno-error=shadow'] +if CONFIG["CC_TYPE"] in ("clang", "gcc"): + CXXFLAGS += ["-Wno-error=shadow"] diff --git a/dom/media/webspeech/synth/speechd/moz.build b/dom/media/webspeech/synth/speechd/moz.build index 5dc831883d6cb4..0d9632a4888776 100644 --- a/dom/media/webspeech/synth/speechd/moz.build +++ b/dom/media/webspeech/synth/speechd/moz.build @@ -4,14 +4,12 @@ # License, v. 2.0. If a copy of the MPL was not distributed with this # file, You can obtain one at http://mozilla.org/MPL/2.0/. -UNIFIED_SOURCES += [ - 'SpeechDispatcherService.cpp' -] +UNIFIED_SOURCES += ["SpeechDispatcherService.cpp"] XPCOM_MANIFESTS += [ - 'components.conf', + "components.conf", ] -include('/ipc/chromium/chromium-config.mozbuild') +include("/ipc/chromium/chromium-config.mozbuild") -FINAL_LIBRARY = 'xul' +FINAL_LIBRARY = "xul" diff --git a/dom/media/webspeech/synth/windows/moz.build b/dom/media/webspeech/synth/windows/moz.build index bf38c62896a64f..90bafe9ca770b4 100644 --- a/dom/media/webspeech/synth/windows/moz.build +++ b/dom/media/webspeech/synth/windows/moz.build @@ -5,13 +5,13 @@ # file, You can obtain one at http://mozilla.org/MPL/2.0/. UNIFIED_SOURCES += [ - 'SapiService.cpp', + "SapiService.cpp", ] XPCOM_MANIFESTS += [ - 'components.conf', + "components.conf", ] -include('/ipc/chromium/chromium-config.mozbuild') +include("/ipc/chromium/chromium-config.mozbuild") -FINAL_LIBRARY = 'xul' +FINAL_LIBRARY = "xul" diff --git a/dom/media/webvtt/moz.build b/dom/media/webvtt/moz.build index dbda057fcff3c1..94444f58d37086 100644 --- a/dom/media/webvtt/moz.build +++ b/dom/media/webvtt/moz.build @@ -5,22 +5,22 @@ # file, You can obtain one at http://mozilla.org/MPL/2.0/. 
XPIDL_SOURCES += [ - 'nsIWebVTTListener.idl', - 'nsIWebVTTParserWrapper.idl', + "nsIWebVTTListener.idl", + "nsIWebVTTParserWrapper.idl", ] -XPIDL_MODULE = 'webvtt' +XPIDL_MODULE = "webvtt" EXTRA_JS_MODULES += [ - 'WebVTTParserWrapper.jsm', + "WebVTTParserWrapper.jsm", ] XPCOM_MANIFESTS += [ - 'components.conf', + "components.conf", ] EXTRA_JS_MODULES += [ - 'vtt.jsm', + "vtt.jsm", ] -XPCSHELL_TESTS_MANIFESTS += ['tests/xpcshell.ini'] +XPCSHELL_TESTS_MANIFESTS += ["tests/xpcshell.ini"] diff --git a/dom/messagechannel/moz.build b/dom/messagechannel/moz.build index 611347ef9407c9..d2726be8519590 100644 --- a/dom/messagechannel/moz.build +++ b/dom/messagechannel/moz.build @@ -7,32 +7,32 @@ with Files("**"): BUG_COMPONENT = ("Core", "DOM: postMessage") -TEST_DIRS += ['tests'] +TEST_DIRS += ["tests"] EXPORTS.mozilla.dom += [ - 'MessageChannel.h', - 'MessagePort.h', - 'MessagePortChild.h', - 'MessagePortParent.h', + "MessageChannel.h", + "MessagePort.h", + "MessagePortChild.h", + "MessagePortParent.h", ] UNIFIED_SOURCES += [ - 'MessageChannel.cpp', - 'MessagePort.cpp', - 'MessagePortChild.cpp', - 'MessagePortParent.cpp', - 'MessagePortService.cpp', + "MessageChannel.cpp", + "MessagePort.cpp", + "MessagePortChild.cpp", + "MessagePortParent.cpp", + "MessagePortService.cpp", ] IPDL_SOURCES += [ - 'PMessagePort.ipdl', + "PMessagePort.ipdl", ] LOCAL_INCLUDES += [ - '../base', - '../events', + "../base", + "../events", ] -include('/ipc/chromium/chromium-config.mozbuild') +include("/ipc/chromium/chromium-config.mozbuild") -FINAL_LIBRARY = 'xul' +FINAL_LIBRARY = "xul" diff --git a/dom/messagechannel/tests/moz.build b/dom/messagechannel/tests/moz.build index aeb0cb5e24ac55..6cf89eae3b7afe 100644 --- a/dom/messagechannel/tests/moz.build +++ b/dom/messagechannel/tests/moz.build @@ -4,7 +4,7 @@ # License, v. 2.0. If a copy of the MPL was not distributed with this # file, You can obtain one at http://mozilla.org/MPL/2.0/. -MOCHITEST_MANIFESTS += ['mochitest.ini'] -MOCHITEST_CHROME_MANIFESTS += ['chrome.ini'] +MOCHITEST_MANIFESTS += ["mochitest.ini"] +MOCHITEST_CHROME_MANIFESTS += ["chrome.ini"] -XPCSHELL_TESTS_MANIFESTS += ['unit/xpcshell.ini'] +XPCSHELL_TESTS_MANIFESTS += ["unit/xpcshell.ini"] diff --git a/dom/midi/moz.build b/dom/midi/moz.build index eb0b11cd979b76..11723dcfad5ad0 100644 --- a/dom/midi/moz.build +++ b/dom/midi/moz.build @@ -5,59 +5,59 @@ # file, You can obtain one at http://mozilla.org/MPL/2.0/. 
IPDL_SOURCES += [ - 'MIDITypes.ipdlh', - 'PMIDIManager.ipdl', - 'PMIDIPort.ipdl', + "MIDITypes.ipdlh", + "PMIDIManager.ipdl", + "PMIDIPort.ipdl", ] EXPORTS.mozilla.dom += [ - 'MIDIAccess.h', - 'MIDIAccessManager.h', - 'MIDIInput.h', - 'MIDIInputMap.h', - 'MIDIManagerChild.h', - 'MIDIManagerParent.h', - 'MIDIMessageEvent.h', - 'MIDIMessageQueue.h', - 'MIDIOutput.h', - 'MIDIOutputMap.h', - 'MIDIPermissionRequest.h', - 'MIDIPlatformRunnables.h', - 'MIDIPlatformService.h', - 'MIDIPort.h', - 'MIDIPortChild.h', - 'MIDIPortInterface.h', - 'MIDIPortParent.h', - 'MIDIUtils.h' + "MIDIAccess.h", + "MIDIAccessManager.h", + "MIDIInput.h", + "MIDIInputMap.h", + "MIDIManagerChild.h", + "MIDIManagerParent.h", + "MIDIMessageEvent.h", + "MIDIMessageQueue.h", + "MIDIOutput.h", + "MIDIOutputMap.h", + "MIDIPermissionRequest.h", + "MIDIPlatformRunnables.h", + "MIDIPlatformService.h", + "MIDIPort.h", + "MIDIPortChild.h", + "MIDIPortInterface.h", + "MIDIPortParent.h", + "MIDIUtils.h", ] UNIFIED_SOURCES = [ - 'MIDIAccess.cpp', - 'MIDIAccessManager.cpp', - 'MIDIInput.cpp', - 'MIDIInputMap.cpp', - 'MIDIManagerChild.cpp', - 'MIDIManagerParent.cpp', - 'MIDIMessageEvent.cpp', - 'MIDIMessageQueue.cpp', - 'MIDIOutput.cpp', - 'MIDIOutputMap.cpp', - 'MIDIPermissionRequest.cpp', - 'MIDIPlatformRunnables.cpp', - 'MIDIPlatformService.cpp', - 'MIDIPort.cpp', - 'MIDIPortChild.cpp', - 'MIDIPortInterface.cpp', - 'MIDIPortParent.cpp', - 'MIDIUtils.cpp', - 'TestMIDIPlatformService.cpp' + "MIDIAccess.cpp", + "MIDIAccessManager.cpp", + "MIDIInput.cpp", + "MIDIInputMap.cpp", + "MIDIManagerChild.cpp", + "MIDIManagerParent.cpp", + "MIDIMessageEvent.cpp", + "MIDIMessageQueue.cpp", + "MIDIOutput.cpp", + "MIDIOutputMap.cpp", + "MIDIPermissionRequest.cpp", + "MIDIPlatformRunnables.cpp", + "MIDIPlatformService.cpp", + "MIDIPort.cpp", + "MIDIPortChild.cpp", + "MIDIPortInterface.cpp", + "MIDIPortParent.cpp", + "MIDIUtils.cpp", + "TestMIDIPlatformService.cpp", ] -include('/ipc/chromium/chromium-config.mozbuild') +include("/ipc/chromium/chromium-config.mozbuild") -FINAL_LIBRARY = 'xul' +FINAL_LIBRARY = "xul" LOCAL_INCLUDES += [ - '/dom/base', + "/dom/base", ] -MOCHITEST_MANIFESTS += ['tests/mochitest.ini'] +MOCHITEST_MANIFESTS += ["tests/mochitest.ini"] diff --git a/dom/moz.build b/dom/moz.build index d34d8a7f756225..0b1ea040108133 100644 --- a/dom/moz.build +++ b/dom/moz.build @@ -11,115 +11,115 @@ with Files("**"): with Files("plugins/**"): BUG_COMPONENT = ("Core", "Plug-ins") -JAR_MANIFESTS += ['jar.mn'] +JAR_MANIFESTS += ["jar.mn"] interfaces = [ - 'base', - 'html', - 'events', - 'sidebar', - 'xul', - 'security', - 'storage', - 'geolocation', - 'notification', - 'push', - 'payments', + "base", + "html", + "events", + "sidebar", + "xul", + "security", + "storage", + "geolocation", + "notification", + "push", + "payments", ] -DIRS += ['interfaces/' + i for i in interfaces] +DIRS += ["interfaces/" + i for i in interfaces] DIRS += [ - 'abort', - 'animation', - 'base', - 'bindings', - 'battery', - 'browser-element', - 'cache', - 'canvas', - 'webgpu', - 'chrome-webidl', - 'clients', - 'commandhandler', - 'credentialmanagement', - 'crypto', - 'debugger', - 'encoding', - 'events', - 'fetch', - 'file', - 'filehandle', - 'filesystem', - 'flex', - 'gamepad', - 'geolocation', - 'grid', - 'html', - 'jsurl', - 'mathml', - 'media', - 'midi', - 'notification', - 'offline', - 'power', - 'push', - 'quota', - 'security', - 'storage', - 'svg', - 'locales', - 'network', - 'permission', - 'plugins/base', - 'plugins/ipc', - 'prototype', - 'indexedDB', - 'system', 
- 'ipc', - 'workers', - 'audiochannel', - 'broadcastchannel', - 'messagechannel', - 'promise', - 'smil', - 'url', - 'webauthn', - 'webidl', - 'webshare', - 'xml', - 'xslt', - 'xul', - 'manifest', - 'vr', - 'u2f', - 'console', - 'performance', - 'webbrowserpersist', - 'xhr', - 'worklet', - 'script', - 'payments', - 'websocket', - 'serviceworkers', - 'simpledb', - 'reporting', - 'localstorage', - 'prio', - 'l10n', + "abort", + "animation", + "base", + "bindings", + "battery", + "browser-element", + "cache", + "canvas", + "webgpu", + "chrome-webidl", + "clients", + "commandhandler", + "credentialmanagement", + "crypto", + "debugger", + "encoding", + "events", + "fetch", + "file", + "filehandle", + "filesystem", + "flex", + "gamepad", + "geolocation", + "grid", + "html", + "jsurl", + "mathml", + "media", + "midi", + "notification", + "offline", + "power", + "push", + "quota", + "security", + "storage", + "svg", + "locales", + "network", + "permission", + "plugins/base", + "plugins/ipc", + "prototype", + "indexedDB", + "system", + "ipc", + "workers", + "audiochannel", + "broadcastchannel", + "messagechannel", + "promise", + "smil", + "url", + "webauthn", + "webidl", + "webshare", + "xml", + "xslt", + "xul", + "manifest", + "vr", + "u2f", + "console", + "performance", + "webbrowserpersist", + "xhr", + "worklet", + "script", + "payments", + "websocket", + "serviceworkers", + "simpledb", + "reporting", + "localstorage", + "prio", + "l10n", ] -if CONFIG['OS_ARCH'] == 'WINNT': - DIRS += ['plugins/ipc/hangui'] +if CONFIG["OS_ARCH"] == "WINNT": + DIRS += ["plugins/ipc/hangui"] -DIRS += ['presentation'] +DIRS += ["presentation"] TEST_DIRS += [ - 'tests', - 'imptests', + "tests", + "imptests", ] -if CONFIG['MOZ_WIDGET_TOOLKIT'] in ('gtk', 'cocoa', 'windows'): - TEST_DIRS += ['plugins/test'] +if CONFIG["MOZ_WIDGET_TOOLKIT"] in ("gtk", "cocoa", "windows"): + TEST_DIRS += ["plugins/test"] -SPHINX_TREES['/dom'] = 'docs' +SPHINX_TREES["/dom"] = "docs" diff --git a/dom/network/interfaces/moz.build b/dom/network/interfaces/moz.build index 2ad69b2c257911..362ad4c2281a00 100644 --- a/dom/network/interfaces/moz.build +++ b/dom/network/interfaces/moz.build @@ -5,8 +5,8 @@ # file, You can obtain one at http://mozilla.org/MPL/2.0/. XPIDL_SOURCES += [ - 'nsITCPSocketCallback.idl', - 'nsIUDPSocketChild.idl', + "nsITCPSocketCallback.idl", + "nsIUDPSocketChild.idl", ] -XPIDL_MODULE = 'dom_network' +XPIDL_MODULE = "dom_network" diff --git a/dom/network/moz.build b/dom/network/moz.build index 91066c8a44d8d0..f33dab5c5ee968 100644 --- a/dom/network/moz.build +++ b/dom/network/moz.build @@ -4,55 +4,55 @@ # License, v. 2.0. If a copy of the MPL was not distributed with this # file, You can obtain one at http://mozilla.org/MPL/2.0/. 
-with Files('**'): - BUG_COMPONENT = ('Core', 'Networking') +with Files("**"): + BUG_COMPONENT = ("Core", "Networking") -DIRS += ['interfaces'] +DIRS += ["interfaces"] -MOCHITEST_CHROME_MANIFESTS += ['tests/chrome.ini'] -MOCHITEST_MANIFESTS += ['tests/mochitest.ini'] +MOCHITEST_CHROME_MANIFESTS += ["tests/chrome.ini"] +MOCHITEST_MANIFESTS += ["tests/mochitest.ini"] EXPORTS.mozilla.dom += [ - 'TCPServerSocket.h', - 'TCPSocket.h', - 'UDPSocket.h', + "TCPServerSocket.h", + "TCPSocket.h", + "UDPSocket.h", ] EXPORTS.mozilla.dom.network += [ - 'Connection.h', - 'Constants.h', - 'TCPServerSocketChild.h', - 'TCPServerSocketParent.h', - 'TCPSocketChild.h', - 'TCPSocketParent.h', - 'UDPSocketChild.h', - 'UDPSocketParent.h', + "Connection.h", + "Constants.h", + "TCPServerSocketChild.h", + "TCPServerSocketParent.h", + "TCPSocketChild.h", + "TCPSocketParent.h", + "UDPSocketChild.h", + "UDPSocketParent.h", ] UNIFIED_SOURCES += [ - 'Connection.cpp', - 'ConnectionMainThread.cpp', - 'ConnectionWorker.cpp', - 'TCPServerSocket.cpp', - 'TCPServerSocketChild.cpp', - 'TCPServerSocketParent.cpp', - 'TCPSocket.cpp', - 'TCPSocketChild.cpp', - 'TCPSocketParent.cpp', - 'UDPSocket.cpp', - 'UDPSocketChild.cpp', - 'UDPSocketParent.cpp', + "Connection.cpp", + "ConnectionMainThread.cpp", + "ConnectionWorker.cpp", + "TCPServerSocket.cpp", + "TCPServerSocketChild.cpp", + "TCPServerSocketParent.cpp", + "TCPSocket.cpp", + "TCPSocketChild.cpp", + "TCPSocketParent.cpp", + "UDPSocket.cpp", + "UDPSocketChild.cpp", + "UDPSocketParent.cpp", ] IPDL_SOURCES += [ - 'PTCPServerSocket.ipdl', - 'PTCPSocket.ipdl', - 'PUDPSocket.ipdl', + "PTCPServerSocket.ipdl", + "PTCPSocket.ipdl", + "PUDPSocket.ipdl", ] -include('/ipc/chromium/chromium-config.mozbuild') +include("/ipc/chromium/chromium-config.mozbuild") -FINAL_LIBRARY = 'xul' +FINAL_LIBRARY = "xul" -if CONFIG['CC_TYPE'] in ('clang', 'gcc'): - CXXFLAGS += ['-Wno-error=shadow'] +if CONFIG["CC_TYPE"] in ("clang", "gcc"): + CXXFLAGS += ["-Wno-error=shadow"] diff --git a/dom/notification/moz.build b/dom/notification/moz.build index 991460e54c4615..19e6f68453e7fe 100644 --- a/dom/notification/moz.build +++ b/dom/notification/moz.build @@ -8,45 +8,45 @@ with Files("**"): BUG_COMPONENT = ("Toolkit", "Notifications and Alerts") EXTRA_JS_MODULES += [ - 'NotificationStorage.jsm', + "NotificationStorage.jsm", ] XPCOM_MANIFESTS += [ - 'components.conf', + "components.conf", ] EXPORTS.mozilla.dom += [ - 'Notification.h', - 'NotificationEvent.h', + "Notification.h", + "NotificationEvent.h", ] UNIFIED_SOURCES += [ - 'Notification.cpp', - 'NotificationEvent.cpp', + "Notification.cpp", + "NotificationEvent.cpp", ] -include('/ipc/chromium/chromium-config.mozbuild') +include("/ipc/chromium/chromium-config.mozbuild") -FINAL_LIBRARY = 'xul' +FINAL_LIBRARY = "xul" LOCAL_INCLUDES += [ - '/dom/base', - '/dom/ipc', + "/dom/base", + "/dom/ipc", ] -BROWSER_CHROME_MANIFESTS += ['test/browser/browser.ini'] -XPCSHELL_TESTS_MANIFESTS += ['test/unit/xpcshell.ini'] -MOCHITEST_MANIFESTS += ['test/mochitest/mochitest.ini'] -MOCHITEST_CHROME_MANIFESTS += ['test/chrome/chrome.ini'] +BROWSER_CHROME_MANIFESTS += ["test/browser/browser.ini"] +XPCSHELL_TESTS_MANIFESTS += ["test/unit/xpcshell.ini"] +MOCHITEST_MANIFESTS += ["test/mochitest/mochitest.ini"] +MOCHITEST_CHROME_MANIFESTS += ["test/chrome/chrome.ini"] -if CONFIG['CC_TYPE'] in ('clang', 'gcc'): - CXXFLAGS += ['-Wno-error=shadow'] +if CONFIG["CC_TYPE"] in ("clang", "gcc"): + CXXFLAGS += ["-Wno-error=shadow"] -if CONFIG['MOZ_NEW_NOTIFICATION_STORE']: +if 
CONFIG["MOZ_NEW_NOTIFICATION_STORE"]: EXTRA_JS_MODULES += [ - 'new/NotificationDB.jsm', + "new/NotificationDB.jsm", ] else: EXTRA_JS_MODULES += [ - 'old/NotificationDB.jsm', + "old/NotificationDB.jsm", ] diff --git a/dom/offline/moz.build b/dom/offline/moz.build index c77427a4967cb0..938d4d107de2ab 100644 --- a/dom/offline/moz.build +++ b/dom/offline/moz.build @@ -8,15 +8,15 @@ with Files("**"): BUG_COMPONENT = ("Core", "Networking") EXPORTS += [ - 'nsDOMOfflineResourceList.h', + "nsDOMOfflineResourceList.h", ] UNIFIED_SOURCES += [ - 'nsDOMOfflineResourceList.cpp', + "nsDOMOfflineResourceList.cpp", ] LOCAL_INCLUDES += [ - '/dom/base', - '/uriloader/prefetch', + "/dom/base", + "/uriloader/prefetch", ] -FINAL_LIBRARY = 'xul' +FINAL_LIBRARY = "xul" diff --git a/dom/payments/ipc/moz.build b/dom/payments/ipc/moz.build index 07125450563c01..83c5afd5770790 100644 --- a/dom/payments/ipc/moz.build +++ b/dom/payments/ipc/moz.build @@ -5,22 +5,22 @@ # file, You can obtain one at http://mozilla.org/MPL/2.0/. EXPORTS.mozilla.dom += [ - 'PaymentRequestChild.h', - 'PaymentRequestParent.h', + "PaymentRequestChild.h", + "PaymentRequestParent.h", ] UNIFIED_SOURCES += [ - 'PaymentRequestChild.cpp', - 'PaymentRequestParent.cpp', + "PaymentRequestChild.cpp", + "PaymentRequestParent.cpp", ] IPDL_SOURCES += [ - 'PPaymentRequest.ipdl', + "PPaymentRequest.ipdl", ] -include('/ipc/chromium/chromium-config.mozbuild') +include("/ipc/chromium/chromium-config.mozbuild") # Add libFuzzer configuration directives -include('/tools/fuzzing/libfuzzer-config.mozbuild') +include("/tools/fuzzing/libfuzzer-config.mozbuild") -FINAL_LIBRARY = 'xul' +FINAL_LIBRARY = "xul" diff --git a/dom/payments/moz.build b/dom/payments/moz.build index 8cd2bf67f5b7ea..9db02c2d1f15d9 100644 --- a/dom/payments/moz.build +++ b/dom/payments/moz.build @@ -5,49 +5,49 @@ # file, You can obtain one at http://mozilla.org/MPL/2.0/. 
DIRS += [ - 'ipc', + "ipc", ] EXPORTS += [ - 'PaymentRequestData.h', - 'PaymentRequestService.h', + "PaymentRequestData.h", + "PaymentRequestService.h", ] EXPORTS.mozilla.dom += [ - 'MerchantValidationEvent.h', - 'PaymentAddress.h', - 'PaymentMethodChangeEvent.h', - 'PaymentRequest.h', - 'PaymentRequestManager.h', - 'PaymentRequestUpdateEvent.h', - 'PaymentResponse.h', + "MerchantValidationEvent.h", + "PaymentAddress.h", + "PaymentMethodChangeEvent.h", + "PaymentRequest.h", + "PaymentRequestManager.h", + "PaymentRequestUpdateEvent.h", + "PaymentResponse.h", ] UNIFIED_SOURCES += [ - 'BasicCardPayment.cpp', - 'MerchantValidationEvent.cpp', - 'PaymentActionResponse.cpp', - 'PaymentAddress.cpp', - 'PaymentMethodChangeEvent.cpp', - 'PaymentRequest.cpp', - 'PaymentRequestData.cpp', - 'PaymentRequestManager.cpp', - 'PaymentRequestService.cpp', - 'PaymentRequestUpdateEvent.cpp', - 'PaymentRequestUtils.cpp', - 'PaymentResponse.cpp', + "BasicCardPayment.cpp", + "MerchantValidationEvent.cpp", + "PaymentActionResponse.cpp", + "PaymentAddress.cpp", + "PaymentMethodChangeEvent.cpp", + "PaymentRequest.cpp", + "PaymentRequestData.cpp", + "PaymentRequestManager.cpp", + "PaymentRequestService.cpp", + "PaymentRequestUpdateEvent.cpp", + "PaymentRequestUtils.cpp", + "PaymentResponse.cpp", ] XPCOM_MANIFESTS += [ - 'components.conf', + "components.conf", ] -include('/ipc/chromium/chromium-config.mozbuild') +include("/ipc/chromium/chromium-config.mozbuild") -FINAL_LIBRARY = 'xul' +FINAL_LIBRARY = "xul" with Files("**"): BUG_COMPONENT = ("Core", "DOM: Web Payments") -BROWSER_CHROME_MANIFESTS += ['test/browser.ini'] -MOCHITEST_MANIFESTS += ['test/mochitest.ini'] +BROWSER_CHROME_MANIFESTS += ["test/browser.ini"] +MOCHITEST_MANIFESTS += ["test/mochitest.ini"] diff --git a/dom/performance/moz.build b/dom/performance/moz.build index df7f7dc72fc2cc..aa8a9c25897ad4 100644 --- a/dom/performance/moz.build +++ b/dom/performance/moz.build @@ -8,45 +8,45 @@ with Files("**"): BUG_COMPONENT = ("Core", "Performance") EXPORTS.mozilla.dom += [ - 'Performance.h', - 'PerformanceEntry.h', - 'PerformanceMainThread.h', - 'PerformanceMark.h', - 'PerformanceMeasure.h', - 'PerformanceNavigation.h', - 'PerformanceNavigationTiming.h', - 'PerformanceObserver.h', - 'PerformanceObserverEntryList.h', - 'PerformanceResourceTiming.h', - 'PerformanceServerTiming.h', - 'PerformanceService.h', - 'PerformanceStorage.h', - 'PerformanceStorageWorker.h', - 'PerformanceTiming.h', + "Performance.h", + "PerformanceEntry.h", + "PerformanceMainThread.h", + "PerformanceMark.h", + "PerformanceMeasure.h", + "PerformanceNavigation.h", + "PerformanceNavigationTiming.h", + "PerformanceObserver.h", + "PerformanceObserverEntryList.h", + "PerformanceResourceTiming.h", + "PerformanceServerTiming.h", + "PerformanceService.h", + "PerformanceStorage.h", + "PerformanceStorageWorker.h", + "PerformanceTiming.h", ] UNIFIED_SOURCES += [ - 'Performance.cpp', - 'PerformanceEntry.cpp', - 'PerformanceMainThread.cpp', - 'PerformanceMark.cpp', - 'PerformanceMeasure.cpp', - 'PerformanceNavigation.cpp', - 'PerformanceNavigationTiming.cpp', - 'PerformanceObserver.cpp', - 'PerformanceObserverEntryList.cpp', - 'PerformanceResourceTiming.cpp', - 'PerformanceServerTiming.cpp', - 'PerformanceService.cpp', - 'PerformanceStorageWorker.cpp', - 'PerformanceTiming.cpp', - 'PerformanceWorker.cpp', + "Performance.cpp", + "PerformanceEntry.cpp", + "PerformanceMainThread.cpp", + "PerformanceMark.cpp", + "PerformanceMeasure.cpp", + "PerformanceNavigation.cpp", + "PerformanceNavigationTiming.cpp", + 
"PerformanceObserver.cpp", + "PerformanceObserverEntryList.cpp", + "PerformanceResourceTiming.cpp", + "PerformanceServerTiming.cpp", + "PerformanceService.cpp", + "PerformanceStorageWorker.cpp", + "PerformanceTiming.cpp", + "PerformanceWorker.cpp", ] -include('/ipc/chromium/chromium-config.mozbuild') +include("/ipc/chromium/chromium-config.mozbuild") -MOCHITEST_MANIFESTS += [ 'tests/mochitest.ini' ] +MOCHITEST_MANIFESTS += ["tests/mochitest.ini"] -include('/ipc/chromium/chromium-config.mozbuild') +include("/ipc/chromium/chromium-config.mozbuild") -FINAL_LIBRARY = 'xul' +FINAL_LIBRARY = "xul" diff --git a/dom/permission/moz.build b/dom/permission/moz.build index 16dc9c013e0d86..bdceaba05f2324 100644 --- a/dom/permission/moz.build +++ b/dom/permission/moz.build @@ -8,19 +8,19 @@ with Files("**"): BUG_COMPONENT = ("Core", "DOM: Core & HTML") EXPORTS.mozilla.dom += [ - 'Permissions.h', - 'PermissionStatus.h', + "Permissions.h", + "PermissionStatus.h", ] UNIFIED_SOURCES += [ - 'PermissionObserver.cpp', - 'Permissions.cpp', - 'PermissionStatus.cpp', - 'PermissionUtils.cpp', + "PermissionObserver.cpp", + "Permissions.cpp", + "PermissionStatus.cpp", + "PermissionUtils.cpp", ] -MOCHITEST_MANIFESTS += ['tests/mochitest.ini'] +MOCHITEST_MANIFESTS += ["tests/mochitest.ini"] -FINAL_LIBRARY = 'xul' +FINAL_LIBRARY = "xul" -include('/ipc/chromium/chromium-config.mozbuild') +include("/ipc/chromium/chromium-config.mozbuild") diff --git a/dom/plugins/base/moz.build b/dom/plugins/base/moz.build index 083198427918a2..294c538c776178 100644 --- a/dom/plugins/base/moz.build +++ b/dom/plugins/base/moz.build @@ -5,94 +5,94 @@ # file, You can obtain one at http://mozilla.org/MPL/2.0/. XPIDL_SOURCES += [ - 'nsIHTTPHeaderListener.idl', - 'nsIPluginDocument.idl', - 'nsIPluginHost.idl', - 'nsIPluginInputStream.idl', - 'nsIPluginInstanceOwner.idl', - 'nsIPluginTag.idl', - 'nspluginroot.idl', + "nsIHTTPHeaderListener.idl", + "nsIPluginDocument.idl", + "nsIPluginHost.idl", + "nsIPluginInputStream.idl", + "nsIPluginInstanceOwner.idl", + "nsIPluginTag.idl", + "nspluginroot.idl", ] -XPIDL_MODULE = 'plugin' +XPIDL_MODULE = "plugin" EXPORTS += [ - 'npapi.h', - 'npfunctions.h', - 'npruntime.h', - 'nptypes.h', - 'nsJSNPRuntime.h', - 'nsNPAPIPluginInstance.h', - 'nsPluginDirServiceProvider.h', - 'nsPluginHost.h', - 'nsPluginInstanceOwner.h', - 'nsPluginLogging.h', - 'nsPluginNativeWindow.h', - 'nsPluginsCID.h', - 'nsPluginsDir.h', - 'nsPluginTags.h', + "npapi.h", + "npfunctions.h", + "npruntime.h", + "nptypes.h", + "nsJSNPRuntime.h", + "nsNPAPIPluginInstance.h", + "nsPluginDirServiceProvider.h", + "nsPluginHost.h", + "nsPluginInstanceOwner.h", + "nsPluginLogging.h", + "nsPluginNativeWindow.h", + "nsPluginsCID.h", + "nsPluginsDir.h", + "nsPluginTags.h", ] UNIFIED_SOURCES += [ - 'nsJSNPRuntime.cpp', - 'nsNPAPIPluginInstance.cpp', - 'nsNPAPIPluginStreamListener.cpp', - 'nsPluginInstanceOwner.cpp', - 'nsPluginStreamListenerPeer.cpp', - 'nsPluginTags.cpp', - 'PluginFinder.cpp', + "nsJSNPRuntime.cpp", + "nsNPAPIPluginInstance.cpp", + "nsNPAPIPluginStreamListener.cpp", + "nsPluginInstanceOwner.cpp", + "nsPluginStreamListenerPeer.cpp", + "nsPluginTags.cpp", + "PluginFinder.cpp", ] SOURCES += [ - 'nsNPAPIPlugin.cpp', # Conflict with X11 headers - 'nsPluginHost.cpp', # Conflict with NS_NPAPIPLUGIN_CALLBACK + "nsNPAPIPlugin.cpp", # Conflict with X11 headers + "nsPluginHost.cpp", # Conflict with NS_NPAPIPLUGIN_CALLBACK ] -if CONFIG['OS_ARCH'] == 'WINNT': +if CONFIG["OS_ARCH"] == "WINNT": UNIFIED_SOURCES += [ - 
'nsPluginDirServiceProvider.cpp', - 'nsPluginNativeWindowWin.cpp', - 'nsPluginsDirWin.cpp', + "nsPluginDirServiceProvider.cpp", + "nsPluginNativeWindowWin.cpp", + "nsPluginsDirWin.cpp", ] -elif CONFIG['MOZ_WIDGET_TOOLKIT'] == 'cocoa': +elif CONFIG["MOZ_WIDGET_TOOLKIT"] == "cocoa": UNIFIED_SOURCES += [ - 'nsPluginNativeWindow.cpp', + "nsPluginNativeWindow.cpp", ] SOURCES += [ - 'nsPluginsDirDarwin.cpp', # conflict with mozilla::EventPriority + "nsPluginsDirDarwin.cpp", # conflict with mozilla::EventPriority ] else: UNIFIED_SOURCES += [ - 'nsPluginNativeWindow.cpp', - 'nsPluginsDirUnix.cpp', + "nsPluginNativeWindow.cpp", + "nsPluginsDirUnix.cpp", ] XPCOM_MANIFESTS += [ - 'components.conf', + "components.conf", ] LOCAL_INCLUDES += [ - '/dom/base', - '/dom/plugins/ipc', - '/layout/generic', - '/layout/xul', - '/netwerk/base', - '/widget', - '/widget/cocoa', - '/xpcom/base', + "/dom/base", + "/dom/plugins/ipc", + "/layout/generic", + "/layout/xul", + "/netwerk/base", + "/widget", + "/widget/cocoa", + "/xpcom/base", ] -if CONFIG['OS_ARCH'] == 'WINNT': +if CONFIG["OS_ARCH"] == "WINNT": LOCAL_INCLUDES += [ - '/xpcom/base', + "/xpcom/base", ] -include('/ipc/chromium/chromium-config.mozbuild') +include("/ipc/chromium/chromium-config.mozbuild") -FINAL_LIBRARY = 'xul' +FINAL_LIBRARY = "xul" -CXXFLAGS += CONFIG['MOZ_CAIRO_CFLAGS'] -CXXFLAGS += CONFIG['TK_CFLAGS'] +CXXFLAGS += CONFIG["MOZ_CAIRO_CFLAGS"] +CXXFLAGS += CONFIG["TK_CFLAGS"] -if CONFIG['CC_TYPE'] in ('clang', 'gcc'): - CXXFLAGS += ['-Wno-error=shadow'] +if CONFIG["CC_TYPE"] in ("clang", "gcc"): + CXXFLAGS += ["-Wno-error=shadow"] diff --git a/dom/plugins/ipc/hangui/moz.build b/dom/plugins/ipc/hangui/moz.build index 213ead658e7ab1..db07f43d9be43a 100644 --- a/dom/plugins/ipc/hangui/moz.build +++ b/dom/plugins/ipc/hangui/moz.build @@ -4,24 +4,24 @@ # License, v. 2.0. If a copy of the MPL was not distributed with this # file, You can obtain one at http://mozilla.org/MPL/2.0/. -Program('plugin-hang-ui') +Program("plugin-hang-ui") UNIFIED_SOURCES += [ - 'MiniShmChild.cpp', - 'PluginHangUIChild.cpp', + "MiniShmChild.cpp", + "PluginHangUIChild.cpp", ] -include('/ipc/chromium/chromium-config.mozbuild') +include("/ipc/chromium/chromium-config.mozbuild") -DEFINES['NS_NO_XPCOM'] = True -DEFINES['_HAS_EXCEPTIONS'] = 0 +DEFINES["NS_NO_XPCOM"] = True +DEFINES["_HAS_EXCEPTIONS"] = 0 DisableStlWrapping() -if CONFIG['CC_TYPE'] in ('clang', 'gcc'): - WIN32_EXE_LDFLAGS += ['-municode'] +if CONFIG["CC_TYPE"] in ("clang", "gcc"): + WIN32_EXE_LDFLAGS += ["-municode"] -RCINCLUDE = 'HangUIDlg.rc' +RCINCLUDE = "HangUIDlg.rc" OS_LIBS += [ - 'comctl32', + "comctl32", ] diff --git a/dom/plugins/ipc/interpose/moz.build b/dom/plugins/ipc/interpose/moz.build index 8bd8ee651b5e62..ee24953570ef99 100644 --- a/dom/plugins/ipc/interpose/moz.build +++ b/dom/plugins/ipc/interpose/moz.build @@ -4,10 +4,10 @@ # License, v. 2.0. If a copy of the MPL was not distributed with this # file, You can obtain one at http://mozilla.org/MPL/2.0/. -SharedLibrary('plugin_child_interpose') +SharedLibrary("plugin_child_interpose") -UNIFIED_SOURCES += [ "%s.mm" % (LIBRARY_NAME) ] +UNIFIED_SOURCES += ["%s.mm" % (LIBRARY_NAME)] -OS_LIBS += ['-framework Carbon'] +OS_LIBS += ["-framework Carbon"] DIST_INSTALL = True diff --git a/dom/plugins/ipc/moz.build b/dom/plugins/ipc/moz.build index 49f1e276708171..62a726e25c5efd 100644 --- a/dom/plugins/ipc/moz.build +++ b/dom/plugins/ipc/moz.build @@ -4,149 +4,146 @@ # License, v. 2.0. 
If a copy of the MPL was not distributed with this # file, You can obtain one at http://mozilla.org/MPL/2.0/. -if CONFIG['MOZ_WIDGET_TOOLKIT'] == 'cocoa': - DIRS += ['interpose'] +if CONFIG["MOZ_WIDGET_TOOLKIT"] == "cocoa": + DIRS += ["interpose"] EXPORTS.mozilla += [ - 'PluginLibrary.h', + "PluginLibrary.h", ] EXPORTS.mozilla.plugins += [ - 'AStream.h', - 'BrowserStreamChild.h', - 'BrowserStreamParent.h', - 'ChildTimer.h', - 'FunctionBrokerIPCUtils.h', - 'IpdlTuple.h', - 'NPEventAndroid.h', - 'NPEventOSX.h', - 'NPEventUnix.h', - 'NPEventWindows.h', - 'PluginBridge.h', - 'PluginInstanceChild.h', - 'PluginInstanceParent.h', - 'PluginMessageUtils.h', - 'PluginModuleChild.h', - 'PluginModuleParent.h', - 'PluginProcessChild.h', - 'PluginProcessParent.h', - 'PluginQuirks.h', - 'PluginScriptableObjectChild.h', - 'PluginScriptableObjectParent.h', - 'PluginScriptableObjectUtils-inl.h', - 'PluginScriptableObjectUtils.h', - 'PluginUtilsOSX.h', - 'StreamNotifyChild.h', - 'StreamNotifyParent.h', + "AStream.h", + "BrowserStreamChild.h", + "BrowserStreamParent.h", + "ChildTimer.h", + "FunctionBrokerIPCUtils.h", + "IpdlTuple.h", + "NPEventAndroid.h", + "NPEventOSX.h", + "NPEventUnix.h", + "NPEventWindows.h", + "PluginBridge.h", + "PluginInstanceChild.h", + "PluginInstanceParent.h", + "PluginMessageUtils.h", + "PluginModuleChild.h", + "PluginModuleParent.h", + "PluginProcessChild.h", + "PluginProcessParent.h", + "PluginQuirks.h", + "PluginScriptableObjectChild.h", + "PluginScriptableObjectParent.h", + "PluginScriptableObjectUtils-inl.h", + "PluginScriptableObjectUtils.h", + "PluginUtilsOSX.h", + "StreamNotifyChild.h", + "StreamNotifyParent.h", ] -if CONFIG['OS_ARCH'] == 'WINNT': +if CONFIG["OS_ARCH"] == "WINNT": EXPORTS.mozilla.plugins += [ - 'PluginSurfaceParent.h', + "PluginSurfaceParent.h", ] UNIFIED_SOURCES += [ - 'PluginHangUIParent.cpp', - 'PluginSurfaceParent.cpp', + "PluginHangUIParent.cpp", + "PluginSurfaceParent.cpp", ] SOURCES += [ - 'MiniShmParent.cpp', # Issues with CreateEvent + "MiniShmParent.cpp", # Issues with CreateEvent ] - DEFINES['MOZ_HANGUI_PROCESS_NAME'] = '"plugin-hang-ui%s"' % CONFIG['BIN_SUFFIX'] + DEFINES["MOZ_HANGUI_PROCESS_NAME"] = '"plugin-hang-ui%s"' % CONFIG["BIN_SUFFIX"] LOCAL_INCLUDES += [ - '/widget', - 'hangui', + "/widget", + "hangui", ] -if CONFIG['MOZ_WIDGET_TOOLKIT'] == 'cocoa': +if CONFIG["MOZ_WIDGET_TOOLKIT"] == "cocoa": EXPORTS.mozilla.plugins += [ - 'PluginInterposeOSX.h', + "PluginInterposeOSX.h", ] UNIFIED_SOURCES += [ - 'BrowserStreamChild.cpp', - 'BrowserStreamParent.cpp', - 'ChildTimer.cpp', - 'FunctionBroker.cpp', - 'FunctionBrokerChild.cpp', - 'FunctionBrokerIPCUtils.cpp', - 'FunctionBrokerParent.cpp', - 'FunctionHook.cpp', - 'PluginBackgroundDestroyer.cpp', - 'PluginInstanceParent.cpp', - 'PluginMessageUtils.cpp', - 'PluginModuleChild.cpp', - 'PluginModuleParent.cpp', - 'PluginProcessChild.cpp', - 'PluginProcessParent.cpp', - 'PluginQuirks.cpp', - 'PluginScriptableObjectChild.cpp', - 'PluginScriptableObjectParent.cpp', + "BrowserStreamChild.cpp", + "BrowserStreamParent.cpp", + "ChildTimer.cpp", + "FunctionBroker.cpp", + "FunctionBrokerChild.cpp", + "FunctionBrokerIPCUtils.cpp", + "FunctionBrokerParent.cpp", + "FunctionHook.cpp", + "PluginBackgroundDestroyer.cpp", + "PluginInstanceParent.cpp", + "PluginMessageUtils.cpp", + "PluginModuleChild.cpp", + "PluginModuleParent.cpp", + "PluginProcessChild.cpp", + "PluginProcessParent.cpp", + "PluginQuirks.cpp", + "PluginScriptableObjectChild.cpp", + "PluginScriptableObjectParent.cpp", ] SOURCES += [ - 
'PluginInstanceChild.cpp', # 'PluginThreadCallback' : ambiguous symbol + "PluginInstanceChild.cpp", # 'PluginThreadCallback' : ambiguous symbol ] -if CONFIG['MOZ_WIDGET_TOOLKIT'] == 'cocoa': +if CONFIG["MOZ_WIDGET_TOOLKIT"] == "cocoa": UNIFIED_SOURCES += [ - 'PluginInterposeOSX.mm', - 'PluginUtilsOSX.mm', + "PluginInterposeOSX.mm", + "PluginUtilsOSX.mm", ] -if CONFIG['MOZ_WIDGET_TOOLKIT'] == 'windows': +if CONFIG["MOZ_WIDGET_TOOLKIT"] == "windows": EXPORTS.mozilla.plugins += [ - 'PluginWidgetChild.h', - 'PluginWidgetParent.h', - ] - UNIFIED_SOURCES += [ - 'D3D11SurfaceHolder.cpp', - 'PluginUtilsWin.cpp' + "PluginWidgetChild.h", + "PluginWidgetParent.h", ] + UNIFIED_SOURCES += ["D3D11SurfaceHolder.cpp", "PluginUtilsWin.cpp"] SOURCES += [ - 'PluginWidgetChild.cpp', - 'PluginWidgetParent.cpp', + "PluginWidgetChild.cpp", + "PluginWidgetParent.cpp", ] IPDL_SOURCES += [ - 'PBrowserStream.ipdl', - 'PFunctionBroker.ipdl', - 'PluginTypes.ipdlh', - 'PPluginBackgroundDestroyer.ipdl', - 'PPluginInstance.ipdl', - 'PPluginModule.ipdl', - 'PPluginScriptableObject.ipdl', - 'PPluginSurface.ipdl', - 'PStreamNotify.ipdl', + "PBrowserStream.ipdl", + "PFunctionBroker.ipdl", + "PluginTypes.ipdlh", + "PPluginBackgroundDestroyer.ipdl", + "PPluginInstance.ipdl", + "PPluginModule.ipdl", + "PPluginScriptableObject.ipdl", + "PPluginSurface.ipdl", + "PStreamNotify.ipdl", ] -include('/ipc/chromium/chromium-config.mozbuild') +include("/ipc/chromium/chromium-config.mozbuild") # Add libFuzzer configuration directives -include('/tools/fuzzing/libfuzzer-config.mozbuild') +include("/tools/fuzzing/libfuzzer-config.mozbuild") -FINAL_LIBRARY = 'xul' +FINAL_LIBRARY = "xul" LOCAL_INCLUDES += [ - '../base', - '/xpcom/base/', - '/xpcom/threads/', + "../base", + "/xpcom/base/", + "/xpcom/threads/", ] -if CONFIG['MOZ_SANDBOX'] and CONFIG['OS_ARCH'] == 'WINNT': +if CONFIG["MOZ_SANDBOX"] and CONFIG["OS_ARCH"] == "WINNT": LOCAL_INCLUDES += [ - '/security/sandbox/chromium', - '/security/sandbox/chromium-shim', - '/security/sandbox/win/src/sandboxpermissions', + "/security/sandbox/chromium", + "/security/sandbox/chromium-shim", + "/security/sandbox/win/src/sandboxpermissions", ] -DEFINES['FORCE_PR_LOG'] = True +DEFINES["FORCE_PR_LOG"] = True -if CONFIG['MOZ_WIDGET_TOOLKIT'] != 'gtk': - CXXFLAGS += CONFIG['TK_CFLAGS'] +if CONFIG["MOZ_WIDGET_TOOLKIT"] != "gtk": + CXXFLAGS += CONFIG["TK_CFLAGS"] else: # Force build against gtk+2 for struct offsets and such. - CXXFLAGS += CONFIG['MOZ_GTK2_CFLAGS'] + CXXFLAGS += CONFIG["MOZ_GTK2_CFLAGS"] -CXXFLAGS += CONFIG['MOZ_CAIRO_CFLAGS'] +CXXFLAGS += CONFIG["MOZ_CAIRO_CFLAGS"] -if CONFIG['CC_TYPE'] in ('clang', 'gcc'): - CXXFLAGS += ['-Wno-error=shadow'] +if CONFIG["CC_TYPE"] in ("clang", "gcc"): + CXXFLAGS += ["-Wno-error=shadow"] diff --git a/dom/plugins/test/moz.build b/dom/plugins/test/moz.build index 032db35387cdf5..dd464355768a05 100644 --- a/dom/plugins/test/moz.build +++ b/dom/plugins/test/moz.build @@ -4,12 +4,11 @@ # License, v. 2.0. If a copy of the MPL was not distributed with this # file, You can obtain one at http://mozilla.org/MPL/2.0/. 
-DIRS += ['testplugin'] +DIRS += ["testplugin"] -XPCSHELL_TESTS_MANIFESTS += ['unit/xpcshell.ini'] - -if CONFIG['MOZ_WIDGET_TOOLKIT'] in ('gtk', 'cocoa', 'windows'): - MOCHITEST_MANIFESTS += ['mochitest/mochitest.ini'] - MOCHITEST_CHROME_MANIFESTS += ['mochitest/chrome.ini'] - BROWSER_CHROME_MANIFESTS += ['mochitest/browser.ini'] +XPCSHELL_TESTS_MANIFESTS += ["unit/xpcshell.ini"] +if CONFIG["MOZ_WIDGET_TOOLKIT"] in ("gtk", "cocoa", "windows"): + MOCHITEST_MANIFESTS += ["mochitest/mochitest.ini"] + MOCHITEST_CHROME_MANIFESTS += ["mochitest/chrome.ini"] + BROWSER_CHROME_MANIFESTS += ["mochitest/browser.ini"] diff --git a/dom/plugins/test/testplugin/flashplugin/moz.build b/dom/plugins/test/testplugin/flashplugin/moz.build index 3df524a2bcd931..f66fb6eca4b3e3 100644 --- a/dom/plugins/test/testplugin/flashplugin/moz.build +++ b/dom/plugins/test/testplugin/flashplugin/moz.build @@ -4,8 +4,8 @@ # License, v. 2.0. If a copy of the MPL was not distributed with this # file, You can obtain one at http://mozilla.org/MPL/2.0/. -SharedLibrary('npswftest') +SharedLibrary("npswftest") -relative_path = 'flashplugin' -cocoa_name = 'npswftest' -include('../testplugin.mozbuild') +relative_path = "flashplugin" +cocoa_name = "npswftest" +include("../testplugin.mozbuild") diff --git a/dom/plugins/test/testplugin/moz.build b/dom/plugins/test/testplugin/moz.build index 6e250eefc50155..4dd85b8e18a628 100644 --- a/dom/plugins/test/testplugin/moz.build +++ b/dom/plugins/test/testplugin/moz.build @@ -4,10 +4,10 @@ # License, v. 2.0. If a copy of the MPL was not distributed with this # file, You can obtain one at http://mozilla.org/MPL/2.0/. -DIRS += ['secondplugin', 'thirdplugin', 'flashplugin'] +DIRS += ["secondplugin", "thirdplugin", "flashplugin"] -SharedLibrary('nptest') +SharedLibrary("nptest") -relative_path = '.' -cocoa_name = 'Test' -include('testplugin.mozbuild') +relative_path = "." +cocoa_name = "Test" +include("testplugin.mozbuild") diff --git a/dom/plugins/test/testplugin/secondplugin/moz.build b/dom/plugins/test/testplugin/secondplugin/moz.build index f95ed4190ec6d0..29c12260b44583 100644 --- a/dom/plugins/test/testplugin/secondplugin/moz.build +++ b/dom/plugins/test/testplugin/secondplugin/moz.build @@ -4,8 +4,8 @@ # License, v. 2.0. If a copy of the MPL was not distributed with this # file, You can obtain one at http://mozilla.org/MPL/2.0/. -SharedLibrary('npsecondtest') +SharedLibrary("npsecondtest") -relative_path = 'secondplugin' -cocoa_name = 'SecondTest' -include('../testplugin.mozbuild') +relative_path = "secondplugin" +cocoa_name = "SecondTest" +include("../testplugin.mozbuild") diff --git a/dom/plugins/test/testplugin/thirdplugin/moz.build b/dom/plugins/test/testplugin/thirdplugin/moz.build index f0d7b1a5b95f2a..d2d673e43484cc 100644 --- a/dom/plugins/test/testplugin/thirdplugin/moz.build +++ b/dom/plugins/test/testplugin/thirdplugin/moz.build @@ -4,8 +4,8 @@ # License, v. 2.0. If a copy of the MPL was not distributed with this # file, You can obtain one at http://mozilla.org/MPL/2.0/. 
-SharedLibrary('npthirdtest') +SharedLibrary("npthirdtest") -relative_path = 'thirdplugin' -cocoa_name = 'ThirdTest' -include('../testplugin.mozbuild') +relative_path = "thirdplugin" +cocoa_name = "ThirdTest" +include("../testplugin.mozbuild") diff --git a/dom/power/moz.build b/dom/power/moz.build index 25c195db26cbd0..c2524fdfa04ef3 100644 --- a/dom/power/moz.build +++ b/dom/power/moz.build @@ -8,26 +8,26 @@ with Files("**"): BUG_COMPONENT = ("Core", "DOM: Core & HTML") XPIDL_SOURCES += [ - 'nsIDOMWakeLockListener.idl', - 'nsIPowerManagerService.idl', - 'nsIWakeLock.idl', + "nsIDOMWakeLockListener.idl", + "nsIPowerManagerService.idl", + "nsIWakeLock.idl", ] -XPIDL_MODULE = 'dom_power' +XPIDL_MODULE = "dom_power" EXPORTS.mozilla.dom += [ - 'WakeLock.h', + "WakeLock.h", ] EXPORTS.mozilla.dom.power += [ - 'PowerManagerService.h', + "PowerManagerService.h", ] UNIFIED_SOURCES += [ - 'PowerManagerService.cpp', - 'WakeLock.cpp', + "PowerManagerService.cpp", + "WakeLock.cpp", ] -include('/ipc/chromium/chromium-config.mozbuild') +include("/ipc/chromium/chromium-config.mozbuild") -FINAL_LIBRARY = 'xul' +FINAL_LIBRARY = "xul" diff --git a/dom/presentation/interfaces/moz.build b/dom/presentation/interfaces/moz.build index 935e39000364a5..bc09f5679f4cc8 100644 --- a/dom/presentation/interfaces/moz.build +++ b/dom/presentation/interfaces/moz.build @@ -5,26 +5,25 @@ # file, You can obtain one at http://mozilla.org/MPL/2.0/. XPIDL_SOURCES += [ - 'nsIPresentationControlChannel.idl', - 'nsIPresentationControlService.idl', - 'nsIPresentationDevice.idl', - 'nsIPresentationDeviceManager.idl', - 'nsIPresentationDevicePrompt.idl', - 'nsIPresentationDeviceProvider.idl', - 'nsIPresentationListener.idl', - 'nsIPresentationLocalDevice.idl', - 'nsIPresentationRequestUIGlue.idl', - 'nsIPresentationService.idl', - 'nsIPresentationSessionRequest.idl', - 'nsIPresentationSessionTransport.idl', - 'nsIPresentationSessionTransportBuilder.idl', - 'nsIPresentationTerminateRequest.idl', + "nsIPresentationControlChannel.idl", + "nsIPresentationControlService.idl", + "nsIPresentationDevice.idl", + "nsIPresentationDeviceManager.idl", + "nsIPresentationDevicePrompt.idl", + "nsIPresentationDeviceProvider.idl", + "nsIPresentationListener.idl", + "nsIPresentationLocalDevice.idl", + "nsIPresentationRequestUIGlue.idl", + "nsIPresentationService.idl", + "nsIPresentationSessionRequest.idl", + "nsIPresentationSessionTransport.idl", + "nsIPresentationSessionTransportBuilder.idl", + "nsIPresentationTerminateRequest.idl", ] -if CONFIG['MOZ_WIDGET_TOOLKIT'] == 'android': +if CONFIG["MOZ_WIDGET_TOOLKIT"] == "android": XPIDL_SOURCES += [ - 'nsIPresentationNetworkHelper.idl', + "nsIPresentationNetworkHelper.idl", ] -XPIDL_MODULE = 'dom_presentation' - +XPIDL_MODULE = "dom_presentation" diff --git a/dom/presentation/moz.build b/dom/presentation/moz.build index e1bf352ef18293..9f4f39cdb98c59 100644 --- a/dom/presentation/moz.build +++ b/dom/presentation/moz.build @@ -7,82 +7,80 @@ with Files("**"): BUG_COMPONENT = ("Core", "DOM: Core & HTML") -DIRS += ['interfaces', 'provider'] +DIRS += ["interfaces", "provider"] -XPCSHELL_TESTS_MANIFESTS += ['tests/xpcshell/xpcshell.ini'] -MOCHITEST_MANIFESTS += ['tests/mochitest/mochitest.ini'] -MOCHITEST_CHROME_MANIFESTS += ['tests/mochitest/chrome.ini'] +XPCSHELL_TESTS_MANIFESTS += ["tests/xpcshell/xpcshell.ini"] +MOCHITEST_MANIFESTS += ["tests/mochitest/mochitest.ini"] +MOCHITEST_CHROME_MANIFESTS += ["tests/mochitest/chrome.ini"] EXPORTS.mozilla.dom += [ - 'DCPresentationChannelDescription.h', - 
'ipc/PresentationBuilderChild.h', - 'ipc/PresentationBuilderParent.h', - 'ipc/PresentationChild.h', - 'ipc/PresentationIPCService.h', - 'ipc/PresentationParent.h', - 'Presentation.h', - 'PresentationAvailability.h', - 'PresentationCallbacks.h', - 'PresentationConnection.h', - 'PresentationConnectionList.h', - 'PresentationDeviceManager.h', - 'PresentationReceiver.h', - 'PresentationRequest.h', - 'PresentationService.h', - 'PresentationServiceBase.h', - 'PresentationSessionInfo.h', - 'PresentationTCPSessionTransport.h', + "DCPresentationChannelDescription.h", + "ipc/PresentationBuilderChild.h", + "ipc/PresentationBuilderParent.h", + "ipc/PresentationChild.h", + "ipc/PresentationIPCService.h", + "ipc/PresentationParent.h", + "Presentation.h", + "PresentationAvailability.h", + "PresentationCallbacks.h", + "PresentationConnection.h", + "PresentationConnectionList.h", + "PresentationDeviceManager.h", + "PresentationReceiver.h", + "PresentationRequest.h", + "PresentationService.h", + "PresentationServiceBase.h", + "PresentationSessionInfo.h", + "PresentationTCPSessionTransport.h", ] UNIFIED_SOURCES += [ - 'AvailabilityCollection.cpp', - 'ControllerConnectionCollection.cpp', - 'DCPresentationChannelDescription.cpp', - 'ipc/PresentationBuilderChild.cpp', - 'ipc/PresentationBuilderParent.cpp', - 'ipc/PresentationChild.cpp', - 'ipc/PresentationContentSessionInfo.cpp', - 'ipc/PresentationIPCService.cpp', - 'ipc/PresentationParent.cpp', - 'Presentation.cpp', - 'PresentationAvailability.cpp', - 'PresentationCallbacks.cpp', - 'PresentationConnection.cpp', - 'PresentationConnectionList.cpp', - 'PresentationDeviceManager.cpp', - 'PresentationReceiver.cpp', - 'PresentationRequest.cpp', - 'PresentationService.cpp', - 'PresentationSessionInfo.cpp', - 'PresentationSessionRequest.cpp', - 'PresentationTCPSessionTransport.cpp', - 'PresentationTerminateRequest.cpp', - 'PresentationTransportBuilderConstructor.cpp' + "AvailabilityCollection.cpp", + "ControllerConnectionCollection.cpp", + "DCPresentationChannelDescription.cpp", + "ipc/PresentationBuilderChild.cpp", + "ipc/PresentationBuilderParent.cpp", + "ipc/PresentationChild.cpp", + "ipc/PresentationContentSessionInfo.cpp", + "ipc/PresentationIPCService.cpp", + "ipc/PresentationParent.cpp", + "Presentation.cpp", + "PresentationAvailability.cpp", + "PresentationCallbacks.cpp", + "PresentationConnection.cpp", + "PresentationConnectionList.cpp", + "PresentationDeviceManager.cpp", + "PresentationReceiver.cpp", + "PresentationRequest.cpp", + "PresentationService.cpp", + "PresentationSessionInfo.cpp", + "PresentationSessionRequest.cpp", + "PresentationTCPSessionTransport.cpp", + "PresentationTerminateRequest.cpp", + "PresentationTransportBuilderConstructor.cpp", ] EXTRA_JS_MODULES += [ - 'PresentationDataChannelSessionTransport.jsm', + "PresentationDataChannelSessionTransport.jsm", ] XPCOM_MANIFESTS += [ - 'components.conf', + "components.conf", ] -if CONFIG['MOZ_WIDGET_TOOLKIT'] == 'android': +if CONFIG["MOZ_WIDGET_TOOLKIT"] == "android": EXTRA_JS_MODULES += [ - 'PresentationNetworkHelper.jsm', + "PresentationNetworkHelper.jsm", ] IPDL_SOURCES += [ - 'ipc/PPresentation.ipdl', - 'ipc/PPresentationBuilder.ipdl', - 'ipc/PPresentationRequest.ipdl' + "ipc/PPresentation.ipdl", + "ipc/PPresentationBuilder.ipdl", + "ipc/PPresentationRequest.ipdl", ] -LOCAL_INCLUDES += [ - '../base' -] +LOCAL_INCLUDES += ["../base"] -include('/ipc/chromium/chromium-config.mozbuild') +include("/ipc/chromium/chromium-config.mozbuild") -FINAL_LIBRARY = 'xul' +FINAL_LIBRARY = "xul" diff --git 
a/dom/presentation/provider/moz.build b/dom/presentation/provider/moz.build index eaea61af415a95..f6c4527d2cdf9d 100644 --- a/dom/presentation/provider/moz.build +++ b/dom/presentation/provider/moz.build @@ -4,28 +4,26 @@ # License, v. 2.0. If a copy of the MPL was not distributed with this # file, You can obtain one at http://mozilla.org/MPL/2.0/. -EXTRA_JS_MODULES += [ - 'PresentationControlService.jsm' -] +EXTRA_JS_MODULES += ["PresentationControlService.jsm"] UNIFIED_SOURCES += [ - 'DeviceProviderHelpers.cpp', - 'MulticastDNSDeviceProvider.cpp', + "DeviceProviderHelpers.cpp", + "MulticastDNSDeviceProvider.cpp", ] XPCOM_MANIFESTS += [ - 'components.conf', + "components.conf", ] XPCOM_MANIFESTS += [ - 'components.conf', + "components.conf", ] EXTRA_JS_MODULES.presentation += [ - 'ControllerStateMachine.jsm', - 'ReceiverStateMachine.jsm', - 'StateMachineHelper.jsm', + "ControllerStateMachine.jsm", + "ReceiverStateMachine.jsm", + "StateMachineHelper.jsm", ] -include('/ipc/chromium/chromium-config.mozbuild') -FINAL_LIBRARY = 'xul' +include("/ipc/chromium/chromium-config.mozbuild") +FINAL_LIBRARY = "xul" diff --git a/dom/prio/moz.build b/dom/prio/moz.build index 9f4d7603c89d87..49c89f75c9d6f0 100644 --- a/dom/prio/moz.build +++ b/dom/prio/moz.build @@ -7,18 +7,16 @@ with Files("**"): BUG_COMPONENT = ("Core", "DOM: Core & HTML") -LOCAL_INCLUDES += [ - '/third_party/msgpack/include' -] +LOCAL_INCLUDES += ["/third_party/msgpack/include"] EXPORTS.mozilla.dom += [ - 'PrioEncoder.h', + "PrioEncoder.h", ] UNIFIED_SOURCES += [ - 'PrioEncoder.cpp', + "PrioEncoder.cpp", ] -TEST_DIRS += ['test/gtest'] +TEST_DIRS += ["test/gtest"] -FINAL_LIBRARY = 'xul' +FINAL_LIBRARY = "xul" diff --git a/dom/prio/test/gtest/moz.build b/dom/prio/test/gtest/moz.build index bb5107d5bd7008..2c175582fb0f6d 100644 --- a/dom/prio/test/gtest/moz.build +++ b/dom/prio/test/gtest/moz.build @@ -5,13 +5,13 @@ # file, you can obtain one at http://mozilla.org/MPL/2.0/. 
UNIFIED_SOURCES += [ - 'TestPrioEncoder.cpp', + "TestPrioEncoder.cpp", ] LOCAL_INCLUDES += [ - '/dom/prio', - '/third_party/msgpack/include', - '/third_party/prio/include', + "/dom/prio", + "/third_party/msgpack/include", + "/third_party/prio/include", ] -FINAL_LIBRARY = 'xul-gtest' +FINAL_LIBRARY = "xul-gtest" diff --git a/dom/promise/moz.build b/dom/promise/moz.build index 3758801f9890b4..5c896e76b17e2a 100644 --- a/dom/promise/moz.build +++ b/dom/promise/moz.build @@ -8,34 +8,34 @@ with Files("**"): BUG_COMPONENT = ("Core", "DOM: Core & HTML") EXPORTS.mozilla.dom += [ - 'Promise-inl.h', - 'Promise.h', - 'PromiseDebugging.h', - 'PromiseNativeHandler.h', - 'PromiseWorkerProxy.h', + "Promise-inl.h", + "Promise.h", + "PromiseDebugging.h", + "PromiseNativeHandler.h", + "PromiseWorkerProxy.h", ] UNIFIED_SOURCES += [ - 'Promise.cpp', - 'PromiseDebugging.cpp', - 'PromiseNativeHandler.cpp', + "Promise.cpp", + "PromiseDebugging.cpp", + "PromiseNativeHandler.cpp", ] LOCAL_INCLUDES += [ - '../base', - '../ipc', - '/js/xpconnect/src', + "../base", + "../ipc", + "/js/xpconnect/src", ] -include('/ipc/chromium/chromium-config.mozbuild') +include("/ipc/chromium/chromium-config.mozbuild") -FINAL_LIBRARY = 'xul' +FINAL_LIBRARY = "xul" -MOCHITEST_MANIFESTS += ['tests/mochitest.ini'] +MOCHITEST_MANIFESTS += ["tests/mochitest.ini"] -MOCHITEST_CHROME_MANIFESTS += ['tests/chrome.ini'] +MOCHITEST_CHROME_MANIFESTS += ["tests/chrome.ini"] -XPCSHELL_TESTS_MANIFESTS += ['tests/unit/xpcshell.ini'] +XPCSHELL_TESTS_MANIFESTS += ["tests/unit/xpcshell.ini"] -if CONFIG['CC_TYPE'] in ('clang', 'gcc'): - CXXFLAGS += ['-Wno-error=shadow'] +if CONFIG["CC_TYPE"] in ("clang", "gcc"): + CXXFLAGS += ["-Wno-error=shadow"] diff --git a/dom/prototype/moz.build b/dom/prototype/moz.build index 96943b257bbc20..f73b3824ac93ee 100644 --- a/dom/prototype/moz.build +++ b/dom/prototype/moz.build @@ -8,18 +8,18 @@ with Files("**"): BUG_COMPONENT = ("Core", "XUL") EXPORTS.mozilla.dom += [ - 'PrototypeDocumentContentSink.h', + "PrototypeDocumentContentSink.h", ] SOURCES += [ - 'PrototypeDocumentContentSink.cpp', + "PrototypeDocumentContentSink.cpp", ] LOCAL_INCLUDES += [ - '/dom/base', - '/dom/xul', + "/dom/base", + "/dom/xul", ] -MOCHITEST_CHROME_MANIFESTS += ['tests/chrome/chrome.ini'] +MOCHITEST_CHROME_MANIFESTS += ["tests/chrome/chrome.ini"] -FINAL_LIBRARY = 'xul' +FINAL_LIBRARY = "xul" diff --git a/dom/push/moz.build b/dom/push/moz.build index 01e57286d5e69b..ca0336f174db5e 100644 --- a/dom/push/moz.build +++ b/dom/push/moz.build @@ -7,66 +7,66 @@ with Files("**"): BUG_COMPONENT = ("Core", "DOM: Push Notifications") EXTRA_COMPONENTS += [ - 'Push.manifest', + "Push.manifest", ] EXTRA_JS_MODULES += [ - 'Push.jsm', - 'PushBroadcastService.jsm', - 'PushComponents.jsm', - 'PushCrypto.jsm', - 'PushDB.jsm', - 'PushRecord.jsm', - 'PushService.jsm', + "Push.jsm", + "PushBroadcastService.jsm", + "PushComponents.jsm", + "PushCrypto.jsm", + "PushDB.jsm", + "PushRecord.jsm", + "PushService.jsm", ] -if CONFIG['MOZ_BUILD_APP'] != 'mobile/android': +if CONFIG["MOZ_BUILD_APP"] != "mobile/android": # Everything but Fennec. EXTRA_JS_MODULES += [ - 'PushServiceHttp2.jsm', - 'PushServiceWebSocket.jsm', + "PushServiceHttp2.jsm", + "PushServiceWebSocket.jsm", ] else: # Fennec only. 
EXTRA_JS_MODULES += [ - 'PushServiceAndroidGCM.jsm', + "PushServiceAndroidGCM.jsm", ] XPCOM_MANIFESTS += [ - 'components.conf', + "components.conf", ] MOCHITEST_MANIFESTS += [ - 'test/mochitest.ini', + "test/mochitest.ini", ] XPCSHELL_TESTS_MANIFESTS += [ - 'test/xpcshell/xpcshell.ini', + "test/xpcshell/xpcshell.ini", ] EXPORTS.mozilla.dom += [ - 'PushManager.h', - 'PushNotifier.h', - 'PushSubscription.h', - 'PushSubscriptionOptions.h', - 'PushUtil.h', + "PushManager.h", + "PushNotifier.h", + "PushSubscription.h", + "PushSubscriptionOptions.h", + "PushUtil.h", ] UNIFIED_SOURCES += [ - 'PushManager.cpp', - 'PushNotifier.cpp', - 'PushSubscription.cpp', - 'PushSubscriptionOptions.cpp', - 'PushUtil.cpp', + "PushManager.cpp", + "PushNotifier.cpp", + "PushSubscription.cpp", + "PushSubscriptionOptions.cpp", + "PushUtil.cpp", ] -TEST_DIRS += ['test/xpcshell'] +TEST_DIRS += ["test/xpcshell"] -include('/ipc/chromium/chromium-config.mozbuild') +include("/ipc/chromium/chromium-config.mozbuild") LOCAL_INCLUDES += [ - '../base', - '../ipc', + "../base", + "../ipc", ] -FINAL_LIBRARY = 'xul' +FINAL_LIBRARY = "xul" diff --git a/dom/push/test/xpcshell/moz.build b/dom/push/test/xpcshell/moz.build index 07a05c396b7543..2ee9d49cb9afeb 100644 --- a/dom/push/test/xpcshell/moz.build +++ b/dom/push/test/xpcshell/moz.build @@ -1,8 +1,8 @@ EXTRA_COMPONENTS += [ - 'PushServiceHandler.js', - 'PushServiceHandler.manifest', + "PushServiceHandler.js", + "PushServiceHandler.manifest", ] TESTING_JS_MODULES += [ - 'broadcast_handler.jsm', + "broadcast_handler.jsm", ] diff --git a/dom/quota/moz.build b/dom/quota/moz.build index c8946247af85d6..a6de7f44b60a67 100644 --- a/dom/quota/moz.build +++ b/dom/quota/moz.build @@ -7,81 +7,81 @@ with Files("**"): BUG_COMPONENT = ("Core", "Storage: Quota Manager") -DIRS += ['test'] +DIRS += ["test"] XPIDL_SOURCES += [ - 'nsIQuotaCallbacks.idl', - 'nsIQuotaManagerService.idl', - 'nsIQuotaRequests.idl', - 'nsIQuotaResults.idl', + "nsIQuotaCallbacks.idl", + "nsIQuotaManagerService.idl", + "nsIQuotaRequests.idl", + "nsIQuotaResults.idl", ] -XPIDL_MODULE = 'dom_quota' +XPIDL_MODULE = "dom_quota" EXPORTS.mozilla.dom += [ - 'StorageManager.h', + "StorageManager.h", ] EXPORTS.mozilla.dom.quota += [ - 'ActorsParent.h', - 'CheckedUnsafePtr.h', - 'CipherStrategy.h', - 'Client.h', - 'DecryptingInputStream.h', - 'DecryptingInputStream_impl.h', - 'DummyCipherStrategy.h', - 'EncryptedBlock.h', - 'EncryptingOutputStream.h', - 'EncryptingOutputStream_impl.h', - 'FileStreams.h', - 'InitializationTypes.h', - 'IPCStreamCipherStrategy.h', - 'MemoryOutputStream.h', - 'OriginScope.h', - 'PersistenceType.h', - 'QuotaCommon.h', - 'QuotaInfo.h', - 'QuotaManager.h', - 'QuotaManagerService.h', - 'QuotaObject.h', - 'SerializationHelpers.h', - 'UsageInfo.h', + "ActorsParent.h", + "CheckedUnsafePtr.h", + "CipherStrategy.h", + "Client.h", + "DecryptingInputStream.h", + "DecryptingInputStream_impl.h", + "DummyCipherStrategy.h", + "EncryptedBlock.h", + "EncryptingOutputStream.h", + "EncryptingOutputStream_impl.h", + "FileStreams.h", + "InitializationTypes.h", + "IPCStreamCipherStrategy.h", + "MemoryOutputStream.h", + "OriginScope.h", + "PersistenceType.h", + "QuotaCommon.h", + "QuotaInfo.h", + "QuotaManager.h", + "QuotaManagerService.h", + "QuotaObject.h", + "SerializationHelpers.h", + "UsageInfo.h", ] XPCOM_MANIFESTS += [ - 'components.conf', + "components.conf", ] UNIFIED_SOURCES += [ - 'ActorsChild.cpp', - 'ActorsParent.cpp', - 'Client.cpp', - 'DecryptingInputStream.cpp', - 'EncryptingOutputStream.cpp', - 
'FileStreams.cpp', - 'MemoryOutputStream.cpp', - 'nsIndexedDBProtocolHandler.cpp', - 'PersistenceType.cpp', - 'QuotaCommon.cpp', - 'QuotaManagerService.cpp', - 'QuotaRequests.cpp', - 'QuotaResults.cpp', - 'StorageManager.cpp', + "ActorsChild.cpp", + "ActorsParent.cpp", + "Client.cpp", + "DecryptingInputStream.cpp", + "EncryptingOutputStream.cpp", + "FileStreams.cpp", + "MemoryOutputStream.cpp", + "nsIndexedDBProtocolHandler.cpp", + "PersistenceType.cpp", + "QuotaCommon.cpp", + "QuotaManagerService.cpp", + "QuotaRequests.cpp", + "QuotaResults.cpp", + "StorageManager.cpp", ] IPDL_SOURCES += [ - 'PQuota.ipdl', - 'PQuotaRequest.ipdl', - 'PQuotaUsageRequest.ipdl', + "PQuota.ipdl", + "PQuotaRequest.ipdl", + "PQuotaUsageRequest.ipdl", ] -include('/ipc/chromium/chromium-config.mozbuild') +include("/ipc/chromium/chromium-config.mozbuild") -FINAL_LIBRARY = 'xul' +FINAL_LIBRARY = "xul" LOCAL_INCLUDES += [ - '/caps', + "/caps", ] -if CONFIG['CC_TYPE'] in ('clang', 'gcc'): - CXXFLAGS += ['-Wno-error=shadow'] +if CONFIG["CC_TYPE"] in ("clang", "gcc"): + CXXFLAGS += ["-Wno-error=shadow"] diff --git a/dom/quota/test/gtest/moz.build b/dom/quota/test/gtest/moz.build index c2efc0bf5f0cad..d79c1065127557 100644 --- a/dom/quota/test/gtest/moz.build +++ b/dom/quota/test/gtest/moz.build @@ -5,17 +5,17 @@ # file, you can obtain one at http://mozilla.org/MPL/2.0/. UNIFIED_SOURCES = [ - 'TestCheckedUnsafePtr.cpp', - 'TestEncryptedStream.cpp', - 'TestQuotaCommon.cpp', - 'TestQuotaManager.cpp', - 'TestUsageInfo.cpp', + "TestCheckedUnsafePtr.cpp", + "TestEncryptedStream.cpp", + "TestQuotaCommon.cpp", + "TestQuotaManager.cpp", + "TestUsageInfo.cpp", ] -include('/ipc/chromium/chromium-config.mozbuild') +include("/ipc/chromium/chromium-config.mozbuild") -FINAL_LIBRARY = 'xul-gtest' +FINAL_LIBRARY = "xul-gtest" LOCAL_INCLUDES += [ - '/dom/quota', + "/dom/quota", ] diff --git a/dom/quota/test/moz.build b/dom/quota/test/moz.build index 13d7f80fb88c23..5ff03beba2671f 100644 --- a/dom/quota/test/moz.build +++ b/dom/quota/test/moz.build @@ -4,47 +4,47 @@ # License, v. 2.0. If a copy of the MPL was not distributed with this # file, You can obtain one at http://mozilla.org/MPL/2.0/. 
-TEST_DIRS += ['gtest'] +TEST_DIRS += ["gtest"] -BROWSER_CHROME_MANIFESTS += ['browser/browser.ini'] +BROWSER_CHROME_MANIFESTS += ["browser/browser.ini"] -MOCHITEST_MANIFESTS += ['mochitest/mochitest.ini'] +MOCHITEST_MANIFESTS += ["mochitest/mochitest.ini"] XPCSHELL_TESTS_MANIFESTS += [ - 'xpcshell/telemetry/xpcshell.ini', - 'xpcshell/upgrades/xpcshell.ini', - 'xpcshell/xpcshell.ini' + "xpcshell/telemetry/xpcshell.ini", + "xpcshell/upgrades/xpcshell.ini", + "xpcshell/xpcshell.ini", ] TEST_HARNESS_FILES.testing.mochitest.browser.dom.quota.test.common += [ - 'common/browser.js', - 'common/content.js', - 'common/file.js', - 'common/global.js', - 'common/nestedtest.js', - 'common/system.js', + "common/browser.js", + "common/content.js", + "common/file.js", + "common/global.js", + "common/nestedtest.js", + "common/system.js", ] TEST_HARNESS_FILES.testing.mochitest.tests.dom.quota.test.common += [ - 'common/content.js', - 'common/file.js', - 'common/global.js', - 'common/mochitest.js', - 'common/test_simpledb.js', - 'common/test_storage_manager_persist_allow.js', - 'common/test_storage_manager_persist_deny.js', - 'common/test_storage_manager_persisted.js', + "common/content.js", + "common/file.js", + "common/global.js", + "common/mochitest.js", + "common/test_simpledb.js", + "common/test_storage_manager_persist_allow.js", + "common/test_storage_manager_persist_deny.js", + "common/test_storage_manager_persisted.js", ] TEST_HARNESS_FILES.xpcshell.dom.quota.test.common += [ - 'common/file.js', - 'common/global.js', - 'common/system.js', - 'common/test_simpledb.js', - 'common/xpcshell.js', + "common/file.js", + "common/global.js", + "common/system.js", + "common/test_simpledb.js", + "common/xpcshell.js", ] TEST_HARNESS_FILES.xpcshell.dom.quota.test.xpcshell.common += [ - 'xpcshell/common/head.js', - 'xpcshell/common/utils.js', + "xpcshell/common/head.js", + "xpcshell/common/utils.js", ] diff --git a/dom/reporting/moz.build b/dom/reporting/moz.build index ff988ca592ac85..9b122e386bd03b 100644 --- a/dom/reporting/moz.build +++ b/dom/reporting/moz.build @@ -5,47 +5,47 @@ # file, You can obtain one at http://mozilla.org/MPL/2.0/. 
EXPORTS.mozilla.dom = [ - 'CrashReport.h', - 'DeprecationReportBody.h', - 'EndpointForReportChild.h', - 'EndpointForReportParent.h', - 'FeaturePolicyViolationReportBody.h', - 'Report.h', - 'ReportBody.h', - 'ReportDeliver.h', - 'ReportingHeader.h', - 'ReportingObserver.h', - 'ReportingUtils.h', - 'TestingDeprecatedInterface.h', + "CrashReport.h", + "DeprecationReportBody.h", + "EndpointForReportChild.h", + "EndpointForReportParent.h", + "FeaturePolicyViolationReportBody.h", + "Report.h", + "ReportBody.h", + "ReportDeliver.h", + "ReportingHeader.h", + "ReportingObserver.h", + "ReportingUtils.h", + "TestingDeprecatedInterface.h", ] UNIFIED_SOURCES += [ - 'CrashReport.cpp', - 'DeprecationReportBody.cpp', - 'EndpointForReportChild.cpp', - 'EndpointForReportParent.cpp', - 'FeaturePolicyViolationReportBody.cpp', - 'Report.cpp', - 'ReportBody.cpp', - 'ReportDeliver.cpp', - 'ReportingHeader.cpp', - 'ReportingObserver.cpp', - 'ReportingUtils.cpp', - 'TestingDeprecatedInterface.cpp', + "CrashReport.cpp", + "DeprecationReportBody.cpp", + "EndpointForReportChild.cpp", + "EndpointForReportParent.cpp", + "FeaturePolicyViolationReportBody.cpp", + "Report.cpp", + "ReportBody.cpp", + "ReportDeliver.cpp", + "ReportingHeader.cpp", + "ReportingObserver.cpp", + "ReportingUtils.cpp", + "TestingDeprecatedInterface.cpp", ] IPDL_SOURCES += [ - 'PEndpointForReport.ipdl', + "PEndpointForReport.ipdl", ] -include('/ipc/chromium/chromium-config.mozbuild') +include("/ipc/chromium/chromium-config.mozbuild") -with Files('**'): - BUG_COMPONENT = ('Core', 'DOM: Security') +with Files("**"): + BUG_COMPONENT = ("Core", "DOM: Security") -FINAL_LIBRARY = 'xul' +FINAL_LIBRARY = "xul" -MOCHITEST_MANIFESTS += ['tests/mochitest.ini'] -BROWSER_CHROME_MANIFESTS += ['tests/browser.ini'] +MOCHITEST_MANIFESTS += ["tests/mochitest.ini"] +BROWSER_CHROME_MANIFESTS += ["tests/browser.ini"] -TEST_DIRS += [ 'tests/gtest' ] +TEST_DIRS += ["tests/gtest"] diff --git a/dom/reporting/tests/gtest/moz.build b/dom/reporting/tests/gtest/moz.build index 0e1b6123e84b34..860ef48d1ed466 100644 --- a/dom/reporting/tests/gtest/moz.build +++ b/dom/reporting/tests/gtest/moz.build @@ -5,9 +5,9 @@ # file, you can obtain one at http://mozilla.org/MPL/2.0/. 
UNIFIED_SOURCES = [ - 'TestReportToParser.cpp', + "TestReportToParser.cpp", ] -include('/ipc/chromium/chromium-config.mozbuild') +include("/ipc/chromium/chromium-config.mozbuild") -FINAL_LIBRARY = 'xul-gtest' +FINAL_LIBRARY = "xul-gtest" diff --git a/dom/script/moz.build b/dom/script/moz.build index 87e86ae9b21da7..8617f2450a5fc9 100644 --- a/dom/script/moz.build +++ b/dom/script/moz.build @@ -8,40 +8,40 @@ with Files("**"): BUG_COMPONENT = ("Core", "DOM: Core & HTML") XPIDL_SOURCES += [ - 'nsIScriptLoaderObserver.idl', + "nsIScriptLoaderObserver.idl", ] -XPIDL_MODULE = 'dom' +XPIDL_MODULE = "dom" EXPORTS += [ - 'nsIScriptElement.h', + "nsIScriptElement.h", ] EXPORTS.mozilla.dom += [ - 'LoadedScript.h', - 'ScriptDecoding.h', - 'ScriptElement.h', - 'ScriptKind.h', - 'ScriptLoader.h', - 'ScriptLoadRequest.h', - 'ScriptSettings.h', + "LoadedScript.h", + "ScriptDecoding.h", + "ScriptElement.h", + "ScriptKind.h", + "ScriptLoader.h", + "ScriptLoadRequest.h", + "ScriptSettings.h", ] UNIFIED_SOURCES += [ - 'LoadedScript.cpp', - 'ModuleLoadRequest.cpp', - 'ScriptElement.cpp', - 'ScriptLoader.cpp', - 'ScriptLoadHandler.cpp', - 'ScriptLoadRequest.cpp', - 'ScriptSettings.cpp', - 'ScriptTrace.cpp', + "LoadedScript.cpp", + "ModuleLoadRequest.cpp", + "ScriptElement.cpp", + "ScriptLoader.cpp", + "ScriptLoadHandler.cpp", + "ScriptLoadRequest.cpp", + "ScriptSettings.cpp", + "ScriptTrace.cpp", ] LOCAL_INCLUDES += [ - '/dom/base', + "/dom/base", ] -include('/ipc/chromium/chromium-config.mozbuild') +include("/ipc/chromium/chromium-config.mozbuild") -FINAL_LIBRARY = 'xul' +FINAL_LIBRARY = "xul" diff --git a/dom/security/featurepolicy/fuzztest/moz.build b/dom/security/featurepolicy/fuzztest/moz.build index 79c429c1ce1ebf..ea577e83390dc1 100644 --- a/dom/security/featurepolicy/fuzztest/moz.build +++ b/dom/security/featurepolicy/fuzztest/moz.build @@ -4,18 +4,15 @@ # License, v. 2.0. If a copy of the MPL was not distributed with this # file, You can obtain one at http://mozilla.org/MPL/2.0/. -Library('FuzzingFeaturePolicy') +Library("FuzzingFeaturePolicy") LOCAL_INCLUDES += [ - '/dom/security/featurepolicy', - '/netwerk/base', + "/dom/security/featurepolicy", + "/netwerk/base", ] -include('/tools/fuzzing/libfuzzer-config.mozbuild') +include("/tools/fuzzing/libfuzzer-config.mozbuild") -SOURCES += [ - 'fp_fuzzer.cpp' -] - -FINAL_LIBRARY = 'xul-gtest' +SOURCES += ["fp_fuzzer.cpp"] +FINAL_LIBRARY = "xul-gtest" diff --git a/dom/security/featurepolicy/moz.build b/dom/security/featurepolicy/moz.build index 645141c7b2319b..40277836adcd1c 100644 --- a/dom/security/featurepolicy/moz.build +++ b/dom/security/featurepolicy/moz.build @@ -5,34 +5,32 @@ # file, You can obtain one at http://mozilla.org/MPL/2.0/. 
with Files("**"): - BUG_COMPONENT = ('Core', 'DOM: Security') + BUG_COMPONENT = ("Core", "DOM: Security") -TEST_DIRS += [ 'test/gtest' ] -MOCHITEST_MANIFESTS += ['test/mochitest/mochitest.ini'] +TEST_DIRS += ["test/gtest"] +MOCHITEST_MANIFESTS += ["test/mochitest/mochitest.ini"] EXPORTS.mozilla.dom += [ - 'Feature.h', - 'FeaturePolicy.h', - 'FeaturePolicyParser.h', - 'FeaturePolicyUtils.h', + "Feature.h", + "FeaturePolicy.h", + "FeaturePolicyParser.h", + "FeaturePolicyUtils.h", ] UNIFIED_SOURCES += [ - 'Feature.cpp', - 'FeaturePolicy.cpp', - 'FeaturePolicyParser.cpp', - 'FeaturePolicyUtils.cpp', + "Feature.cpp", + "FeaturePolicy.cpp", + "FeaturePolicyParser.cpp", + "FeaturePolicyUtils.cpp", ] LOCAL_INCLUDES += [ - '/netwerk/base', + "/netwerk/base", ] -include('/ipc/chromium/chromium-config.mozbuild') -include('/tools/fuzzing/libfuzzer-config.mozbuild') +include("/ipc/chromium/chromium-config.mozbuild") +include("/tools/fuzzing/libfuzzer-config.mozbuild") -FINAL_LIBRARY = 'xul' +FINAL_LIBRARY = "xul" -if CONFIG['FUZZING_INTERFACES']: - TEST_DIRS += [ - 'fuzztest' - ] +if CONFIG["FUZZING_INTERFACES"]: + TEST_DIRS += ["fuzztest"] diff --git a/dom/security/featurepolicy/test/gtest/moz.build b/dom/security/featurepolicy/test/gtest/moz.build index 4333e0424b9bae..e307810ff241fc 100644 --- a/dom/security/featurepolicy/test/gtest/moz.build +++ b/dom/security/featurepolicy/test/gtest/moz.build @@ -5,10 +5,9 @@ # file, you can obtain one at http://mozilla.org/MPL/2.0/. UNIFIED_SOURCES = [ - 'TestFeaturePolicyParser.cpp', + "TestFeaturePolicyParser.cpp", ] -include('/ipc/chromium/chromium-config.mozbuild') - -FINAL_LIBRARY = 'xul-gtest' +include("/ipc/chromium/chromium-config.mozbuild") +FINAL_LIBRARY = "xul-gtest" diff --git a/dom/security/fuzztest/moz.build b/dom/security/fuzztest/moz.build index 400f8d032efbb3..3a1f3f4396e39e 100644 --- a/dom/security/fuzztest/moz.build +++ b/dom/security/fuzztest/moz.build @@ -4,18 +4,15 @@ # License, v. 2.0. If a copy of the MPL was not distributed with this # file, You can obtain one at http://mozilla.org/MPL/2.0/. -Library('FuzzingDOMSecurity') +Library("FuzzingDOMSecurity") LOCAL_INCLUDES += [ - '/dom/security', - '/netwerk/base', + "/dom/security", + "/netwerk/base", ] -include('/tools/fuzzing/libfuzzer-config.mozbuild') +include("/tools/fuzzing/libfuzzer-config.mozbuild") -SOURCES += [ - 'csp_fuzzer.cpp' -] - -FINAL_LIBRARY = 'xul-gtest' +SOURCES += ["csp_fuzzer.cpp"] +FINAL_LIBRARY = "xul-gtest" diff --git a/dom/security/moz.build b/dom/security/moz.build index fc73cbf48591e4..0f2aed6a1fd565 100644 --- a/dom/security/moz.build +++ b/dom/security/moz.build @@ -4,81 +4,79 @@ # License, v. 2.0. If a copy of the MPL was not distributed with this # file, You can obtain one at http://mozilla.org/MPL/2.0/. 
-with Files('*'): - BUG_COMPONENT = ('Core', 'DOM: Security') +with Files("*"): + BUG_COMPONENT = ("Core", "DOM: Security") -TEST_DIRS += ['test'] +TEST_DIRS += ["test"] -DIRS += [ 'featurepolicy', 'sanitizer' ] +DIRS += ["featurepolicy", "sanitizer"] EXPORTS.mozilla.dom += [ - 'CSPEvalChecker.h', - 'DOMSecurityMonitor.h', - 'FramingChecker.h', - 'nsContentSecurityManager.h', - 'nsContentSecurityUtils.h', - 'nsCSPContext.h', - 'nsCSPService.h', - 'nsCSPUtils.h', - 'nsHTTPSOnlyStreamListener.h', - 'nsHTTPSOnlyUtils.h', - 'nsMixedContentBlocker.h', - 'PolicyTokenizer.h', - 'ReferrerInfo.h', - 'SecFetch.h', - 'SRICheck.h', - 'SRILogHelper.h', - 'SRIMetadata.h', + "CSPEvalChecker.h", + "DOMSecurityMonitor.h", + "FramingChecker.h", + "nsContentSecurityManager.h", + "nsContentSecurityUtils.h", + "nsCSPContext.h", + "nsCSPService.h", + "nsCSPUtils.h", + "nsHTTPSOnlyStreamListener.h", + "nsHTTPSOnlyUtils.h", + "nsMixedContentBlocker.h", + "PolicyTokenizer.h", + "ReferrerInfo.h", + "SecFetch.h", + "SRICheck.h", + "SRILogHelper.h", + "SRIMetadata.h", ] EXPORTS += [ - 'nsContentSecurityManager.h', - 'nsContentSecurityUtils.h', - 'nsMixedContentBlocker.h', - 'ReferrerInfo.h', + "nsContentSecurityManager.h", + "nsContentSecurityUtils.h", + "nsMixedContentBlocker.h", + "ReferrerInfo.h", ] UNIFIED_SOURCES += [ - 'CSPEvalChecker.cpp', - 'DOMSecurityMonitor.cpp', - 'FramingChecker.cpp', - 'nsContentSecurityManager.cpp', - 'nsContentSecurityUtils.cpp', - 'nsCSPContext.cpp', - 'nsCSPParser.cpp', - 'nsCSPService.cpp', - 'nsCSPUtils.cpp', - 'nsHTTPSOnlyStreamListener.cpp', - 'nsHTTPSOnlyUtils.cpp', - 'nsMixedContentBlocker.cpp', - 'PolicyTokenizer.cpp', - 'ReferrerInfo.cpp', - 'SecFetch.cpp', - 'SRICheck.cpp', - 'SRIMetadata.cpp', + "CSPEvalChecker.cpp", + "DOMSecurityMonitor.cpp", + "FramingChecker.cpp", + "nsContentSecurityManager.cpp", + "nsContentSecurityUtils.cpp", + "nsCSPContext.cpp", + "nsCSPParser.cpp", + "nsCSPService.cpp", + "nsCSPUtils.cpp", + "nsHTTPSOnlyStreamListener.cpp", + "nsHTTPSOnlyUtils.cpp", + "nsMixedContentBlocker.cpp", + "PolicyTokenizer.cpp", + "ReferrerInfo.cpp", + "SecFetch.cpp", + "SRICheck.cpp", + "SRIMetadata.cpp", ] -include('/ipc/chromium/chromium-config.mozbuild') +include("/ipc/chromium/chromium-config.mozbuild") -FINAL_LIBRARY = 'xul' +FINAL_LIBRARY = "xul" LOCAL_INCLUDES += [ - '/caps', - '/docshell/base', # for nsDocShell.h - '/netwerk/base', - '/netwerk/protocol/data', # for nsDataHandler.h - '/netwerk/protocol/http', # for HttpBaseChannel.h + "/caps", + "/docshell/base", # for nsDocShell.h + "/netwerk/base", + "/netwerk/protocol/data", # for nsDataHandler.h + "/netwerk/protocol/http", # for HttpBaseChannel.h ] -include('/tools/fuzzing/libfuzzer-config.mozbuild') +include("/tools/fuzzing/libfuzzer-config.mozbuild") -if CONFIG['FUZZING_INTERFACES']: - TEST_DIRS += [ - 'fuzztest' - ] +if CONFIG["FUZZING_INTERFACES"]: + TEST_DIRS += ["fuzztest"] XPIDL_SOURCES += [ - 'nsIHttpsOnlyModePermission.idl', + "nsIHttpsOnlyModePermission.idl", ] -XPIDL_MODULE = 'dom_security' +XPIDL_MODULE = "dom_security" diff --git a/dom/security/sanitizer/moz.build b/dom/security/sanitizer/moz.build index ba9df4d375a325..4baec486c5e159 100644 --- a/dom/security/sanitizer/moz.build +++ b/dom/security/sanitizer/moz.build @@ -5,33 +5,33 @@ # file, You can obtain one at http://mozilla.org/MPL/2.0/. 
with Files("**"): - BUG_COMPONENT = ('Core', 'DOM: Security') + BUG_COMPONENT = ("Core", "DOM: Security") -#TEST_DIRS += [ 'tests' ] +# TEST_DIRS += [ 'tests' ] -MOCHITEST_MANIFESTS += ['tests/mochitest/mochitest.ini'] +MOCHITEST_MANIFESTS += ["tests/mochitest/mochitest.ini"] EXPORTS.mozilla.dom += [ - 'Sanitizer.h', + "Sanitizer.h", ] UNIFIED_SOURCES += [ - 'Sanitizer.cpp', + "Sanitizer.cpp", ] LOCAL_INCLUDES += [ - '/dom/base', - '/dom/bindings', - '/dom/html', + "/dom/base", + "/dom/bindings", + "/dom/html", ] -#include('/ipc/chromium/chromium-config.mozbuild') -#include('/tools/fuzzing/libfuzzer-config.mozbuild') +# include('/ipc/chromium/chromium-config.mozbuild') +# include('/tools/fuzzing/libfuzzer-config.mozbuild') -FINAL_LIBRARY = 'xul' +FINAL_LIBRARY = "xul" -#if CONFIG['FUZZING_INTERFACES']: +# if CONFIG['FUZZING_INTERFACES']: # TEST_DIRS += [ # 'fuzztest' # ] diff --git a/dom/security/test/csp/file_upgrade_insecure_wsh.py b/dom/security/test/csp/file_upgrade_insecure_wsh.py index f00794e0bed39b..b7159c742b81e7 100644 --- a/dom/security/test/csp/file_upgrade_insecure_wsh.py +++ b/dom/security/test/csp/file_upgrade_insecure_wsh.py @@ -1,5 +1,3 @@ - - def web_socket_do_extra_handshake(request): pass diff --git a/dom/security/test/csp/file_websocket_self_wsh.py b/dom/security/test/csp/file_websocket_self_wsh.py index f761e39af68bc8..eb45e224f34ef7 100644 --- a/dom/security/test/csp/file_websocket_self_wsh.py +++ b/dom/security/test/csp/file_websocket_self_wsh.py @@ -1,5 +1,3 @@ - - def web_socket_do_extra_handshake(request): pass diff --git a/dom/security/test/general/file_sec_fetch_websocket_wsh.py b/dom/security/test/general/file_sec_fetch_websocket_wsh.py index 303add7c50967d..b7159c742b81e7 100644 --- a/dom/security/test/general/file_sec_fetch_websocket_wsh.py +++ b/dom/security/test/general/file_sec_fetch_websocket_wsh.py @@ -1,4 +1,3 @@ - def web_socket_do_extra_handshake(request): pass diff --git a/dom/security/test/gtest/moz.build b/dom/security/test/gtest/moz.build index a9c48ca7cc5d41..6df6679dbe790b 100644 --- a/dom/security/test/gtest/moz.build +++ b/dom/security/test/gtest/moz.build @@ -5,20 +5,20 @@ # file, You can obtain one at http://mozilla.org/MPL/2.0/. 
UNIFIED_SOURCES += [ - 'TestCSPParser.cpp', - 'TestFilenameEvalParser.cpp', - 'TestSecureContext.cpp', + "TestCSPParser.cpp", + "TestFilenameEvalParser.cpp", + "TestSecureContext.cpp", ] -if CONFIG['OS_TARGET'] != 'Android': +if CONFIG["OS_TARGET"] != "Android": UNIFIED_SOURCES += [ - 'TestUnexpectedPrivilegedLoads.cpp', + "TestUnexpectedPrivilegedLoads.cpp", ] -FINAL_LIBRARY = 'xul-gtest' +FINAL_LIBRARY = "xul-gtest" LOCAL_INCLUDES += [ - '/caps', - '/toolkit/components/telemetry/', - '/toolkit/components/telemetry/tests/gtest', + "/caps", + "/toolkit/components/telemetry/", + "/toolkit/components/telemetry/tests/gtest", ] diff --git a/dom/security/test/https-only/file_upgrade_insecure_wsh.py b/dom/security/test/https-only/file_upgrade_insecure_wsh.py index f00794e0bed39b..b7159c742b81e7 100644 --- a/dom/security/test/https-only/file_upgrade_insecure_wsh.py +++ b/dom/security/test/https-only/file_upgrade_insecure_wsh.py @@ -1,5 +1,3 @@ - - def web_socket_do_extra_handshake(request): pass diff --git a/dom/security/test/mixedcontentblocker/file_main_bug803225_websocket_wsh.py b/dom/security/test/mixedcontentblocker/file_main_bug803225_websocket_wsh.py index f00794e0bed39b..b7159c742b81e7 100644 --- a/dom/security/test/mixedcontentblocker/file_main_bug803225_websocket_wsh.py +++ b/dom/security/test/mixedcontentblocker/file_main_bug803225_websocket_wsh.py @@ -1,5 +1,3 @@ - - def web_socket_do_extra_handshake(request): pass diff --git a/dom/security/test/moz.build b/dom/security/test/moz.build index f412486308ae85..59c168f0471141 100644 --- a/dom/security/test/moz.build +++ b/dom/security/test/moz.build @@ -4,35 +4,35 @@ # License, v. 2.0. If a copy of the MPL was not distributed with this # file, You can obtain one at http://mozilla.org/MPL/2.0/. -with Files('cors/**'): - BUG_COMPONENT = ('Core', 'Networking') +with Files("cors/**"): + BUG_COMPONENT = ("Core", "Networking") XPCSHELL_TESTS_MANIFESTS += [ - 'unit/xpcshell.ini', + "unit/xpcshell.ini", ] TEST_DIRS += [ - 'gtest', + "gtest", ] MOCHITEST_MANIFESTS += [ - 'cors/mochitest.ini', - 'csp/mochitest.ini', - 'general/mochitest.ini', - 'https-only/mochitest.ini', - 'mixedcontentblocker/mochitest.ini', - 'referrer-policy/mochitest.ini', - 'sri/mochitest.ini', + "cors/mochitest.ini", + "csp/mochitest.ini", + "general/mochitest.ini", + "https-only/mochitest.ini", + "mixedcontentblocker/mochitest.ini", + "referrer-policy/mochitest.ini", + "sri/mochitest.ini", ] MOCHITEST_CHROME_MANIFESTS += [ - 'general/chrome.ini', + "general/chrome.ini", ] BROWSER_CHROME_MANIFESTS += [ - 'cors/browser.ini', - 'csp/browser.ini', - 'general/browser.ini', - 'https-only/browser.ini', - 'mixedcontentblocker/browser.ini' + "cors/browser.ini", + "csp/browser.ini", + "general/browser.ini", + "https-only/browser.ini", + "mixedcontentblocker/browser.ini", ] diff --git a/dom/serviceworkers/moz.build b/dom/serviceworkers/moz.build index d293603e40581f..7f7fac7ba7249d 100644 --- a/dom/serviceworkers/moz.build +++ b/dom/serviceworkers/moz.build @@ -9,124 +9,124 @@ with Files("**"): # Public stuff. 
EXPORTS.mozilla.dom += [ - 'FetchEventOpChild.h', - 'FetchEventOpParent.h', - 'FetchEventOpProxyChild.h', - 'FetchEventOpProxyParent.h', - 'ServiceWorker.h', - 'ServiceWorkerActors.h', - 'ServiceWorkerChild.h', - 'ServiceWorkerCloneData.h', - 'ServiceWorkerContainer.h', - 'ServiceWorkerContainerChild.h', - 'ServiceWorkerContainerParent.h', - 'ServiceWorkerDescriptor.h', - 'ServiceWorkerEvents.h', - 'ServiceWorkerInfo.h', - 'ServiceWorkerInterceptController.h', - 'ServiceWorkerIPCUtils.h', - 'ServiceWorkerManager.h', - 'ServiceWorkerManagerChild.h', - 'ServiceWorkerManagerParent.h', - 'ServiceWorkerOp.h', - 'ServiceWorkerOpPromise.h', - 'ServiceWorkerParent.h', - 'ServiceWorkerRegistrar.h', - 'ServiceWorkerRegistration.h', - 'ServiceWorkerRegistrationChild.h', - 'ServiceWorkerRegistrationDescriptor.h', - 'ServiceWorkerRegistrationInfo.h', - 'ServiceWorkerRegistrationParent.h', - 'ServiceWorkerShutdownState.h', - 'ServiceWorkerUtils.h', + "FetchEventOpChild.h", + "FetchEventOpParent.h", + "FetchEventOpProxyChild.h", + "FetchEventOpProxyParent.h", + "ServiceWorker.h", + "ServiceWorkerActors.h", + "ServiceWorkerChild.h", + "ServiceWorkerCloneData.h", + "ServiceWorkerContainer.h", + "ServiceWorkerContainerChild.h", + "ServiceWorkerContainerParent.h", + "ServiceWorkerDescriptor.h", + "ServiceWorkerEvents.h", + "ServiceWorkerInfo.h", + "ServiceWorkerInterceptController.h", + "ServiceWorkerIPCUtils.h", + "ServiceWorkerManager.h", + "ServiceWorkerManagerChild.h", + "ServiceWorkerManagerParent.h", + "ServiceWorkerOp.h", + "ServiceWorkerOpPromise.h", + "ServiceWorkerParent.h", + "ServiceWorkerRegistrar.h", + "ServiceWorkerRegistration.h", + "ServiceWorkerRegistrationChild.h", + "ServiceWorkerRegistrationDescriptor.h", + "ServiceWorkerRegistrationInfo.h", + "ServiceWorkerRegistrationParent.h", + "ServiceWorkerShutdownState.h", + "ServiceWorkerUtils.h", ] UNIFIED_SOURCES += [ - 'FetchEventOpChild.cpp', - 'FetchEventOpParent.cpp', - 'FetchEventOpProxyChild.cpp', - 'FetchEventOpProxyParent.cpp', - 'RemoteServiceWorkerContainerImpl.cpp', - 'RemoteServiceWorkerImpl.cpp', - 'RemoteServiceWorkerRegistrationImpl.cpp', - 'ServiceWorker.cpp', - 'ServiceWorkerActors.cpp', - 'ServiceWorkerChild.cpp', - 'ServiceWorkerCloneData.cpp', - 'ServiceWorkerContainer.cpp', - 'ServiceWorkerContainerChild.cpp', - 'ServiceWorkerContainerImpl.cpp', - 'ServiceWorkerContainerParent.cpp', - 'ServiceWorkerContainerProxy.cpp', - 'ServiceWorkerDescriptor.cpp', - 'ServiceWorkerEvents.cpp', - 'ServiceWorkerImpl.cpp', - 'ServiceWorkerInfo.cpp', - 'ServiceWorkerInterceptController.cpp', - 'ServiceWorkerJob.cpp', - 'ServiceWorkerJobQueue.cpp', - 'ServiceWorkerManager.cpp', - 'ServiceWorkerManagerChild.cpp', - 'ServiceWorkerManagerParent.cpp', - 'ServiceWorkerManagerService.cpp', - 'ServiceWorkerOp.cpp', - 'ServiceWorkerParent.cpp', - 'ServiceWorkerPrivate.cpp', - 'ServiceWorkerPrivateImpl.cpp', - 'ServiceWorkerProxy.cpp', - 'ServiceWorkerRegisterJob.cpp', - 'ServiceWorkerRegistrar.cpp', - 'ServiceWorkerRegistration.cpp', - 'ServiceWorkerRegistrationChild.cpp', - 'ServiceWorkerRegistrationDescriptor.cpp', - 'ServiceWorkerRegistrationImpl.cpp', - 'ServiceWorkerRegistrationInfo.cpp', - 'ServiceWorkerRegistrationParent.cpp', - 'ServiceWorkerRegistrationProxy.cpp', - 'ServiceWorkerScriptCache.cpp', - 'ServiceWorkerShutdownBlocker.cpp', - 'ServiceWorkerShutdownState.cpp', - 'ServiceWorkerUnregisterCallback.cpp', - 'ServiceWorkerUnregisterJob.cpp', - 'ServiceWorkerUpdateJob.cpp', - 'ServiceWorkerUpdaterChild.cpp', - 
'ServiceWorkerUpdaterParent.cpp', - 'ServiceWorkerUtils.cpp', + "FetchEventOpChild.cpp", + "FetchEventOpParent.cpp", + "FetchEventOpProxyChild.cpp", + "FetchEventOpProxyParent.cpp", + "RemoteServiceWorkerContainerImpl.cpp", + "RemoteServiceWorkerImpl.cpp", + "RemoteServiceWorkerRegistrationImpl.cpp", + "ServiceWorker.cpp", + "ServiceWorkerActors.cpp", + "ServiceWorkerChild.cpp", + "ServiceWorkerCloneData.cpp", + "ServiceWorkerContainer.cpp", + "ServiceWorkerContainerChild.cpp", + "ServiceWorkerContainerImpl.cpp", + "ServiceWorkerContainerParent.cpp", + "ServiceWorkerContainerProxy.cpp", + "ServiceWorkerDescriptor.cpp", + "ServiceWorkerEvents.cpp", + "ServiceWorkerImpl.cpp", + "ServiceWorkerInfo.cpp", + "ServiceWorkerInterceptController.cpp", + "ServiceWorkerJob.cpp", + "ServiceWorkerJobQueue.cpp", + "ServiceWorkerManager.cpp", + "ServiceWorkerManagerChild.cpp", + "ServiceWorkerManagerParent.cpp", + "ServiceWorkerManagerService.cpp", + "ServiceWorkerOp.cpp", + "ServiceWorkerParent.cpp", + "ServiceWorkerPrivate.cpp", + "ServiceWorkerPrivateImpl.cpp", + "ServiceWorkerProxy.cpp", + "ServiceWorkerRegisterJob.cpp", + "ServiceWorkerRegistrar.cpp", + "ServiceWorkerRegistration.cpp", + "ServiceWorkerRegistrationChild.cpp", + "ServiceWorkerRegistrationDescriptor.cpp", + "ServiceWorkerRegistrationImpl.cpp", + "ServiceWorkerRegistrationInfo.cpp", + "ServiceWorkerRegistrationParent.cpp", + "ServiceWorkerRegistrationProxy.cpp", + "ServiceWorkerScriptCache.cpp", + "ServiceWorkerShutdownBlocker.cpp", + "ServiceWorkerShutdownState.cpp", + "ServiceWorkerUnregisterCallback.cpp", + "ServiceWorkerUnregisterJob.cpp", + "ServiceWorkerUpdateJob.cpp", + "ServiceWorkerUpdaterChild.cpp", + "ServiceWorkerUpdaterParent.cpp", + "ServiceWorkerUtils.cpp", ] IPDL_SOURCES += [ - 'IPCServiceWorkerDescriptor.ipdlh', - 'IPCServiceWorkerRegistrationDescriptor.ipdlh', - 'PFetchEventOp.ipdl', - 'PFetchEventOpProxy.ipdl', - 'PServiceWorker.ipdl', - 'PServiceWorkerContainer.ipdl', - 'PServiceWorkerManager.ipdl', - 'PServiceWorkerRegistration.ipdl', - 'PServiceWorkerUpdater.ipdl', - 'ServiceWorkerOpArgs.ipdlh', - 'ServiceWorkerRegistrarTypes.ipdlh', + "IPCServiceWorkerDescriptor.ipdlh", + "IPCServiceWorkerRegistrationDescriptor.ipdlh", + "PFetchEventOp.ipdl", + "PFetchEventOpProxy.ipdl", + "PServiceWorker.ipdl", + "PServiceWorkerContainer.ipdl", + "PServiceWorkerManager.ipdl", + "PServiceWorkerRegistration.ipdl", + "PServiceWorkerUpdater.ipdl", + "ServiceWorkerOpArgs.ipdlh", + "ServiceWorkerRegistrarTypes.ipdlh", ] -include('/ipc/chromium/chromium-config.mozbuild') +include("/ipc/chromium/chromium-config.mozbuild") -FINAL_LIBRARY = 'xul' +FINAL_LIBRARY = "xul" LOCAL_INCLUDES += [ - '/js/xpconnect/loader', + "/js/xpconnect/loader", ] MOCHITEST_MANIFESTS += [ - 'test/mochitest.ini', + "test/mochitest.ini", ] MOCHITEST_CHROME_MANIFESTS += [ - 'test/chrome.ini', + "test/chrome.ini", ] BROWSER_CHROME_MANIFESTS += [ - 'test/browser.ini', - 'test/isolated/multi-e10s-update/browser.ini', + "test/browser.ini", + "test/isolated/multi-e10s-update/browser.ini", ] -TEST_DIRS += ['test/gtest'] +TEST_DIRS += ["test/gtest"] diff --git a/dom/serviceworkers/test/gtest/moz.build b/dom/serviceworkers/test/gtest/moz.build index 5f1f185a95fe81..99e29453326c04 100644 --- a/dom/serviceworkers/test/gtest/moz.build +++ b/dom/serviceworkers/test/gtest/moz.build @@ -5,9 +5,9 @@ # file, you can obtain one at http://mozilla.org/MPL/2.0/. 
UNIFIED_SOURCES = [ - 'TestReadWrite.cpp', + "TestReadWrite.cpp", ] -include('/ipc/chromium/chromium-config.mozbuild') +include("/ipc/chromium/chromium-config.mozbuild") -FINAL_LIBRARY = 'xul-gtest' +FINAL_LIBRARY = "xul-gtest" diff --git a/dom/simpledb/moz.build b/dom/simpledb/moz.build index ad40fa718f03d1..16db56af065545 100644 --- a/dom/simpledb/moz.build +++ b/dom/simpledb/moz.build @@ -5,39 +5,39 @@ # file, You can obtain one at http://mozilla.org/MPL/2.0/. XPIDL_SOURCES += [ - 'nsISDBCallbacks.idl', - 'nsISDBConnection.idl', - 'nsISDBRequest.idl', - 'nsISDBResults.idl', + "nsISDBCallbacks.idl", + "nsISDBConnection.idl", + "nsISDBRequest.idl", + "nsISDBResults.idl", ] -XPIDL_MODULE = 'dom_simpledb' +XPIDL_MODULE = "dom_simpledb" EXPORTS.mozilla.dom.simpledb += [ - 'ActorsParent.h', + "ActorsParent.h", ] EXPORTS.mozilla.dom += [ - 'SDBConnection.h', + "SDBConnection.h", ] UNIFIED_SOURCES += [ - 'ActorsChild.cpp', - 'ActorsParent.cpp', - 'SDBConnection.cpp', - 'SDBRequest.cpp', - 'SDBResults.cpp', - 'SimpleDBCommon.cpp', + "ActorsChild.cpp", + "ActorsParent.cpp", + "SDBConnection.cpp", + "SDBRequest.cpp", + "SDBResults.cpp", + "SimpleDBCommon.cpp", ] IPDL_SOURCES += [ - 'PBackgroundSDBConnection.ipdl', - 'PBackgroundSDBRequest.ipdl', + "PBackgroundSDBConnection.ipdl", + "PBackgroundSDBRequest.ipdl", ] -include('/ipc/chromium/chromium-config.mozbuild') +include("/ipc/chromium/chromium-config.mozbuild") -FINAL_LIBRARY = 'xul' +FINAL_LIBRARY = "xul" -if CONFIG['GNU_CXX']: - CXXFLAGS += ['-Wno-error=shadow'] +if CONFIG["GNU_CXX"]: + CXXFLAGS += ["-Wno-error=shadow"] diff --git a/dom/smil/moz.build b/dom/smil/moz.build index 01444e0e2a1f77..c6c3bd7351caba 100644 --- a/dom/smil/moz.build +++ b/dom/smil/moz.build @@ -7,68 +7,68 @@ with Files("**"): BUG_COMPONENT = ("Core", "SVG") -MOCHITEST_MANIFESTS += ['test/mochitest.ini'] +MOCHITEST_MANIFESTS += ["test/mochitest.ini"] EXPORTS.mozilla += [ - 'SMILAnimationController.h', - 'SMILAnimationFunction.h', - 'SMILAttr.h', - 'SMILCompositorTable.h', - 'SMILCSSValueType.h', - 'SMILInstanceTime.h', - 'SMILInterval.h', - 'SMILKeySpline.h', - 'SMILMilestone.h', - 'SMILNullType.h', - 'SMILParserUtils.h', - 'SMILRepeatCount.h', - 'SMILSetAnimationFunction.h', - 'SMILTargetIdentifier.h', - 'SMILTimeContainer.h', - 'SMILTimedElement.h', - 'SMILTimeValue.h', - 'SMILTimeValueSpec.h', - 'SMILTimeValueSpecParams.h', - 'SMILType.h', - 'SMILTypes.h', - 'SMILValue.h', + "SMILAnimationController.h", + "SMILAnimationFunction.h", + "SMILAttr.h", + "SMILCompositorTable.h", + "SMILCSSValueType.h", + "SMILInstanceTime.h", + "SMILInterval.h", + "SMILKeySpline.h", + "SMILMilestone.h", + "SMILNullType.h", + "SMILParserUtils.h", + "SMILRepeatCount.h", + "SMILSetAnimationFunction.h", + "SMILTargetIdentifier.h", + "SMILTimeContainer.h", + "SMILTimedElement.h", + "SMILTimeValue.h", + "SMILTimeValueSpec.h", + "SMILTimeValueSpecParams.h", + "SMILType.h", + "SMILTypes.h", + "SMILValue.h", ] EXPORTS.mozilla.dom += [ - 'TimeEvent.h', + "TimeEvent.h", ] UNIFIED_SOURCES += [ - 'SMILAnimationController.cpp', - 'SMILAnimationFunction.cpp', - 'SMILBoolType.cpp', - 'SMILCompositor.cpp', - 'SMILCSSProperty.cpp', - 'SMILCSSValueType.cpp', - 'SMILEnumType.cpp', - 'SMILFloatType.cpp', - 'SMILInstanceTime.cpp', - 'SMILIntegerType.cpp', - 'SMILInterval.cpp', - 'SMILKeySpline.cpp', - 'SMILNullType.cpp', - 'SMILParserUtils.cpp', - 'SMILRepeatCount.cpp', - 'SMILSetAnimationFunction.cpp', - 'SMILStringType.cpp', - 'SMILTimeContainer.cpp', - 'SMILTimedElement.cpp', - 'SMILTimeValue.cpp', - 
'SMILTimeValueSpec.cpp', - 'SMILValue.cpp', - 'TimeEvent.cpp', + "SMILAnimationController.cpp", + "SMILAnimationFunction.cpp", + "SMILBoolType.cpp", + "SMILCompositor.cpp", + "SMILCSSProperty.cpp", + "SMILCSSValueType.cpp", + "SMILEnumType.cpp", + "SMILFloatType.cpp", + "SMILInstanceTime.cpp", + "SMILIntegerType.cpp", + "SMILInterval.cpp", + "SMILKeySpline.cpp", + "SMILNullType.cpp", + "SMILParserUtils.cpp", + "SMILRepeatCount.cpp", + "SMILSetAnimationFunction.cpp", + "SMILStringType.cpp", + "SMILTimeContainer.cpp", + "SMILTimedElement.cpp", + "SMILTimeValue.cpp", + "SMILTimeValueSpec.cpp", + "SMILValue.cpp", + "TimeEvent.cpp", ] LOCAL_INCLUDES += [ - '/dom/base', - '/dom/svg', - '/layout/base', - '/layout/style', + "/dom/base", + "/dom/svg", + "/layout/base", + "/layout/style", ] -FINAL_LIBRARY = 'xul' +FINAL_LIBRARY = "xul" diff --git a/dom/storage/moz.build b/dom/storage/moz.build index fc9028654c3706..2b9aef50438542 100644 --- a/dom/storage/moz.build +++ b/dom/storage/moz.build @@ -8,52 +8,52 @@ with Files("**"): BUG_COMPONENT = ("Core", "Storage: localStorage & sessionStorage") EXPORTS.mozilla.dom += [ - 'LocalStorage.h', - 'LocalStorageManager.h', - 'PartitionedLocalStorage.h', - 'SessionStorageManager.h', - 'Storage.h', - 'StorageActivityService.h', - 'StorageDBUpdater.h', - 'StorageIPC.h', - 'StorageNotifierService.h', - 'StorageObserver.h', - 'StorageUtils.h', + "LocalStorage.h", + "LocalStorageManager.h", + "PartitionedLocalStorage.h", + "SessionStorageManager.h", + "Storage.h", + "StorageActivityService.h", + "StorageDBUpdater.h", + "StorageIPC.h", + "StorageNotifierService.h", + "StorageObserver.h", + "StorageUtils.h", ] UNIFIED_SOURCES += [ - 'LocalStorage.cpp', - 'LocalStorageCache.cpp', - 'LocalStorageManager.cpp', - 'PartitionedLocalStorage.cpp', - 'SessionStorage.cpp', - 'SessionStorageCache.cpp', - 'SessionStorageManager.cpp', - 'SessionStorageObserver.cpp', - 'Storage.cpp', - 'StorageActivityService.cpp', - 'StorageDBThread.cpp', - 'StorageDBUpdater.cpp', - 'StorageIPC.cpp', - 'StorageNotifierService.cpp', - 'StorageObserver.cpp', - 'StorageUtils.cpp', + "LocalStorage.cpp", + "LocalStorageCache.cpp", + "LocalStorageManager.cpp", + "PartitionedLocalStorage.cpp", + "SessionStorage.cpp", + "SessionStorageCache.cpp", + "SessionStorageManager.cpp", + "SessionStorageObserver.cpp", + "Storage.cpp", + "StorageActivityService.cpp", + "StorageDBThread.cpp", + "StorageDBUpdater.cpp", + "StorageIPC.cpp", + "StorageNotifierService.cpp", + "StorageObserver.cpp", + "StorageUtils.cpp", ] IPDL_SOURCES += [ - 'PBackgroundLocalStorageCache.ipdl', - 'PBackgroundSessionStorageCache.ipdl', - 'PBackgroundSessionStorageManager.ipdl', - 'PBackgroundStorage.ipdl', - 'PSessionStorageObserver.ipdl', + "PBackgroundLocalStorageCache.ipdl", + "PBackgroundSessionStorageCache.ipdl", + "PBackgroundSessionStorageManager.ipdl", + "PBackgroundStorage.ipdl", + "PSessionStorageObserver.ipdl", ] -include('/ipc/chromium/chromium-config.mozbuild') +include("/ipc/chromium/chromium-config.mozbuild") -FINAL_LIBRARY = 'xul' +FINAL_LIBRARY = "xul" LOCAL_INCLUDES += [ - '/dom/base', + "/dom/base", ] -if CONFIG['ENABLE_TESTS']: - DEFINES['DOM_STORAGE_TESTS'] = True +if CONFIG["ENABLE_TESTS"]: + DEFINES["DOM_STORAGE_TESTS"] = True diff --git a/dom/svg/moz.build b/dom/svg/moz.build index 5e56d78e56c32d..f94ffb6cdc91e5 100644 --- a/dom/svg/moz.build +++ b/dom/svg/moz.build @@ -7,260 +7,260 @@ with Files("**"): BUG_COMPONENT = ("Core", "SVG") -MOCHITEST_MANIFESTS += ['test/mochitest.ini'] +MOCHITEST_MANIFESTS += 
["test/mochitest.ini"] EXPORTS.mozilla += [ - 'SVGAnimatedClass.h', - 'SVGAnimatedClassOrString.h', - 'SVGAttrValueWrapper.h', - 'SVGContentUtils.h', - 'SVGFragmentIdentifier.h', - 'SVGPreserveAspectRatio.h', - 'SVGStringList.h', - 'SVGTagList.h', + "SVGAnimatedClass.h", + "SVGAnimatedClassOrString.h", + "SVGAttrValueWrapper.h", + "SVGContentUtils.h", + "SVGFragmentIdentifier.h", + "SVGPreserveAspectRatio.h", + "SVGStringList.h", + "SVGTagList.h", ] EXPORTS.mozilla.dom += [ - 'SVGAElement.h', - 'SVGAnimatedRect.h', - 'SVGAnimateElement.h', - 'SVGAnimateMotionElement.h', - 'SVGAnimateTransformElement.h', - 'SVGAnimationElement.h', - 'SVGCircleElement.h', - 'SVGClipPathElement.h', - 'SVGComponentTransferFunctionElement.h', - 'SVGDefsElement.h', - 'SVGDescElement.h', - 'SVGDocument.h', - 'SVGElement.h', - 'SVGElementFactory.h', - 'SVGEllipseElement.h', - 'SVGFEBlendElement.h', - 'SVGFEColorMatrixElement.h', - 'SVGFEComponentTransferElement.h', - 'SVGFECompositeElement.h', - 'SVGFEConvolveMatrixElement.h', - 'SVGFEDiffuseLightingElement.h', - 'SVGFEDisplacementMapElement.h', - 'SVGFEDistantLightElement.h', - 'SVGFEDropShadowElement.h', - 'SVGFEFloodElement.h', - 'SVGFEGaussianBlurElement.h', - 'SVGFEImageElement.h', - 'SVGFEMergeElement.h', - 'SVGFEMergeNodeElement.h', - 'SVGFEMorphologyElement.h', - 'SVGFEOffsetElement.h', - 'SVGFEPointLightElement.h', - 'SVGFESpecularLightingElement.h', - 'SVGFESpotLightElement.h', - 'SVGFETileElement.h', - 'SVGFETurbulenceElement.h', - 'SVGFilterElement.h', - 'SVGFilters.h', - 'SVGForeignObjectElement.h', - 'SVGGElement.h', - 'SVGGeometryElement.h', - 'SVGGradientElement.h', - 'SVGGraphicsElement.h', - 'SVGImageElement.h', - 'SVGLineElement.h', - 'SVGMarkerElement.h', - 'SVGMaskElement.h', - 'SVGMatrix.h', - 'SVGMetadataElement.h', - 'SVGMPathElement.h', - 'SVGPathData.h', - 'SVGPathElement.h', - 'SVGPatternElement.h', - 'SVGPolygonElement.h', - 'SVGPolylineElement.h', - 'SVGRect.h', - 'SVGRectElement.h', - 'SVGScriptElement.h', - 'SVGSetElement.h', - 'SVGStopElement.h', - 'SVGStyleElement.h', - 'SVGSVGElement.h', - 'SVGSwitchElement.h', - 'SVGSymbolElement.h', - 'SVGTests.h', - 'SVGTextContentElement.h', - 'SVGTextElement.h', - 'SVGTextPathElement.h', - 'SVGTextPositioningElement.h', - 'SVGTitleElement.h', - 'SVGTransformableElement.h', - 'SVGTSpanElement.h', - 'SVGUseElement.h', - 'SVGViewElement.h', - 'SVGViewportElement.h', + "SVGAElement.h", + "SVGAnimatedRect.h", + "SVGAnimateElement.h", + "SVGAnimateMotionElement.h", + "SVGAnimateTransformElement.h", + "SVGAnimationElement.h", + "SVGCircleElement.h", + "SVGClipPathElement.h", + "SVGComponentTransferFunctionElement.h", + "SVGDefsElement.h", + "SVGDescElement.h", + "SVGDocument.h", + "SVGElement.h", + "SVGElementFactory.h", + "SVGEllipseElement.h", + "SVGFEBlendElement.h", + "SVGFEColorMatrixElement.h", + "SVGFEComponentTransferElement.h", + "SVGFECompositeElement.h", + "SVGFEConvolveMatrixElement.h", + "SVGFEDiffuseLightingElement.h", + "SVGFEDisplacementMapElement.h", + "SVGFEDistantLightElement.h", + "SVGFEDropShadowElement.h", + "SVGFEFloodElement.h", + "SVGFEGaussianBlurElement.h", + "SVGFEImageElement.h", + "SVGFEMergeElement.h", + "SVGFEMergeNodeElement.h", + "SVGFEMorphologyElement.h", + "SVGFEOffsetElement.h", + "SVGFEPointLightElement.h", + "SVGFESpecularLightingElement.h", + "SVGFESpotLightElement.h", + "SVGFETileElement.h", + "SVGFETurbulenceElement.h", + "SVGFilterElement.h", + "SVGFilters.h", + "SVGForeignObjectElement.h", + "SVGGElement.h", + "SVGGeometryElement.h", + 
"SVGGradientElement.h", + "SVGGraphicsElement.h", + "SVGImageElement.h", + "SVGLineElement.h", + "SVGMarkerElement.h", + "SVGMaskElement.h", + "SVGMatrix.h", + "SVGMetadataElement.h", + "SVGMPathElement.h", + "SVGPathData.h", + "SVGPathElement.h", + "SVGPatternElement.h", + "SVGPolygonElement.h", + "SVGPolylineElement.h", + "SVGRect.h", + "SVGRectElement.h", + "SVGScriptElement.h", + "SVGSetElement.h", + "SVGStopElement.h", + "SVGStyleElement.h", + "SVGSVGElement.h", + "SVGSwitchElement.h", + "SVGSymbolElement.h", + "SVGTests.h", + "SVGTextContentElement.h", + "SVGTextElement.h", + "SVGTextPathElement.h", + "SVGTextPositioningElement.h", + "SVGTitleElement.h", + "SVGTransformableElement.h", + "SVGTSpanElement.h", + "SVGUseElement.h", + "SVGViewElement.h", + "SVGViewportElement.h", ] UNIFIED_SOURCES += [ - 'DOMSVGAngle.cpp', - 'DOMSVGAnimatedAngle.cpp', - 'DOMSVGAnimatedBoolean.cpp', - 'DOMSVGAnimatedEnumeration.cpp', - 'DOMSVGAnimatedInteger.cpp', - 'DOMSVGAnimatedLength.cpp', - 'DOMSVGAnimatedLengthList.cpp', - 'DOMSVGAnimatedNumber.cpp', - 'DOMSVGAnimatedNumberList.cpp', - 'DOMSVGAnimatedString.cpp', - 'DOMSVGAnimatedTransformList.cpp', - 'DOMSVGLength.cpp', - 'DOMSVGLengthList.cpp', - 'DOMSVGNumber.cpp', - 'DOMSVGNumberList.cpp', - 'DOMSVGPathSeg.cpp', - 'DOMSVGPathSegList.cpp', - 'DOMSVGPoint.cpp', - 'DOMSVGPointList.cpp', - 'DOMSVGStringList.cpp', - 'DOMSVGTransform.cpp', - 'DOMSVGTransformList.cpp', - 'SVGAElement.cpp', - 'SVGAnimatedBoolean.cpp', - 'SVGAnimatedClass.cpp', - 'SVGAnimatedClassOrString.cpp', - 'SVGAnimatedEnumeration.cpp', - 'SVGAnimatedInteger.cpp', - 'SVGAnimatedIntegerPair.cpp', - 'SVGAnimatedLength.cpp', - 'SVGAnimatedLengthList.cpp', - 'SVGAnimatedNumber.cpp', - 'SVGAnimatedNumberList.cpp', - 'SVGAnimatedNumberPair.cpp', - 'SVGAnimatedOrient.cpp', - 'SVGAnimatedPathSegList.cpp', - 'SVGAnimatedPointList.cpp', - 'SVGAnimatedPreserveAspectRatio.cpp', - 'SVGAnimatedRect.cpp', - 'SVGAnimatedString.cpp', - 'SVGAnimatedTransformList.cpp', - 'SVGAnimatedViewBox.cpp', - 'SVGAnimateElement.cpp', - 'SVGAnimateMotionElement.cpp', - 'SVGAnimateTransformElement.cpp', - 'SVGAnimationElement.cpp', - 'SVGAttrValueWrapper.cpp', - 'SVGCircleElement.cpp', - 'SVGClipPathElement.cpp', - 'SVGContentUtils.cpp', - 'SVGDataParser.cpp', - 'SVGDefsElement.cpp', - 'SVGDescElement.cpp', - 'SVGDocument.cpp', - 'SVGElement.cpp', - 'SVGElementFactory.cpp', - 'SVGEllipseElement.cpp', - 'SVGFEBlendElement.cpp', - 'SVGFEColorMatrixElement.cpp', - 'SVGFEComponentTransferElement.cpp', - 'SVGFECompositeElement.cpp', - 'SVGFEConvolveMatrixElement.cpp', - 'SVGFEDiffuseLightingElement.cpp', - 'SVGFEDisplacementMapElement.cpp', - 'SVGFEDistantLightElement.cpp', - 'SVGFEDropShadowElement.cpp', - 'SVGFEFloodElement.cpp', - 'SVGFEGaussianBlurElement.cpp', - 'SVGFEImageElement.cpp', - 'SVGFEMergeElement.cpp', - 'SVGFEMergeNodeElement.cpp', - 'SVGFEMorphologyElement.cpp', - 'SVGFEOffsetElement.cpp', - 'SVGFEPointLightElement.cpp', - 'SVGFESpecularLightingElement.cpp', - 'SVGFESpotLightElement.cpp', - 'SVGFETileElement.cpp', - 'SVGFETurbulenceElement.cpp', - 'SVGFilterElement.cpp', - 'SVGFilters.cpp', - 'SVGForeignObjectElement.cpp', - 'SVGFragmentIdentifier.cpp', - 'SVGGElement.cpp', - 'SVGGeometryElement.cpp', - 'SVGGeometryProperty.cpp', - 'SVGGradientElement.cpp', - 'SVGGraphicsElement.cpp', - 'SVGImageElement.cpp', - 'SVGIntegerPairSMILType.cpp', - 'SVGLength.cpp', - 'SVGLengthList.cpp', - 'SVGLengthListSMILType.cpp', - 'SVGLineElement.cpp', - 'SVGMarkerElement.cpp', - 'SVGMaskElement.cpp', - 
'SVGMatrix.cpp', - 'SVGMetadataElement.cpp', - 'SVGMotionSMILAnimationFunction.cpp', - 'SVGMotionSMILAttr.cpp', - 'SVGMotionSMILPathUtils.cpp', - 'SVGMotionSMILType.cpp', - 'SVGMPathElement.cpp', - 'SVGNumberList.cpp', - 'SVGNumberListSMILType.cpp', - 'SVGNumberPairSMILType.cpp', - 'SVGOrientSMILType.cpp', - 'SVGPathData.cpp', - 'SVGPathDataParser.cpp', - 'SVGPathElement.cpp', - 'SVGPathSegListSMILType.cpp', - 'SVGPathSegUtils.cpp', - 'SVGPatternElement.cpp', - 'SVGPointList.cpp', - 'SVGPointListSMILType.cpp', - 'SVGPolyElement.cpp', - 'SVGPolygonElement.cpp', - 'SVGPolylineElement.cpp', - 'SVGPreserveAspectRatio.cpp', - 'SVGRect.cpp', - 'SVGRectElement.cpp', - 'SVGScriptElement.cpp', - 'SVGSetElement.cpp', - 'SVGStopElement.cpp', - 'SVGStringList.cpp', - 'SVGStyleElement.cpp', - 'SVGSVGElement.cpp', - 'SVGSwitchElement.cpp', - 'SVGSymbolElement.cpp', - 'SVGTests.cpp', - 'SVGTextContentElement.cpp', - 'SVGTextElement.cpp', - 'SVGTextPathElement.cpp', - 'SVGTextPositioningElement.cpp', - 'SVGTitleElement.cpp', - 'SVGTransform.cpp', - 'SVGTransformableElement.cpp', - 'SVGTransformList.cpp', - 'SVGTransformListParser.cpp', - 'SVGTransformListSMILType.cpp', - 'SVGTSpanElement.cpp', - 'SVGUseElement.cpp', - 'SVGViewBoxSMILType.cpp', - 'SVGViewElement.cpp', - 'SVGViewportElement.cpp', + "DOMSVGAngle.cpp", + "DOMSVGAnimatedAngle.cpp", + "DOMSVGAnimatedBoolean.cpp", + "DOMSVGAnimatedEnumeration.cpp", + "DOMSVGAnimatedInteger.cpp", + "DOMSVGAnimatedLength.cpp", + "DOMSVGAnimatedLengthList.cpp", + "DOMSVGAnimatedNumber.cpp", + "DOMSVGAnimatedNumberList.cpp", + "DOMSVGAnimatedString.cpp", + "DOMSVGAnimatedTransformList.cpp", + "DOMSVGLength.cpp", + "DOMSVGLengthList.cpp", + "DOMSVGNumber.cpp", + "DOMSVGNumberList.cpp", + "DOMSVGPathSeg.cpp", + "DOMSVGPathSegList.cpp", + "DOMSVGPoint.cpp", + "DOMSVGPointList.cpp", + "DOMSVGStringList.cpp", + "DOMSVGTransform.cpp", + "DOMSVGTransformList.cpp", + "SVGAElement.cpp", + "SVGAnimatedBoolean.cpp", + "SVGAnimatedClass.cpp", + "SVGAnimatedClassOrString.cpp", + "SVGAnimatedEnumeration.cpp", + "SVGAnimatedInteger.cpp", + "SVGAnimatedIntegerPair.cpp", + "SVGAnimatedLength.cpp", + "SVGAnimatedLengthList.cpp", + "SVGAnimatedNumber.cpp", + "SVGAnimatedNumberList.cpp", + "SVGAnimatedNumberPair.cpp", + "SVGAnimatedOrient.cpp", + "SVGAnimatedPathSegList.cpp", + "SVGAnimatedPointList.cpp", + "SVGAnimatedPreserveAspectRatio.cpp", + "SVGAnimatedRect.cpp", + "SVGAnimatedString.cpp", + "SVGAnimatedTransformList.cpp", + "SVGAnimatedViewBox.cpp", + "SVGAnimateElement.cpp", + "SVGAnimateMotionElement.cpp", + "SVGAnimateTransformElement.cpp", + "SVGAnimationElement.cpp", + "SVGAttrValueWrapper.cpp", + "SVGCircleElement.cpp", + "SVGClipPathElement.cpp", + "SVGContentUtils.cpp", + "SVGDataParser.cpp", + "SVGDefsElement.cpp", + "SVGDescElement.cpp", + "SVGDocument.cpp", + "SVGElement.cpp", + "SVGElementFactory.cpp", + "SVGEllipseElement.cpp", + "SVGFEBlendElement.cpp", + "SVGFEColorMatrixElement.cpp", + "SVGFEComponentTransferElement.cpp", + "SVGFECompositeElement.cpp", + "SVGFEConvolveMatrixElement.cpp", + "SVGFEDiffuseLightingElement.cpp", + "SVGFEDisplacementMapElement.cpp", + "SVGFEDistantLightElement.cpp", + "SVGFEDropShadowElement.cpp", + "SVGFEFloodElement.cpp", + "SVGFEGaussianBlurElement.cpp", + "SVGFEImageElement.cpp", + "SVGFEMergeElement.cpp", + "SVGFEMergeNodeElement.cpp", + "SVGFEMorphologyElement.cpp", + "SVGFEOffsetElement.cpp", + "SVGFEPointLightElement.cpp", + "SVGFESpecularLightingElement.cpp", + "SVGFESpotLightElement.cpp", + "SVGFETileElement.cpp", + 
"SVGFETurbulenceElement.cpp", + "SVGFilterElement.cpp", + "SVGFilters.cpp", + "SVGForeignObjectElement.cpp", + "SVGFragmentIdentifier.cpp", + "SVGGElement.cpp", + "SVGGeometryElement.cpp", + "SVGGeometryProperty.cpp", + "SVGGradientElement.cpp", + "SVGGraphicsElement.cpp", + "SVGImageElement.cpp", + "SVGIntegerPairSMILType.cpp", + "SVGLength.cpp", + "SVGLengthList.cpp", + "SVGLengthListSMILType.cpp", + "SVGLineElement.cpp", + "SVGMarkerElement.cpp", + "SVGMaskElement.cpp", + "SVGMatrix.cpp", + "SVGMetadataElement.cpp", + "SVGMotionSMILAnimationFunction.cpp", + "SVGMotionSMILAttr.cpp", + "SVGMotionSMILPathUtils.cpp", + "SVGMotionSMILType.cpp", + "SVGMPathElement.cpp", + "SVGNumberList.cpp", + "SVGNumberListSMILType.cpp", + "SVGNumberPairSMILType.cpp", + "SVGOrientSMILType.cpp", + "SVGPathData.cpp", + "SVGPathDataParser.cpp", + "SVGPathElement.cpp", + "SVGPathSegListSMILType.cpp", + "SVGPathSegUtils.cpp", + "SVGPatternElement.cpp", + "SVGPointList.cpp", + "SVGPointListSMILType.cpp", + "SVGPolyElement.cpp", + "SVGPolygonElement.cpp", + "SVGPolylineElement.cpp", + "SVGPreserveAspectRatio.cpp", + "SVGRect.cpp", + "SVGRectElement.cpp", + "SVGScriptElement.cpp", + "SVGSetElement.cpp", + "SVGStopElement.cpp", + "SVGStringList.cpp", + "SVGStyleElement.cpp", + "SVGSVGElement.cpp", + "SVGSwitchElement.cpp", + "SVGSymbolElement.cpp", + "SVGTests.cpp", + "SVGTextContentElement.cpp", + "SVGTextElement.cpp", + "SVGTextPathElement.cpp", + "SVGTextPositioningElement.cpp", + "SVGTitleElement.cpp", + "SVGTransform.cpp", + "SVGTransformableElement.cpp", + "SVGTransformList.cpp", + "SVGTransformListParser.cpp", + "SVGTransformListSMILType.cpp", + "SVGTSpanElement.cpp", + "SVGUseElement.cpp", + "SVGViewBoxSMILType.cpp", + "SVGViewElement.cpp", + "SVGViewportElement.cpp", ] -include('/ipc/chromium/chromium-config.mozbuild') +include("/ipc/chromium/chromium-config.mozbuild") -FINAL_LIBRARY = 'xul' +FINAL_LIBRARY = "xul" LOCAL_INCLUDES += [ - '/dom', - '/dom/base', - '/dom/html', - '/dom/smil', - '/dom/svg', - '/dom/xml', - '/layout/base', - '/layout/generic', - '/layout/style', - '/layout/svg', - '/layout/xul', + "/dom", + "/dom/base", + "/dom/html", + "/dom/smil", + "/dom/svg", + "/dom/xml", + "/layout/base", + "/layout/generic", + "/layout/style", + "/layout/svg", + "/layout/xul", ] -if CONFIG['CC_TYPE'] in ('clang', 'gcc'): - CXXFLAGS += ['-Wno-error=shadow'] +if CONFIG["CC_TYPE"] in ("clang", "gcc"): + CXXFLAGS += ["-Wno-error=shadow"] diff --git a/dom/system/android/moz.build b/dom/system/android/moz.build index 53ac80f8993971..04dffba0248328 100644 --- a/dom/system/android/moz.build +++ b/dom/system/android/moz.build @@ -5,13 +5,13 @@ # file, You can obtain one at http://mozilla.org/MPL/2.0/. SOURCES += [ - 'AndroidLocationProvider.cpp', - 'nsHapticFeedback.cpp', + "AndroidLocationProvider.cpp", + "nsHapticFeedback.cpp", ] -include('/ipc/chromium/chromium-config.mozbuild') +include("/ipc/chromium/chromium-config.mozbuild") -FINAL_LIBRARY = 'xul' +FINAL_LIBRARY = "xul" LOCAL_INCLUDES += [ - '/dom/geolocation', + "/dom/geolocation", ] diff --git a/dom/system/linux/moz.build b/dom/system/linux/moz.build index d9e50cb311caa7..ab9d076deb16ec 100644 --- a/dom/system/linux/moz.build +++ b/dom/system/linux/moz.build @@ -4,17 +4,13 @@ # License, v. 2.0. If a copy of the MPL was not distributed with this # file, You can obtain one at http://mozilla.org/MPL/2.0/. 
-if CONFIG['MOZ_GPSD']: - SOURCES += [ - 'GpsdLocationProvider.cpp' - ] +if CONFIG["MOZ_GPSD"]: + SOURCES += ["GpsdLocationProvider.cpp"] - CXXFLAGS += CONFIG['MOZ_GPSD_CFLAGS'] + CXXFLAGS += CONFIG["MOZ_GPSD_CFLAGS"] - OS_LIBS += CONFIG['MOZ_GPSD_LIBS'] + OS_LIBS += CONFIG["MOZ_GPSD_LIBS"] - LOCAL_INCLUDES += [ - '/dom/geolocation' - ] + LOCAL_INCLUDES += ["/dom/geolocation"] -FINAL_LIBRARY = 'xul' +FINAL_LIBRARY = "xul" diff --git a/dom/system/mac/moz.build b/dom/system/mac/moz.build index 8fb85bbe309eab..6a10090793291d 100644 --- a/dom/system/mac/moz.build +++ b/dom/system/mac/moz.build @@ -5,18 +5,17 @@ # file, You can obtain one at http://mozilla.org/MPL/2.0/. SOURCES += [ - 'CoreLocationLocationProvider.mm', - 'nsOSPermissionRequest.mm', + "CoreLocationLocationProvider.mm", + "nsOSPermissionRequest.mm", ] EXPORTS += [ - 'nsOSPermissionRequest.h', + "nsOSPermissionRequest.h", ] -include('/ipc/chromium/chromium-config.mozbuild') +include("/ipc/chromium/chromium-config.mozbuild") -FINAL_LIBRARY = 'xul' +FINAL_LIBRARY = "xul" LOCAL_INCLUDES += [ - '/dom/geolocation', + "/dom/geolocation", ] - diff --git a/dom/system/moz.build b/dom/system/moz.build index f93cca0b6f48d7..7b26b668fab93d 100644 --- a/dom/system/moz.build +++ b/dom/system/moz.build @@ -41,67 +41,67 @@ with Files("tests/mochitest.ini"): with Files("tests/*1197901*"): BUG_COMPONENT = ("Core", "DOM: Device Interfaces") -toolkit = CONFIG['MOZ_WIDGET_TOOLKIT'] - -if toolkit == 'windows': - DIRS += ['windows'] -elif toolkit == 'cocoa': - DIRS += ['mac'] -elif toolkit == 'android': - DIRS += ['android'] -elif toolkit == 'gtk': - DIRS += ['linux'] - -if toolkit != 'cocoa': +toolkit = CONFIG["MOZ_WIDGET_TOOLKIT"] + +if toolkit == "windows": + DIRS += ["windows"] +elif toolkit == "cocoa": + DIRS += ["mac"] +elif toolkit == "android": + DIRS += ["android"] +elif toolkit == "gtk": + DIRS += ["linux"] + +if toolkit != "cocoa": EXPORTS += [ - 'nsOSPermissionRequest.h', + "nsOSPermissionRequest.h", ] XPIDL_SOURCES += [ - 'nsIOSFileConstantsService.idl', - 'nsIOSPermissionRequest.idl', + "nsIOSFileConstantsService.idl", + "nsIOSPermissionRequest.idl", ] -XPIDL_MODULE = 'dom_system' +XPIDL_MODULE = "dom_system" EXPORTS += [ - 'nsDeviceSensors.h', - 'nsOSPermissionRequestBase.h', + "nsDeviceSensors.h", + "nsOSPermissionRequestBase.h", ] EXPORTS.mozilla += [ - 'OSFileConstants.h', + "OSFileConstants.h", ] EXPORTS.mozilla.dom += [ - 'IOUtils.h', + "IOUtils.h", ] UNIFIED_SOURCES += [ - 'IOUtils.cpp', - 'nsDeviceSensors.cpp', - 'nsOSPermissionRequestBase.cpp', - 'OSFileConstants.cpp', + "IOUtils.cpp", + "nsDeviceSensors.cpp", + "nsOSPermissionRequestBase.cpp", + "OSFileConstants.cpp", ] EXTRA_JS_MODULES += [ - 'NetworkGeolocationProvider.jsm', + "NetworkGeolocationProvider.jsm", ] XPCOM_MANIFESTS += [ - 'components.conf', + "components.conf", ] -include('/ipc/chromium/chromium-config.mozbuild') +include("/ipc/chromium/chromium-config.mozbuild") -FINAL_LIBRARY = 'xul' +FINAL_LIBRARY = "xul" # We fire the nsDOMDeviceAcceleration LOCAL_INCLUDES += [ - '/dom/base', - '/dom/bindings', - '/js/xpconnect/loader', - '/xpcom/base', + "/dom/base", + "/dom/bindings", + "/js/xpconnect/loader", + "/xpcom/base", ] -MOCHITEST_CHROME_MANIFESTS += ['tests/chrome.ini', 'tests/ioutils/chrome.ini'] -MOCHITEST_MANIFESTS += ['tests/mochitest.ini'] +MOCHITEST_CHROME_MANIFESTS += ["tests/chrome.ini", "tests/ioutils/chrome.ini"] +MOCHITEST_MANIFESTS += ["tests/mochitest.ini"] diff --git a/dom/system/windows/moz.build b/dom/system/windows/moz.build index 
54765299636b52..45131af39e92aa 100644 --- a/dom/system/windows/moz.build +++ b/dom/system/windows/moz.build @@ -4,13 +4,8 @@ # License, v. 2.0. If a copy of the MPL was not distributed with this # file, You can obtain one at http://mozilla.org/MPL/2.0/. -SOURCES += [ - 'nsHapticFeedback.cpp', - 'WindowsLocationProvider.cpp' -] +SOURCES += ["nsHapticFeedback.cpp", "WindowsLocationProvider.cpp"] -LOCAL_INCLUDES += [ - '/dom/geolocation' -] +LOCAL_INCLUDES += ["/dom/geolocation"] -FINAL_LIBRARY = 'xul' +FINAL_LIBRARY = "xul" diff --git a/dom/tests/moz.build b/dom/tests/moz.build index e8c3af7eca69d8..e89073cd7ee83c 100644 --- a/dom/tests/moz.build +++ b/dom/tests/moz.build @@ -149,88 +149,90 @@ with Files("unit/test_PromiseDebugging.js"): BUG_COMPONENT = ("Core", "XPConnect") MOCHITEST_MANIFESTS += [ - 'mochitest/ajax/jquery/mochitest.ini', - 'mochitest/ajax/mochikit/mochitest.ini', - 'mochitest/ajax/offline/mochitest.ini', - 'mochitest/ajax/prototype/mochitest.ini', - 'mochitest/ajax/scriptaculous/mochitest.ini', - 'mochitest/beacon/mochitest.ini', - 'mochitest/bugs/mochitest.ini', - 'mochitest/crypto/mochitest.ini', - 'mochitest/dom-level0/mochitest.ini', - 'mochitest/dom-level1-core/mochitest.ini', - 'mochitest/dom-level2-core/mochitest.ini', - 'mochitest/dom-level2-html/mochitest.ini', - 'mochitest/fetch/mochitest.ini', - 'mochitest/gamepad/mochitest.ini', - 'mochitest/general/mochitest.ini', - 'mochitest/geolocation/mochitest.ini', - 'mochitest/keyhandling/mochitest.ini', - 'mochitest/localstorage/mochitest.ini', - 'mochitest/orientation/mochitest.ini', - 'mochitest/pointerlock/mochitest.ini', - 'mochitest/script/mochitest.ini', - 'mochitest/sessionstorage/mochitest.ini', - 'mochitest/storageevent/mochitest.ini', - 'mochitest/webcomponents/mochitest.ini', - 'mochitest/whatwg/mochitest.ini', + "mochitest/ajax/jquery/mochitest.ini", + "mochitest/ajax/mochikit/mochitest.ini", + "mochitest/ajax/offline/mochitest.ini", + "mochitest/ajax/prototype/mochitest.ini", + "mochitest/ajax/scriptaculous/mochitest.ini", + "mochitest/beacon/mochitest.ini", + "mochitest/bugs/mochitest.ini", + "mochitest/crypto/mochitest.ini", + "mochitest/dom-level0/mochitest.ini", + "mochitest/dom-level1-core/mochitest.ini", + "mochitest/dom-level2-core/mochitest.ini", + "mochitest/dom-level2-html/mochitest.ini", + "mochitest/fetch/mochitest.ini", + "mochitest/gamepad/mochitest.ini", + "mochitest/general/mochitest.ini", + "mochitest/geolocation/mochitest.ini", + "mochitest/keyhandling/mochitest.ini", + "mochitest/localstorage/mochitest.ini", + "mochitest/orientation/mochitest.ini", + "mochitest/pointerlock/mochitest.ini", + "mochitest/script/mochitest.ini", + "mochitest/sessionstorage/mochitest.ini", + "mochitest/storageevent/mochitest.ini", + "mochitest/webcomponents/mochitest.ini", + "mochitest/whatwg/mochitest.ini", ] MOCHITEST_CHROME_MANIFESTS += [ - 'mochitest/beacon/chrome.ini', - 'mochitest/chrome/chrome.ini', - 'mochitest/general/chrome.ini', - 'mochitest/geolocation/chrome.ini', - 'mochitest/keyhandling/chrome.ini', - 'mochitest/localstorage/chrome.ini', - 'mochitest/sessionstorage/chrome.ini', - 'mochitest/webcomponents/chrome.ini', - 'mochitest/whatwg/chrome.ini', + "mochitest/beacon/chrome.ini", + "mochitest/chrome/chrome.ini", + "mochitest/general/chrome.ini", + "mochitest/geolocation/chrome.ini", + "mochitest/keyhandling/chrome.ini", + "mochitest/localstorage/chrome.ini", + "mochitest/sessionstorage/chrome.ini", + "mochitest/webcomponents/chrome.ini", + "mochitest/whatwg/chrome.ini", ] 
-XPCSHELL_TESTS_MANIFESTS += ['unit/xpcshell.ini'] -BROWSER_CHROME_MANIFESTS += ['browser/browser.ini', 'browser/perfmetrics/browser.ini', - 'mochitest/ajax/offline/browser.ini'] +XPCSHELL_TESTS_MANIFESTS += ["unit/xpcshell.ini"] +BROWSER_CHROME_MANIFESTS += [ + "browser/browser.ini", + "browser/perfmetrics/browser.ini", + "mochitest/ajax/offline/browser.ini", +] TEST_HARNESS_FILES.testing.mochitest.tests.dom.tests.mochitest.ajax.lib += [ - 'mochitest/ajax/lib/AJAX_setup.js', - 'mochitest/ajax/lib/test.css', + "mochitest/ajax/lib/AJAX_setup.js", + "mochitest/ajax/lib/test.css", ] TEST_HARNESS_FILES.testing.mochitest.tests.dom.tests.mochitest.ajax.mochikit.tests += [ - 'mochitest/ajax/mochikit/tests/cli.js', - 'mochitest/ajax/mochikit/tests/FakeJSAN.js', - 'mochitest/ajax/mochikit/tests/index.html', - 'mochitest/ajax/mochikit/tests/MochiKit-Async.html', - 'mochitest/ajax/mochikit/tests/MochiKit-Base.html', - 'mochitest/ajax/mochikit/tests/MochiKit-Color.html', - 'mochitest/ajax/mochikit/tests/MochiKit-DateTime.html', - 'mochitest/ajax/mochikit/tests/MochiKit-DOM.html', - 'mochitest/ajax/mochikit/tests/MochiKit-DragAndDrop.html', - 'mochitest/ajax/mochikit/tests/MochiKit-Format.html', - 'mochitest/ajax/mochikit/tests/MochiKit-Iter.html', - 'mochitest/ajax/mochikit/tests/MochiKit-JSAN.html', - 'mochitest/ajax/mochikit/tests/MochiKit-Logging.html', - 'mochitest/ajax/mochikit/tests/MochiKit-MochiKit.html', - 'mochitest/ajax/mochikit/tests/MochiKit-Selector.html', - 'mochitest/ajax/mochikit/tests/MochiKit-Signal.html', - 'mochitest/ajax/mochikit/tests/MochiKit-Style.html', - 'mochitest/ajax/mochikit/tests/MochiKit-Visual.html', - 'mochitest/ajax/mochikit/tests/standalone.js', - 'mochitest/ajax/mochikit/tests/test_Base.js', - 'mochitest/ajax/mochikit/tests/test_Color.js', - 'mochitest/ajax/mochikit/tests/test_DateTime.js', - 'mochitest/ajax/mochikit/tests/test_DragAndDrop.js', - 'mochitest/ajax/mochikit/tests/test_Format.js', - 'mochitest/ajax/mochikit/tests/test_Iter.js', - 'mochitest/ajax/mochikit/tests/test_Logging.js', - 'mochitest/ajax/mochikit/tests/test_MochiKit-Async.json', - 'mochitest/ajax/mochikit/tests/test_Signal.js', + "mochitest/ajax/mochikit/tests/cli.js", + "mochitest/ajax/mochikit/tests/FakeJSAN.js", + "mochitest/ajax/mochikit/tests/index.html", + "mochitest/ajax/mochikit/tests/MochiKit-Async.html", + "mochitest/ajax/mochikit/tests/MochiKit-Base.html", + "mochitest/ajax/mochikit/tests/MochiKit-Color.html", + "mochitest/ajax/mochikit/tests/MochiKit-DateTime.html", + "mochitest/ajax/mochikit/tests/MochiKit-DOM.html", + "mochitest/ajax/mochikit/tests/MochiKit-DragAndDrop.html", + "mochitest/ajax/mochikit/tests/MochiKit-Format.html", + "mochitest/ajax/mochikit/tests/MochiKit-Iter.html", + "mochitest/ajax/mochikit/tests/MochiKit-JSAN.html", + "mochitest/ajax/mochikit/tests/MochiKit-Logging.html", + "mochitest/ajax/mochikit/tests/MochiKit-MochiKit.html", + "mochitest/ajax/mochikit/tests/MochiKit-Selector.html", + "mochitest/ajax/mochikit/tests/MochiKit-Signal.html", + "mochitest/ajax/mochikit/tests/MochiKit-Style.html", + "mochitest/ajax/mochikit/tests/MochiKit-Visual.html", + "mochitest/ajax/mochikit/tests/standalone.js", + "mochitest/ajax/mochikit/tests/test_Base.js", + "mochitest/ajax/mochikit/tests/test_Color.js", + "mochitest/ajax/mochikit/tests/test_DateTime.js", + "mochitest/ajax/mochikit/tests/test_DragAndDrop.js", + "mochitest/ajax/mochikit/tests/test_Format.js", + "mochitest/ajax/mochikit/tests/test_Iter.js", + "mochitest/ajax/mochikit/tests/test_Logging.js", + 
"mochitest/ajax/mochikit/tests/test_MochiKit-Async.json", + "mochitest/ajax/mochikit/tests/test_Signal.js", ] TEST_HARNESS_FILES.testing.mochitest.tests.dom.tests.mochitest.ajax.mochikit.tests.SimpleTest += [ - 'mochitest/ajax/mochikit/tests/SimpleTest/SimpleTest.js', - 'mochitest/ajax/mochikit/tests/SimpleTest/test.css', - 'mochitest/ajax/mochikit/tests/SimpleTest/TestRunner.js', + "mochitest/ajax/mochikit/tests/SimpleTest/SimpleTest.js", + "mochitest/ajax/mochikit/tests/SimpleTest/test.css", + "mochitest/ajax/mochikit/tests/SimpleTest/TestRunner.js", ] - diff --git a/dom/u2f/moz.build b/dom/u2f/moz.build index 5fe7cbe3e54e44..c69d12a0e167d7 100644 --- a/dom/u2f/moz.build +++ b/dom/u2f/moz.build @@ -8,24 +8,24 @@ with Files("**"): BUG_COMPONENT = ("Core", "DOM: Web Authentication") EXPORTS.mozilla.dom += [ - 'U2F.h', - 'U2FAuthenticator.h', + "U2F.h", + "U2FAuthenticator.h", ] UNIFIED_SOURCES += [ - 'U2F.cpp', + "U2F.cpp", ] -include('/ipc/chromium/chromium-config.mozbuild') +include("/ipc/chromium/chromium-config.mozbuild") -FINAL_LIBRARY = 'xul' +FINAL_LIBRARY = "xul" LOCAL_INCLUDES += [ - '/dom/base', - '/dom/crypto', - '/dom/webauthn', - '/security/manager/ssl', + "/dom/base", + "/dom/crypto", + "/dom/webauthn", + "/security/manager/ssl", ] -MOCHITEST_MANIFESTS += ['tests/mochitest.ini'] -BROWSER_CHROME_MANIFESTS += ['tests/browser/browser.ini'] +MOCHITEST_MANIFESTS += ["tests/mochitest.ini"] +BROWSER_CHROME_MANIFESTS += ["tests/browser/browser.ini"] diff --git a/dom/url/moz.build b/dom/url/moz.build index 1c7ddb5aa5d44a..e001ccdd5ff63f 100644 --- a/dom/url/moz.build +++ b/dom/url/moz.build @@ -8,25 +8,25 @@ with Files("**"): BUG_COMPONENT = ("Core", "DOM: Networking") EXPORTS.mozilla.dom += [ - 'URL.h', - 'URLSearchParams.h', + "URL.h", + "URLSearchParams.h", ] UNIFIED_SOURCES += [ - 'URL.cpp', - 'URLMainThread.cpp', - 'URLSearchParams.cpp', - 'URLWorker.cpp', + "URL.cpp", + "URLMainThread.cpp", + "URLSearchParams.cpp", + "URLWorker.cpp", ] LOCAL_INCLUDES += [ - '/netwerk/base', + "/netwerk/base", ] -MOCHITEST_MANIFESTS += ['tests/mochitest.ini'] -MOCHITEST_CHROME_MANIFESTS += [ 'tests/chrome.ini' ] -BROWSER_CHROME_MANIFESTS += [ 'tests/browser.ini' ] +MOCHITEST_MANIFESTS += ["tests/mochitest.ini"] +MOCHITEST_CHROME_MANIFESTS += ["tests/chrome.ini"] +BROWSER_CHROME_MANIFESTS += ["tests/browser.ini"] -include('/ipc/chromium/chromium-config.mozbuild') +include("/ipc/chromium/chromium-config.mozbuild") -FINAL_LIBRARY = 'xul' +FINAL_LIBRARY = "xul" diff --git a/dom/vr/moz.build b/dom/vr/moz.build index ef7e046605f3aa..f0c6fc70c059a6 100644 --- a/dom/vr/moz.build +++ b/dom/vr/moz.build @@ -8,62 +8,60 @@ with Files("**"): BUG_COMPONENT = ("Core", "WebVR") EXPORTS.mozilla.dom += [ - 'VRDisplay.h', - 'VRDisplayEvent.h', - 'VREventObserver.h', - 'VRServiceTest.h', - 'XRBoundedReferenceSpace.h', - 'XRFrame.h', - 'XRInputSource.h', - 'XRInputSourceArray.h', - 'XRNativeOrigin.h', - 'XRPermissionRequest.h', - 'XRPose.h', - 'XRReferenceSpace.h', - 'XRRenderState.h', - 'XRRigidTransform.h', - 'XRSession.h', - 'XRSpace.h', - 'XRSystem.h', - 'XRView.h', - 'XRViewerPose.h', - 'XRViewport.h', - ] + "VRDisplay.h", + "VRDisplayEvent.h", + "VREventObserver.h", + "VRServiceTest.h", + "XRBoundedReferenceSpace.h", + "XRFrame.h", + "XRInputSource.h", + "XRInputSourceArray.h", + "XRNativeOrigin.h", + "XRPermissionRequest.h", + "XRPose.h", + "XRReferenceSpace.h", + "XRRenderState.h", + "XRRigidTransform.h", + "XRSession.h", + "XRSpace.h", + "XRSystem.h", + "XRView.h", + "XRViewerPose.h", + "XRViewport.h", +] 
UNIFIED_SOURCES = [ - 'VRDisplay.cpp', - 'VRDisplayEvent.cpp', - 'VREventObserver.cpp', - 'VRServiceTest.cpp', - 'XRBoundedReferenceSpace.cpp', - 'XRFrame.cpp', - 'XRInputSource.cpp', - 'XRInputSourceArray.cpp', - 'XRInputSpace.cpp', - 'XRNativeOriginFixed.cpp', - 'XRNativeOriginLocal.cpp', - 'XRNativeOriginLocalFloor.cpp', - 'XRNativeOriginTracker.cpp', - 'XRNativeOriginViewer.cpp', - 'XRPermissionRequest.cpp', - 'XRPose.cpp', - 'XRReferenceSpace.cpp', - 'XRRenderState.cpp', - 'XRRigidTransform.cpp', - 'XRSession.cpp', - 'XRSpace.cpp', - 'XRSystem.cpp', - 'XRView.cpp', - 'XRViewerPose.cpp', - 'XRViewport.cpp', - ] + "VRDisplay.cpp", + "VRDisplayEvent.cpp", + "VREventObserver.cpp", + "VRServiceTest.cpp", + "XRBoundedReferenceSpace.cpp", + "XRFrame.cpp", + "XRInputSource.cpp", + "XRInputSourceArray.cpp", + "XRInputSpace.cpp", + "XRNativeOriginFixed.cpp", + "XRNativeOriginLocal.cpp", + "XRNativeOriginLocalFloor.cpp", + "XRNativeOriginTracker.cpp", + "XRNativeOriginViewer.cpp", + "XRPermissionRequest.cpp", + "XRPose.cpp", + "XRReferenceSpace.cpp", + "XRRenderState.cpp", + "XRRigidTransform.cpp", + "XRSession.cpp", + "XRSpace.cpp", + "XRSystem.cpp", + "XRView.cpp", + "XRViewerPose.cpp", + "XRViewport.cpp", +] -include('/ipc/chromium/chromium-config.mozbuild') +include("/ipc/chromium/chromium-config.mozbuild") -FINAL_LIBRARY = 'xul' -LOCAL_INCLUDES += [ - '/dom/base' -] +FINAL_LIBRARY = "xul" +LOCAL_INCLUDES += ["/dom/base"] -MOCHITEST_MANIFESTS += ['test/mochitest/mochitest.ini'] -REFTEST_MANIFESTS += ['test/reftest/reftest.list'] \ No newline at end of file +MOCHITEST_MANIFESTS += ["test/mochitest/mochitest.ini"] +REFTEST_MANIFESTS += ["test/reftest/reftest.list"] diff --git a/dom/webauthn/moz.build b/dom/webauthn/moz.build index aff35672c2e2ba..dab54ae27a4ae2 100644 --- a/dom/webauthn/moz.build +++ b/dom/webauthn/moz.build @@ -7,83 +7,79 @@ with Files("**"): BUG_COMPONENT = ("Core", "DOM: Web Authentication") -IPDL_SOURCES += [ - 'PWebAuthnTransaction.ipdl' -] +IPDL_SOURCES += ["PWebAuthnTransaction.ipdl"] -XPIDL_SOURCES += [ - 'nsIU2FTokenManager.idl' -] +XPIDL_SOURCES += ["nsIU2FTokenManager.idl"] -XPIDL_MODULE = 'dom_webauthn' +XPIDL_MODULE = "dom_webauthn" EXPORTS.mozilla.dom += [ - 'AuthenticatorAssertionResponse.h', - 'AuthenticatorAttestationResponse.h', - 'AuthenticatorResponse.h', - 'PublicKeyCredential.h', - 'U2FHIDTokenManager.h', - 'U2FSoftTokenManager.h', - 'U2FTokenManager.h', - 'U2FTokenTransport.h', - 'WebAuthnCBORUtil.h', - 'WebAuthnManager.h', - 'WebAuthnManagerBase.h', - 'WebAuthnTransactionChild.h', - 'WebAuthnTransactionParent.h', - 'WebAuthnUtil.h', - 'winwebauthn/webauthn.h', + "AuthenticatorAssertionResponse.h", + "AuthenticatorAttestationResponse.h", + "AuthenticatorResponse.h", + "PublicKeyCredential.h", + "U2FHIDTokenManager.h", + "U2FSoftTokenManager.h", + "U2FTokenManager.h", + "U2FTokenTransport.h", + "WebAuthnCBORUtil.h", + "WebAuthnManager.h", + "WebAuthnManagerBase.h", + "WebAuthnTransactionChild.h", + "WebAuthnTransactionParent.h", + "WebAuthnUtil.h", + "winwebauthn/webauthn.h", ] UNIFIED_SOURCES += [ - 'AuthenticatorAssertionResponse.cpp', - 'AuthenticatorAttestationResponse.cpp', - 'AuthenticatorResponse.cpp', - 'cbor-cpp/src/encoder.cpp', - 'cbor-cpp/src/output_dynamic.cpp', - 'PublicKeyCredential.cpp', - 'U2FHIDTokenManager.cpp', - 'U2FSoftTokenManager.cpp', - 'U2FTokenManager.cpp', - 'WebAuthnCBORUtil.cpp', - 'WebAuthnManager.cpp', - 'WebAuthnManagerBase.cpp', - 'WebAuthnTransactionChild.cpp', - 'WebAuthnTransactionParent.cpp', - 'WebAuthnUtil.cpp', + 
"AuthenticatorAssertionResponse.cpp", + "AuthenticatorAttestationResponse.cpp", + "AuthenticatorResponse.cpp", + "cbor-cpp/src/encoder.cpp", + "cbor-cpp/src/output_dynamic.cpp", + "PublicKeyCredential.cpp", + "U2FHIDTokenManager.cpp", + "U2FSoftTokenManager.cpp", + "U2FTokenManager.cpp", + "WebAuthnCBORUtil.cpp", + "WebAuthnManager.cpp", + "WebAuthnManagerBase.cpp", + "WebAuthnTransactionChild.cpp", + "WebAuthnTransactionParent.cpp", + "WebAuthnUtil.cpp", ] -include('/ipc/chromium/chromium-config.mozbuild') +include("/ipc/chromium/chromium-config.mozbuild") -FINAL_LIBRARY = 'xul' +FINAL_LIBRARY = "xul" LOCAL_INCLUDES += [ - '/dom/base', - '/dom/crypto', - '/security/manager/ssl', - '/third_party/rust', + "/dom/base", + "/dom/crypto", + "/security/manager/ssl", + "/third_party/rust", ] -if CONFIG['MOZ_WIDGET_TOOLKIT'] == 'android': +if CONFIG["MOZ_WIDGET_TOOLKIT"] == "android": EXPORTS.mozilla.dom += [ - 'AndroidWebAuthnTokenManager.h', + "AndroidWebAuthnTokenManager.h", ] UNIFIED_SOURCES += [ - 'AndroidWebAuthnTokenManager.cpp', + "AndroidWebAuthnTokenManager.cpp", ] -if CONFIG['OS_ARCH'] == 'WINNT': +if CONFIG["OS_ARCH"] == "WINNT": OS_LIBS += [ - 'hid', + "hid", ] -if CONFIG['OS_TARGET'] == 'WINNT': +if CONFIG["OS_TARGET"] == "WINNT": EXPORTS.mozilla.dom += [ - 'WinWebAuthnManager.h', + "WinWebAuthnManager.h", ] UNIFIED_SOURCES += [ - 'WinWebAuthnManager.cpp', + "WinWebAuthnManager.cpp", ] -MOCHITEST_MANIFESTS += ['tests/mochitest.ini'] -BROWSER_CHROME_MANIFESTS += ['tests/browser/browser.ini'] +MOCHITEST_MANIFESTS += ["tests/mochitest.ini"] +BROWSER_CHROME_MANIFESTS += ["tests/browser/browser.ini"] diff --git a/dom/webbrowserpersist/moz.build b/dom/webbrowserpersist/moz.build index 686d6ef17a0437..cbcdeab61b4ca0 100644 --- a/dom/webbrowserpersist/moz.build +++ b/dom/webbrowserpersist/moz.build @@ -9,44 +9,44 @@ with Files("**"): BUG_COMPONENT = ("Core", "DOM: Core & HTML") XPIDL_SOURCES += [ - 'nsIWebBrowserPersist.idl', - 'nsIWebBrowserPersistDocument.idl', + "nsIWebBrowserPersist.idl", + "nsIWebBrowserPersistDocument.idl", ] -XPIDL_MODULE = 'webbrowserpersist' +XPIDL_MODULE = "webbrowserpersist" IPDL_SOURCES += [ - 'PWebBrowserPersistDocument.ipdl', - 'PWebBrowserPersistResources.ipdl', - 'PWebBrowserPersistSerialize.ipdl', + "PWebBrowserPersistDocument.ipdl", + "PWebBrowserPersistResources.ipdl", + "PWebBrowserPersistSerialize.ipdl", ] UNIFIED_SOURCES += [ - 'nsWebBrowserPersist.cpp', - 'WebBrowserPersistDocumentChild.cpp', - 'WebBrowserPersistDocumentParent.cpp', - 'WebBrowserPersistLocalDocument.cpp', - 'WebBrowserPersistRemoteDocument.cpp', - 'WebBrowserPersistResourcesChild.cpp', - 'WebBrowserPersistResourcesParent.cpp', - 'WebBrowserPersistSerializeChild.cpp', - 'WebBrowserPersistSerializeParent.cpp', + "nsWebBrowserPersist.cpp", + "WebBrowserPersistDocumentChild.cpp", + "WebBrowserPersistDocumentParent.cpp", + "WebBrowserPersistLocalDocument.cpp", + "WebBrowserPersistRemoteDocument.cpp", + "WebBrowserPersistResourcesChild.cpp", + "WebBrowserPersistResourcesParent.cpp", + "WebBrowserPersistSerializeChild.cpp", + "WebBrowserPersistSerializeParent.cpp", ] EXPORTS.mozilla += [ - 'WebBrowserPersistDocumentChild.h', - 'WebBrowserPersistDocumentParent.h', - 'WebBrowserPersistLocalDocument.h', + "WebBrowserPersistDocumentChild.h", + "WebBrowserPersistDocumentParent.h", + "WebBrowserPersistLocalDocument.h", ] -include('/ipc/chromium/chromium-config.mozbuild') +include("/ipc/chromium/chromium-config.mozbuild") -FINAL_LIBRARY = 'xul' +FINAL_LIBRARY = "xul" LOCAL_INCLUDES += [ - 
'/docshell/shistory', - '/dom/base', - '/dom/html', + "/docshell/shistory", + "/dom/base", + "/dom/html", ] -if CONFIG['CC_TYPE'] in ('clang', 'gcc'): - CXXFLAGS += ['-Wno-error=shadow'] +if CONFIG["CC_TYPE"] in ("clang", "gcc"): + CXXFLAGS += ["-Wno-error=shadow"] diff --git a/dom/webgpu/moz.build b/dom/webgpu/moz.build index 0be1b01d886673..17631f87db21a9 100644 --- a/dom/webgpu/moz.build +++ b/dom/webgpu/moz.build @@ -4,68 +4,67 @@ # License, v. 2.0. If a copy of the MPL was not distributed with this # file, You can obtain one at http://mozilla.org/MPL/2.0/. -with Files('**'): - BUG_COMPONENT = ('Core', 'Graphics: WebGPU') +with Files("**"): + BUG_COMPONENT = ("Core", "Graphics: WebGPU") MOCHITEST_MANIFESTS += [ - 'mochitest/mochitest-no-pref.ini', - 'mochitest/mochitest.ini', + "mochitest/mochitest-no-pref.ini", + "mochitest/mochitest.ini", ] -DIRS += [ -] +DIRS += [] h_and_cpp = [ - 'Adapter', - 'BindGroup', - 'BindGroupLayout', - 'Buffer', - 'CanvasContext', - 'CommandBuffer', - 'CommandEncoder', - 'ComputePassEncoder', - 'ComputePipeline', - 'Device', - 'DeviceLostInfo', - 'Fence', - 'Instance', - 'ObjectModel', - 'OutOfMemoryError', - 'PipelineLayout', - 'Queue', - 'RenderBundle', - 'RenderBundleEncoder', - 'RenderPassEncoder', - 'RenderPipeline', - 'Sampler', - 'ShaderModule', - 'SwapChain', - 'Texture', - 'TextureView', - 'ValidationError', + "Adapter", + "BindGroup", + "BindGroupLayout", + "Buffer", + "CanvasContext", + "CommandBuffer", + "CommandEncoder", + "ComputePassEncoder", + "ComputePipeline", + "Device", + "DeviceLostInfo", + "Fence", + "Instance", + "ObjectModel", + "OutOfMemoryError", + "PipelineLayout", + "Queue", + "RenderBundle", + "RenderBundleEncoder", + "RenderPassEncoder", + "RenderPipeline", + "Sampler", + "ShaderModule", + "SwapChain", + "Texture", + "TextureView", + "ValidationError", ] -EXPORTS.mozilla.webgpu += [x + '.h' for x in h_and_cpp] -UNIFIED_SOURCES += [x + '.cpp' for x in h_and_cpp] +EXPORTS.mozilla.webgpu += [x + ".h" for x in h_and_cpp] +UNIFIED_SOURCES += [x + ".cpp" for x in h_and_cpp] IPDL_SOURCES += [ - 'ipc/PWebGPU.ipdl', + "ipc/PWebGPU.ipdl", ] EXPORTS.mozilla.webgpu += [ - 'ipc/WebGPUChild.h', - 'ipc/WebGPUParent.h', - 'ipc/WebGPUSerialize.h', - 'ipc/WebGPUTypes.h', + "ipc/WebGPUChild.h", + "ipc/WebGPUParent.h", + "ipc/WebGPUSerialize.h", + "ipc/WebGPUTypes.h", ] UNIFIED_SOURCES += [ - 'ipc/WebGPUChild.cpp', - 'ipc/WebGPUParent.cpp', + "ipc/WebGPUChild.cpp", + "ipc/WebGPUParent.cpp", ] -if CONFIG['CC_TYPE'] in ('clang', 'clang-cl'): - CXXFLAGS += ['-Werror=implicit-int-conversion'] +if CONFIG["CC_TYPE"] in ("clang", "clang-cl"): + CXXFLAGS += ["-Werror=implicit-int-conversion"] -include('/ipc/chromium/chromium-config.mozbuild') +include("/ipc/chromium/chromium-config.mozbuild") -FINAL_LIBRARY = 'xul' +FINAL_LIBRARY = "xul" diff --git a/dom/webidl/moz.build b/dom/webidl/moz.build index 8b2b2741d28620..dff59bba6f4d5e 100644 --- a/dom/webidl/moz.build +++ b/dom/webidl/moz.build @@ -377,769 +377,771 @@ with Files("Worker*"): BUG_COMPONENT = ("Core", "DOM: Workers") GENERATED_WEBIDL_FILES = [ - 'CSS2Properties.webidl', + "CSS2Properties.webidl", ] PREPROCESSED_WEBIDL_FILES = [ - 'Animation.webidl', - 'Node.webidl', - 'Window.webidl', + "Animation.webidl", + "Node.webidl", + "Window.webidl", ] WEBIDL_FILES = [ - 'AbortController.webidl', - 'AbortSignal.webidl', - 'AbstractRange.webidl', - 'AbstractWorker.webidl', - 'AccessibilityRole.webidl', - 'AddonManager.webidl', - 'AnalyserNode.webidl', - 'Animatable.webidl', - 'AnimationEffect.webidl', - 
'AnimationEvent.webidl', - 'AnimationTimeline.webidl', - 'AnonymousContent.webidl', - 'AppInfo.webidl', - 'AppNotificationServiceOptions.webidl', - 'APZTestData.webidl', - 'AriaAttributes.webidl', - 'Attr.webidl', - 'AudioBuffer.webidl', - 'AudioBufferSourceNode.webidl', - 'AudioContext.webidl', - 'AudioDestinationNode.webidl', - 'AudioListener.webidl', - 'AudioNode.webidl', - 'AudioParam.webidl', - 'AudioParamDescriptor.webidl', - 'AudioParamMap.webidl', - 'AudioProcessingEvent.webidl', - 'AudioScheduledSourceNode.webidl', - 'AudioTrack.webidl', - 'AudioTrackList.webidl', - 'AudioWorklet.webidl', - 'AudioWorkletGlobalScope.webidl', - 'AudioWorkletNode.webidl', - 'AudioWorkletProcessor.webidl', - 'AutocompleteInfo.webidl', - 'BarProp.webidl', - 'BaseAudioContext.webidl', - 'BaseKeyframeTypes.webidl', - 'BasicCardPayment.webidl', - 'BatteryManager.webidl', - 'BeforeUnloadEvent.webidl', - 'BiquadFilterNode.webidl', - 'Blob.webidl', - 'BroadcastChannel.webidl', - 'BrowserElementDictionaries.webidl', - 'Cache.webidl', - 'CacheStorage.webidl', - 'CancelContentJSOptions.webidl', - 'CanvasCaptureMediaStream.webidl', - 'CanvasRenderingContext2D.webidl', - 'CaretPosition.webidl', - 'CDATASection.webidl', - 'ChannelMergerNode.webidl', - 'ChannelSplitterNode.webidl', - 'CharacterData.webidl', - 'CheckerboardReportService.webidl', - 'ChildNode.webidl', - 'ChildSHistory.webidl', - 'ChromeNodeList.webidl', - 'Client.webidl', - 'Clients.webidl', - 'Clipboard.webidl', - 'ClipboardEvent.webidl', - 'CommandEvent.webidl', - 'Comment.webidl', - 'CompositionEvent.webidl', - 'Console.webidl', - 'ConstantSourceNode.webidl', - 'ConvolverNode.webidl', - 'CreateOfferRequest.webidl', - 'CredentialManagement.webidl', - 'Crypto.webidl', - 'CSPDictionaries.webidl', - 'CSPReport.webidl', - 'CSS.webidl', - 'CSSAnimation.webidl', - 'CSSConditionRule.webidl', - 'CSSCounterStyleRule.webidl', - 'CSSFontFaceRule.webidl', - 'CSSFontFeatureValuesRule.webidl', - 'CSSGroupingRule.webidl', - 'CSSImportRule.webidl', - 'CSSKeyframeRule.webidl', - 'CSSKeyframesRule.webidl', - 'CSSMediaRule.webidl', - 'CSSMozDocumentRule.webidl', - 'CSSNamespaceRule.webidl', - 'CSSPageRule.webidl', - 'CSSPseudoElement.webidl', - 'CSSRule.webidl', - 'CSSRuleList.webidl', - 'CSSStyleDeclaration.webidl', - 'CSSStyleRule.webidl', - 'CSSStyleSheet.webidl', - 'CSSSupportsRule.webidl', - 'CSSTransition.webidl', - 'CustomElementRegistry.webidl', - 'DataTransfer.webidl', - 'DataTransferItem.webidl', - 'DataTransferItemList.webidl', - 'DecoderDoctorNotification.webidl', - 'DedicatedWorkerGlobalScope.webidl', - 'DelayNode.webidl', - 'DeviceMotionEvent.webidl', - 'Directory.webidl', - 'Document.webidl', - 'DocumentFragment.webidl', - 'DocumentOrShadowRoot.webidl', - 'DocumentTimeline.webidl', - 'DocumentType.webidl', - 'DOMException.webidl', - 'DOMImplementation.webidl', - 'DOMLocalization.webidl', - 'DOMMatrix.webidl', - 'DOMParser.webidl', - 'DOMPoint.webidl', - 'DOMQuad.webidl', - 'DOMRect.webidl', - 'DOMRectList.webidl', - 'DOMRequest.webidl', - 'DOMStringList.webidl', - 'DOMStringMap.webidl', - 'DOMTokenList.webidl', - 'DragEvent.webidl', - 'DynamicsCompressorNode.webidl', - 'Element.webidl', - 'ElementInternals.webidl', - 'Event.webidl', - 'EventHandler.webidl', - 'EventListener.webidl', - 'EventSource.webidl', - 'EventTarget.webidl', - 'ExtendableEvent.webidl', - 'ExtendableMessageEvent.webidl', - 'FailedCertSecurityInfo.webidl', - 'FakePluginTagInit.webidl', - 'FeaturePolicy.webidl', - 'Fetch.webidl', - 'FetchEvent.webidl', - 'FetchObserver.webidl', - 
'File.webidl', - 'FileList.webidl', - 'FileMode.webidl', - 'FileReader.webidl', - 'FileReaderSync.webidl', - 'FileSystem.webidl', - 'FileSystemDirectoryEntry.webidl', - 'FileSystemDirectoryReader.webidl', - 'FileSystemEntry.webidl', - 'FileSystemFileEntry.webidl', - 'FinalizationRegistry.webidl', - 'FocusEvent.webidl', - 'FontFace.webidl', - 'FontFaceSet.webidl', - 'FontFaceSource.webidl', - 'FormData.webidl', - 'Function.webidl', - 'GainNode.webidl', - 'Gamepad.webidl', - 'GamepadHapticActuator.webidl', - 'GamepadLightIndicator.webidl', - 'GamepadPose.webidl', - 'GamepadServiceTest.webidl', - 'GamepadTouch.webidl', - 'Geolocation.webidl', - 'GeolocationCoordinates.webidl', - 'GeolocationPosition.webidl', - 'GeolocationPositionError.webidl', - 'GeometryUtils.webidl', - 'GetUserMediaRequest.webidl', - 'Grid.webidl', - 'Headers.webidl', - 'History.webidl', - 'HTMLAllCollection.webidl', - 'HTMLAnchorElement.webidl', - 'HTMLAreaElement.webidl', - 'HTMLAudioElement.webidl', - 'HTMLBaseElement.webidl', - 'HTMLBodyElement.webidl', - 'HTMLBRElement.webidl', - 'HTMLButtonElement.webidl', - 'HTMLCanvasElement.webidl', - 'HTMLCollection.webidl', - 'HTMLDataElement.webidl', - 'HTMLDataListElement.webidl', - 'HTMLDetailsElement.webidl', - 'HTMLDialogElement.webidl', - 'HTMLDirectoryElement.webidl', - 'HTMLDivElement.webidl', - 'HTMLDListElement.webidl', - 'HTMLDocument.webidl', - 'HTMLElement.webidl', - 'HTMLEmbedElement.webidl', - 'HTMLFieldSetElement.webidl', - 'HTMLFontElement.webidl', - 'HTMLFormControlsCollection.webidl', - 'HTMLFormElement.webidl', - 'HTMLFrameElement.webidl', - 'HTMLFrameSetElement.webidl', - 'HTMLHeadElement.webidl', - 'HTMLHeadingElement.webidl', - 'HTMLHRElement.webidl', - 'HTMLHtmlElement.webidl', - 'HTMLHyperlinkElementUtils.webidl', - 'HTMLIFrameElement.webidl', - 'HTMLImageElement.webidl', - 'HTMLInputElement.webidl', - 'HTMLLabelElement.webidl', - 'HTMLLegendElement.webidl', - 'HTMLLIElement.webidl', - 'HTMLLinkElement.webidl', - 'HTMLMapElement.webidl', - 'HTMLMarqueeElement.webidl', - 'HTMLMediaElement.webidl', - 'HTMLMenuElement.webidl', - 'HTMLMenuItemElement.webidl', - 'HTMLMetaElement.webidl', - 'HTMLMeterElement.webidl', - 'HTMLModElement.webidl', - 'HTMLObjectElement.webidl', - 'HTMLOListElement.webidl', - 'HTMLOptGroupElement.webidl', - 'HTMLOptionElement.webidl', - 'HTMLOptionsCollection.webidl', - 'HTMLOutputElement.webidl', - 'HTMLParagraphElement.webidl', - 'HTMLParamElement.webidl', - 'HTMLPictureElement.webidl', - 'HTMLPreElement.webidl', - 'HTMLProgressElement.webidl', - 'HTMLQuoteElement.webidl', - 'HTMLScriptElement.webidl', - 'HTMLSelectElement.webidl', - 'HTMLSlotElement.webidl', - 'HTMLSourceElement.webidl', - 'HTMLSpanElement.webidl', - 'HTMLStyleElement.webidl', - 'HTMLTableCaptionElement.webidl', - 'HTMLTableCellElement.webidl', - 'HTMLTableColElement.webidl', - 'HTMLTableElement.webidl', - 'HTMLTableRowElement.webidl', - 'HTMLTableSectionElement.webidl', - 'HTMLTemplateElement.webidl', - 'HTMLTextAreaElement.webidl', - 'HTMLTimeElement.webidl', - 'HTMLTitleElement.webidl', - 'HTMLTrackElement.webidl', - 'HTMLUListElement.webidl', - 'HTMLVideoElement.webidl', - 'IDBCursor.webidl', - 'IDBDatabase.webidl', - 'IDBFactory.webidl', - 'IDBFileHandle.webidl', - 'IDBFileRequest.webidl', - 'IDBIndex.webidl', - 'IDBKeyRange.webidl', - 'IDBMutableFile.webidl', - 'IDBObjectStore.webidl', - 'IDBOpenDBRequest.webidl', - 'IDBRequest.webidl', - 'IDBTransaction.webidl', - 'IDBVersionChangeEvent.webidl', - 'IdleDeadline.webidl', - 'IIRFilterNode.webidl', - 
'ImageBitmap.webidl', - 'ImageBitmapRenderingContext.webidl', - 'ImageCapture.webidl', - 'ImageData.webidl', - 'ImageDocument.webidl', - 'InputEvent.webidl', - 'IntersectionObserver.webidl', - 'IntlUtils.webidl', - 'IterableIterator.webidl', - 'KeyAlgorithm.webidl', - 'KeyboardEvent.webidl', - 'KeyEvent.webidl', - 'KeyframeAnimationOptions.webidl', - 'KeyframeEffect.webidl', - 'KeyIdsInitData.webidl', - 'LinkStyle.webidl', - 'LoadURIOptions.webidl', - 'Localization.webidl', - 'Location.webidl', - 'MathMLElement.webidl', - 'MediaCapabilities.webidl', - 'MediaDebugInfo.webidl', - 'MediaDeviceInfo.webidl', - 'MediaDevices.webidl', - 'MediaElementAudioSourceNode.webidl', - 'MediaEncryptedEvent.webidl', - 'MediaError.webidl', - 'MediaKeyError.webidl', - 'MediaKeyMessageEvent.webidl', - 'MediaKeys.webidl', - 'MediaKeySession.webidl', - 'MediaKeysRequestStatus.webidl', - 'MediaKeyStatusMap.webidl', - 'MediaKeySystemAccess.webidl', - 'MediaList.webidl', - 'MediaQueryList.webidl', - 'MediaRecorder.webidl', - 'MediaSession.webidl', - 'MediaSource.webidl', - 'MediaStream.webidl', - 'MediaStreamAudioDestinationNode.webidl', - 'MediaStreamAudioSourceNode.webidl', - 'MediaStreamError.webidl', - 'MediaStreamTrack.webidl', - 'MediaStreamTrackAudioSourceNode.webidl', - 'MediaTrackSettings.webidl', - 'MediaTrackSupportedConstraints.webidl', - 'MerchantValidationEvent.webidl', - 'MessageChannel.webidl', - 'MessageEvent.webidl', - 'MessagePort.webidl', - 'MIDIAccess.webidl', - 'MIDIInput.webidl', - 'MIDIInputMap.webidl', - 'MIDIMessageEvent.webidl', - 'MIDIOptions.webidl', - 'MIDIOutput.webidl', - 'MIDIOutputMap.webidl', - 'MIDIPort.webidl', - 'MimeType.webidl', - 'MimeTypeArray.webidl', - 'MouseEvent.webidl', - 'MouseScrollEvent.webidl', - 'MozFrameLoaderOwner.webidl', - 'MutationEvent.webidl', - 'MutationObserver.webidl', - 'NamedNodeMap.webidl', - 'NativeOSFileInternals.webidl', - 'Navigator.webidl', - 'NetDashboard.webidl', - 'NetErrorInfo.webidl', - 'NetworkInformation.webidl', - 'NetworkOptions.webidl', - 'NodeFilter.webidl', - 'NodeIterator.webidl', - 'NodeList.webidl', - 'Notification.webidl', - 'NotificationEvent.webidl', - 'NotifyPaintEvent.webidl', - 'OfflineAudioContext.webidl', - 'OfflineResourceList.webidl', - 'OffscreenCanvas.webidl', - 'OscillatorNode.webidl', - 'PaintRequest.webidl', - 'PaintRequestList.webidl', - 'PaintWorkletGlobalScope.webidl', - 'PannerNode.webidl', - 'ParentNode.webidl', - 'PaymentAddress.webidl', - 'PaymentMethodChangeEvent.webidl', - 'PaymentRequest.webidl', - 'PaymentRequestUpdateEvent.webidl', - 'PaymentResponse.webidl', - 'Performance.webidl', - 'PerformanceEntry.webidl', - 'PerformanceMark.webidl', - 'PerformanceMeasure.webidl', - 'PerformanceNavigation.webidl', - 'PerformanceNavigationTiming.webidl', - 'PerformanceObserver.webidl', - 'PerformanceObserverEntryList.webidl', - 'PerformanceResourceTiming.webidl', - 'PerformanceServerTiming.webidl', - 'PerformanceTiming.webidl', - 'PeriodicWave.webidl', - 'Permissions.webidl', - 'PermissionStatus.webidl', - 'Plugin.webidl', - 'PluginArray.webidl', - 'PointerEvent.webidl', - 'Presentation.webidl', - 'PresentationAvailability.webidl', - 'PresentationConnection.webidl', - 'PresentationConnectionList.webidl', - 'PresentationReceiver.webidl', - 'PresentationRequest.webidl', - 'ProcessingInstruction.webidl', - 'ProfileTimelineMarker.webidl', - 'Promise.webidl', - 'PushEvent.webidl', - 'PushManager.webidl', - 'PushManager.webidl', - 'PushMessageData.webidl', - 'PushSubscription.webidl', - 'PushSubscriptionOptions.webidl', - 
'RadioNodeList.webidl', - 'Range.webidl', - 'ReferrerPolicy.webidl', - 'Reporting.webidl', - 'Request.webidl', - 'ResizeObserver.webidl', - 'Response.webidl', - 'Sanitizer.webidl', - 'Screen.webidl', - 'ScreenOrientation.webidl', - 'ScriptProcessorNode.webidl', - 'ScrollAreaEvent.webidl', - 'Selection.webidl', - 'ServiceWorker.webidl', - 'ServiceWorkerContainer.webidl', - 'ServiceWorkerGlobalScope.webidl', - 'ServiceWorkerRegistration.webidl', - 'ShadowRoot.webidl', - 'SharedWorker.webidl', - 'SharedWorkerGlobalScope.webidl', - 'SimpleGestureEvent.webidl', - 'SocketCommon.webidl', - 'SourceBuffer.webidl', - 'SourceBufferList.webidl', - 'StaticRange.webidl', - 'StereoPannerNode.webidl', - 'Storage.webidl', - 'StorageEvent.webidl', - 'StorageManager.webidl', - 'StorageType.webidl', - 'StreamFilter.webidl', - 'StreamFilterDataEvent.webidl', - 'StructuredCloneTester.webidl', - 'StyleSheet.webidl', - 'StyleSheetList.webidl', - 'SubtleCrypto.webidl', - 'SVGAElement.webidl', - 'SVGAngle.webidl', - 'SVGAnimatedAngle.webidl', - 'SVGAnimatedBoolean.webidl', - 'SVGAnimatedEnumeration.webidl', - 'SVGAnimatedInteger.webidl', - 'SVGAnimatedLength.webidl', - 'SVGAnimatedLengthList.webidl', - 'SVGAnimatedNumber.webidl', - 'SVGAnimatedNumberList.webidl', - 'SVGAnimatedPathData.webidl', - 'SVGAnimatedPoints.webidl', - 'SVGAnimatedPreserveAspectRatio.webidl', - 'SVGAnimatedRect.webidl', - 'SVGAnimatedString.webidl', - 'SVGAnimatedTransformList.webidl', - 'SVGAnimateElement.webidl', - 'SVGAnimateMotionElement.webidl', - 'SVGAnimateTransformElement.webidl', - 'SVGAnimationElement.webidl', - 'SVGCircleElement.webidl', - 'SVGClipPathElement.webidl', - 'SVGComponentTransferFunctionElement.webidl', - 'SVGDefsElement.webidl', - 'SVGDescElement.webidl', - 'SVGElement.webidl', - 'SVGEllipseElement.webidl', - 'SVGFEBlendElement.webidl', - 'SVGFEColorMatrixElement.webidl', - 'SVGFEComponentTransferElement.webidl', - 'SVGFECompositeElement.webidl', - 'SVGFEConvolveMatrixElement.webidl', - 'SVGFEDiffuseLightingElement.webidl', - 'SVGFEDisplacementMapElement.webidl', - 'SVGFEDistantLightElement.webidl', - 'SVGFEDropShadowElement.webidl', - 'SVGFEFloodElement.webidl', - 'SVGFEFuncAElement.webidl', - 'SVGFEFuncBElement.webidl', - 'SVGFEFuncGElement.webidl', - 'SVGFEFuncRElement.webidl', - 'SVGFEGaussianBlurElement.webidl', - 'SVGFEImageElement.webidl', - 'SVGFEMergeElement.webidl', - 'SVGFEMergeNodeElement.webidl', - 'SVGFEMorphologyElement.webidl', - 'SVGFEOffsetElement.webidl', - 'SVGFEPointLightElement.webidl', - 'SVGFESpecularLightingElement.webidl', - 'SVGFESpotLightElement.webidl', - 'SVGFETileElement.webidl', - 'SVGFETurbulenceElement.webidl', - 'SVGFilterElement.webidl', - 'SVGFilterPrimitiveStandardAttributes.webidl', - 'SVGFitToViewBox.webidl', - 'SVGForeignObjectElement.webidl', - 'SVGGElement.webidl', - 'SVGGeometryElement.webidl', - 'SVGGradientElement.webidl', - 'SVGGraphicsElement.webidl', - 'SVGImageElement.webidl', - 'SVGLength.webidl', - 'SVGLengthList.webidl', - 'SVGLinearGradientElement.webidl', - 'SVGLineElement.webidl', - 'SVGMarkerElement.webidl', - 'SVGMaskElement.webidl', - 'SVGMatrix.webidl', - 'SVGMetadataElement.webidl', - 'SVGMPathElement.webidl', - 'SVGNumber.webidl', - 'SVGNumberList.webidl', - 'SVGPathElement.webidl', - 'SVGPathSeg.webidl', - 'SVGPathSegList.webidl', - 'SVGPatternElement.webidl', - 'SVGPoint.webidl', - 'SVGPointList.webidl', - 'SVGPolygonElement.webidl', - 'SVGPolylineElement.webidl', - 'SVGPreserveAspectRatio.webidl', - 'SVGRadialGradientElement.webidl', - 'SVGRect.webidl', 
- 'SVGRectElement.webidl', - 'SVGScriptElement.webidl', - 'SVGSetElement.webidl', - 'SVGStopElement.webidl', - 'SVGStringList.webidl', - 'SVGStyleElement.webidl', - 'SVGSVGElement.webidl', - 'SVGSwitchElement.webidl', - 'SVGSymbolElement.webidl', - 'SVGTests.webidl', - 'SVGTextContentElement.webidl', - 'SVGTextElement.webidl', - 'SVGTextPathElement.webidl', - 'SVGTextPositioningElement.webidl', - 'SVGTitleElement.webidl', - 'SVGTransform.webidl', - 'SVGTransformList.webidl', - 'SVGTSpanElement.webidl', - 'SVGUnitTypes.webidl', - 'SVGURIReference.webidl', - 'SVGUseElement.webidl', - 'SVGViewElement.webidl', - 'SVGZoomAndPan.webidl', - 'TCPServerSocket.webidl', - 'TCPServerSocketEvent.webidl', - 'TCPSocket.webidl', - 'TCPSocketErrorEvent.webidl', - 'TCPSocketEvent.webidl', - 'Text.webidl', - 'TextClause.webidl', - 'TextDecoder.webidl', - 'TextEncoder.webidl', - 'TextTrack.webidl', - 'TextTrackCue.webidl', - 'TextTrackCueList.webidl', - 'TextTrackList.webidl', - 'TimeEvent.webidl', - 'TimeRanges.webidl', - 'Touch.webidl', - 'TouchEvent.webidl', - 'TouchList.webidl', - 'TransitionEvent.webidl', - 'TreeColumn.webidl', - 'TreeColumns.webidl', - 'TreeContentView.webidl', - 'TreeView.webidl', - 'TreeWalker.webidl', - 'U2F.webidl', - 'UDPMessageEvent.webidl', - 'UDPSocket.webidl', - 'UIEvent.webidl', - 'URL.webidl', - 'URLSearchParams.webidl', - 'ValidityState.webidl', - 'VideoPlaybackQuality.webidl', - 'VideoTrack.webidl', - 'VideoTrackList.webidl', - 'VisualViewport.webidl', - 'VRDisplay.webidl', - 'VRDisplayEvent.webidl', - 'VRServiceTest.webidl', - 'VTTCue.webidl', - 'VTTRegion.webidl', - 'WaveShaperNode.webidl', - 'WebAuthentication.webidl', - 'WebComponents.webidl', - 'WebGL2RenderingContext.webidl', - 'WebGLRenderingContext.webidl', - 'WebGPU.webidl', - 'WebSocket.webidl', - 'WebXR.webidl', - 'WheelEvent.webidl', - 'WidevineCDMManifest.webidl', - 'WindowOrWorkerGlobalScope.webidl', - 'WindowRoot.webidl', - 'Worker.webidl', - 'WorkerDebuggerGlobalScope.webidl', - 'WorkerGlobalScope.webidl', - 'WorkerLocation.webidl', - 'WorkerNavigator.webidl', - 'Worklet.webidl', - 'WorkletGlobalScope.webidl', - 'XMLDocument.webidl', - 'XMLHttpRequest.webidl', - 'XMLHttpRequestEventTarget.webidl', - 'XMLHttpRequestUpload.webidl', - 'XMLSerializer.webidl', - 'XPathEvaluator.webidl', - 'XPathExpression.webidl', - 'XPathNSResolver.webidl', - 'XPathResult.webidl', - 'XSLTProcessor.webidl', - 'XULCommandEvent.webidl', - 'XULElement.webidl', - 'XULPopupElement.webidl', + "AbortController.webidl", + "AbortSignal.webidl", + "AbstractRange.webidl", + "AbstractWorker.webidl", + "AccessibilityRole.webidl", + "AddonManager.webidl", + "AnalyserNode.webidl", + "Animatable.webidl", + "AnimationEffect.webidl", + "AnimationEvent.webidl", + "AnimationTimeline.webidl", + "AnonymousContent.webidl", + "AppInfo.webidl", + "AppNotificationServiceOptions.webidl", + "APZTestData.webidl", + "AriaAttributes.webidl", + "Attr.webidl", + "AudioBuffer.webidl", + "AudioBufferSourceNode.webidl", + "AudioContext.webidl", + "AudioDestinationNode.webidl", + "AudioListener.webidl", + "AudioNode.webidl", + "AudioParam.webidl", + "AudioParamDescriptor.webidl", + "AudioParamMap.webidl", + "AudioProcessingEvent.webidl", + "AudioScheduledSourceNode.webidl", + "AudioTrack.webidl", + "AudioTrackList.webidl", + "AudioWorklet.webidl", + "AudioWorkletGlobalScope.webidl", + "AudioWorkletNode.webidl", + "AudioWorkletProcessor.webidl", + "AutocompleteInfo.webidl", + "BarProp.webidl", + "BaseAudioContext.webidl", + "BaseKeyframeTypes.webidl", + 
"BasicCardPayment.webidl", + "BatteryManager.webidl", + "BeforeUnloadEvent.webidl", + "BiquadFilterNode.webidl", + "Blob.webidl", + "BroadcastChannel.webidl", + "BrowserElementDictionaries.webidl", + "Cache.webidl", + "CacheStorage.webidl", + "CancelContentJSOptions.webidl", + "CanvasCaptureMediaStream.webidl", + "CanvasRenderingContext2D.webidl", + "CaretPosition.webidl", + "CDATASection.webidl", + "ChannelMergerNode.webidl", + "ChannelSplitterNode.webidl", + "CharacterData.webidl", + "CheckerboardReportService.webidl", + "ChildNode.webidl", + "ChildSHistory.webidl", + "ChromeNodeList.webidl", + "Client.webidl", + "Clients.webidl", + "Clipboard.webidl", + "ClipboardEvent.webidl", + "CommandEvent.webidl", + "Comment.webidl", + "CompositionEvent.webidl", + "Console.webidl", + "ConstantSourceNode.webidl", + "ConvolverNode.webidl", + "CreateOfferRequest.webidl", + "CredentialManagement.webidl", + "Crypto.webidl", + "CSPDictionaries.webidl", + "CSPReport.webidl", + "CSS.webidl", + "CSSAnimation.webidl", + "CSSConditionRule.webidl", + "CSSCounterStyleRule.webidl", + "CSSFontFaceRule.webidl", + "CSSFontFeatureValuesRule.webidl", + "CSSGroupingRule.webidl", + "CSSImportRule.webidl", + "CSSKeyframeRule.webidl", + "CSSKeyframesRule.webidl", + "CSSMediaRule.webidl", + "CSSMozDocumentRule.webidl", + "CSSNamespaceRule.webidl", + "CSSPageRule.webidl", + "CSSPseudoElement.webidl", + "CSSRule.webidl", + "CSSRuleList.webidl", + "CSSStyleDeclaration.webidl", + "CSSStyleRule.webidl", + "CSSStyleSheet.webidl", + "CSSSupportsRule.webidl", + "CSSTransition.webidl", + "CustomElementRegistry.webidl", + "DataTransfer.webidl", + "DataTransferItem.webidl", + "DataTransferItemList.webidl", + "DecoderDoctorNotification.webidl", + "DedicatedWorkerGlobalScope.webidl", + "DelayNode.webidl", + "DeviceMotionEvent.webidl", + "Directory.webidl", + "Document.webidl", + "DocumentFragment.webidl", + "DocumentOrShadowRoot.webidl", + "DocumentTimeline.webidl", + "DocumentType.webidl", + "DOMException.webidl", + "DOMImplementation.webidl", + "DOMLocalization.webidl", + "DOMMatrix.webidl", + "DOMParser.webidl", + "DOMPoint.webidl", + "DOMQuad.webidl", + "DOMRect.webidl", + "DOMRectList.webidl", + "DOMRequest.webidl", + "DOMStringList.webidl", + "DOMStringMap.webidl", + "DOMTokenList.webidl", + "DragEvent.webidl", + "DynamicsCompressorNode.webidl", + "Element.webidl", + "ElementInternals.webidl", + "Event.webidl", + "EventHandler.webidl", + "EventListener.webidl", + "EventSource.webidl", + "EventTarget.webidl", + "ExtendableEvent.webidl", + "ExtendableMessageEvent.webidl", + "FailedCertSecurityInfo.webidl", + "FakePluginTagInit.webidl", + "FeaturePolicy.webidl", + "Fetch.webidl", + "FetchEvent.webidl", + "FetchObserver.webidl", + "File.webidl", + "FileList.webidl", + "FileMode.webidl", + "FileReader.webidl", + "FileReaderSync.webidl", + "FileSystem.webidl", + "FileSystemDirectoryEntry.webidl", + "FileSystemDirectoryReader.webidl", + "FileSystemEntry.webidl", + "FileSystemFileEntry.webidl", + "FinalizationRegistry.webidl", + "FocusEvent.webidl", + "FontFace.webidl", + "FontFaceSet.webidl", + "FontFaceSource.webidl", + "FormData.webidl", + "Function.webidl", + "GainNode.webidl", + "Gamepad.webidl", + "GamepadHapticActuator.webidl", + "GamepadLightIndicator.webidl", + "GamepadPose.webidl", + "GamepadServiceTest.webidl", + "GamepadTouch.webidl", + "Geolocation.webidl", + "GeolocationCoordinates.webidl", + "GeolocationPosition.webidl", + "GeolocationPositionError.webidl", + "GeometryUtils.webidl", + "GetUserMediaRequest.webidl", + 
"Grid.webidl", + "Headers.webidl", + "History.webidl", + "HTMLAllCollection.webidl", + "HTMLAnchorElement.webidl", + "HTMLAreaElement.webidl", + "HTMLAudioElement.webidl", + "HTMLBaseElement.webidl", + "HTMLBodyElement.webidl", + "HTMLBRElement.webidl", + "HTMLButtonElement.webidl", + "HTMLCanvasElement.webidl", + "HTMLCollection.webidl", + "HTMLDataElement.webidl", + "HTMLDataListElement.webidl", + "HTMLDetailsElement.webidl", + "HTMLDialogElement.webidl", + "HTMLDirectoryElement.webidl", + "HTMLDivElement.webidl", + "HTMLDListElement.webidl", + "HTMLDocument.webidl", + "HTMLElement.webidl", + "HTMLEmbedElement.webidl", + "HTMLFieldSetElement.webidl", + "HTMLFontElement.webidl", + "HTMLFormControlsCollection.webidl", + "HTMLFormElement.webidl", + "HTMLFrameElement.webidl", + "HTMLFrameSetElement.webidl", + "HTMLHeadElement.webidl", + "HTMLHeadingElement.webidl", + "HTMLHRElement.webidl", + "HTMLHtmlElement.webidl", + "HTMLHyperlinkElementUtils.webidl", + "HTMLIFrameElement.webidl", + "HTMLImageElement.webidl", + "HTMLInputElement.webidl", + "HTMLLabelElement.webidl", + "HTMLLegendElement.webidl", + "HTMLLIElement.webidl", + "HTMLLinkElement.webidl", + "HTMLMapElement.webidl", + "HTMLMarqueeElement.webidl", + "HTMLMediaElement.webidl", + "HTMLMenuElement.webidl", + "HTMLMenuItemElement.webidl", + "HTMLMetaElement.webidl", + "HTMLMeterElement.webidl", + "HTMLModElement.webidl", + "HTMLObjectElement.webidl", + "HTMLOListElement.webidl", + "HTMLOptGroupElement.webidl", + "HTMLOptionElement.webidl", + "HTMLOptionsCollection.webidl", + "HTMLOutputElement.webidl", + "HTMLParagraphElement.webidl", + "HTMLParamElement.webidl", + "HTMLPictureElement.webidl", + "HTMLPreElement.webidl", + "HTMLProgressElement.webidl", + "HTMLQuoteElement.webidl", + "HTMLScriptElement.webidl", + "HTMLSelectElement.webidl", + "HTMLSlotElement.webidl", + "HTMLSourceElement.webidl", + "HTMLSpanElement.webidl", + "HTMLStyleElement.webidl", + "HTMLTableCaptionElement.webidl", + "HTMLTableCellElement.webidl", + "HTMLTableColElement.webidl", + "HTMLTableElement.webidl", + "HTMLTableRowElement.webidl", + "HTMLTableSectionElement.webidl", + "HTMLTemplateElement.webidl", + "HTMLTextAreaElement.webidl", + "HTMLTimeElement.webidl", + "HTMLTitleElement.webidl", + "HTMLTrackElement.webidl", + "HTMLUListElement.webidl", + "HTMLVideoElement.webidl", + "IDBCursor.webidl", + "IDBDatabase.webidl", + "IDBFactory.webidl", + "IDBFileHandle.webidl", + "IDBFileRequest.webidl", + "IDBIndex.webidl", + "IDBKeyRange.webidl", + "IDBMutableFile.webidl", + "IDBObjectStore.webidl", + "IDBOpenDBRequest.webidl", + "IDBRequest.webidl", + "IDBTransaction.webidl", + "IDBVersionChangeEvent.webidl", + "IdleDeadline.webidl", + "IIRFilterNode.webidl", + "ImageBitmap.webidl", + "ImageBitmapRenderingContext.webidl", + "ImageCapture.webidl", + "ImageData.webidl", + "ImageDocument.webidl", + "InputEvent.webidl", + "IntersectionObserver.webidl", + "IntlUtils.webidl", + "IterableIterator.webidl", + "KeyAlgorithm.webidl", + "KeyboardEvent.webidl", + "KeyEvent.webidl", + "KeyframeAnimationOptions.webidl", + "KeyframeEffect.webidl", + "KeyIdsInitData.webidl", + "LinkStyle.webidl", + "LoadURIOptions.webidl", + "Localization.webidl", + "Location.webidl", + "MathMLElement.webidl", + "MediaCapabilities.webidl", + "MediaDebugInfo.webidl", + "MediaDeviceInfo.webidl", + "MediaDevices.webidl", + "MediaElementAudioSourceNode.webidl", + "MediaEncryptedEvent.webidl", + "MediaError.webidl", + "MediaKeyError.webidl", + "MediaKeyMessageEvent.webidl", + "MediaKeys.webidl", + 
"MediaKeySession.webidl", + "MediaKeysRequestStatus.webidl", + "MediaKeyStatusMap.webidl", + "MediaKeySystemAccess.webidl", + "MediaList.webidl", + "MediaQueryList.webidl", + "MediaRecorder.webidl", + "MediaSession.webidl", + "MediaSource.webidl", + "MediaStream.webidl", + "MediaStreamAudioDestinationNode.webidl", + "MediaStreamAudioSourceNode.webidl", + "MediaStreamError.webidl", + "MediaStreamTrack.webidl", + "MediaStreamTrackAudioSourceNode.webidl", + "MediaTrackSettings.webidl", + "MediaTrackSupportedConstraints.webidl", + "MerchantValidationEvent.webidl", + "MessageChannel.webidl", + "MessageEvent.webidl", + "MessagePort.webidl", + "MIDIAccess.webidl", + "MIDIInput.webidl", + "MIDIInputMap.webidl", + "MIDIMessageEvent.webidl", + "MIDIOptions.webidl", + "MIDIOutput.webidl", + "MIDIOutputMap.webidl", + "MIDIPort.webidl", + "MimeType.webidl", + "MimeTypeArray.webidl", + "MouseEvent.webidl", + "MouseScrollEvent.webidl", + "MozFrameLoaderOwner.webidl", + "MutationEvent.webidl", + "MutationObserver.webidl", + "NamedNodeMap.webidl", + "NativeOSFileInternals.webidl", + "Navigator.webidl", + "NetDashboard.webidl", + "NetErrorInfo.webidl", + "NetworkInformation.webidl", + "NetworkOptions.webidl", + "NodeFilter.webidl", + "NodeIterator.webidl", + "NodeList.webidl", + "Notification.webidl", + "NotificationEvent.webidl", + "NotifyPaintEvent.webidl", + "OfflineAudioContext.webidl", + "OfflineResourceList.webidl", + "OffscreenCanvas.webidl", + "OscillatorNode.webidl", + "PaintRequest.webidl", + "PaintRequestList.webidl", + "PaintWorkletGlobalScope.webidl", + "PannerNode.webidl", + "ParentNode.webidl", + "PaymentAddress.webidl", + "PaymentMethodChangeEvent.webidl", + "PaymentRequest.webidl", + "PaymentRequestUpdateEvent.webidl", + "PaymentResponse.webidl", + "Performance.webidl", + "PerformanceEntry.webidl", + "PerformanceMark.webidl", + "PerformanceMeasure.webidl", + "PerformanceNavigation.webidl", + "PerformanceNavigationTiming.webidl", + "PerformanceObserver.webidl", + "PerformanceObserverEntryList.webidl", + "PerformanceResourceTiming.webidl", + "PerformanceServerTiming.webidl", + "PerformanceTiming.webidl", + "PeriodicWave.webidl", + "Permissions.webidl", + "PermissionStatus.webidl", + "Plugin.webidl", + "PluginArray.webidl", + "PointerEvent.webidl", + "Presentation.webidl", + "PresentationAvailability.webidl", + "PresentationConnection.webidl", + "PresentationConnectionList.webidl", + "PresentationReceiver.webidl", + "PresentationRequest.webidl", + "ProcessingInstruction.webidl", + "ProfileTimelineMarker.webidl", + "Promise.webidl", + "PushEvent.webidl", + "PushManager.webidl", + "PushManager.webidl", + "PushMessageData.webidl", + "PushSubscription.webidl", + "PushSubscriptionOptions.webidl", + "RadioNodeList.webidl", + "Range.webidl", + "ReferrerPolicy.webidl", + "Reporting.webidl", + "Request.webidl", + "ResizeObserver.webidl", + "Response.webidl", + "Sanitizer.webidl", + "Screen.webidl", + "ScreenOrientation.webidl", + "ScriptProcessorNode.webidl", + "ScrollAreaEvent.webidl", + "Selection.webidl", + "ServiceWorker.webidl", + "ServiceWorkerContainer.webidl", + "ServiceWorkerGlobalScope.webidl", + "ServiceWorkerRegistration.webidl", + "ShadowRoot.webidl", + "SharedWorker.webidl", + "SharedWorkerGlobalScope.webidl", + "SimpleGestureEvent.webidl", + "SocketCommon.webidl", + "SourceBuffer.webidl", + "SourceBufferList.webidl", + "StaticRange.webidl", + "StereoPannerNode.webidl", + "Storage.webidl", + "StorageEvent.webidl", + "StorageManager.webidl", + "StorageType.webidl", + "StreamFilter.webidl", 
+ "StreamFilterDataEvent.webidl", + "StructuredCloneTester.webidl", + "StyleSheet.webidl", + "StyleSheetList.webidl", + "SubtleCrypto.webidl", + "SVGAElement.webidl", + "SVGAngle.webidl", + "SVGAnimatedAngle.webidl", + "SVGAnimatedBoolean.webidl", + "SVGAnimatedEnumeration.webidl", + "SVGAnimatedInteger.webidl", + "SVGAnimatedLength.webidl", + "SVGAnimatedLengthList.webidl", + "SVGAnimatedNumber.webidl", + "SVGAnimatedNumberList.webidl", + "SVGAnimatedPathData.webidl", + "SVGAnimatedPoints.webidl", + "SVGAnimatedPreserveAspectRatio.webidl", + "SVGAnimatedRect.webidl", + "SVGAnimatedString.webidl", + "SVGAnimatedTransformList.webidl", + "SVGAnimateElement.webidl", + "SVGAnimateMotionElement.webidl", + "SVGAnimateTransformElement.webidl", + "SVGAnimationElement.webidl", + "SVGCircleElement.webidl", + "SVGClipPathElement.webidl", + "SVGComponentTransferFunctionElement.webidl", + "SVGDefsElement.webidl", + "SVGDescElement.webidl", + "SVGElement.webidl", + "SVGEllipseElement.webidl", + "SVGFEBlendElement.webidl", + "SVGFEColorMatrixElement.webidl", + "SVGFEComponentTransferElement.webidl", + "SVGFECompositeElement.webidl", + "SVGFEConvolveMatrixElement.webidl", + "SVGFEDiffuseLightingElement.webidl", + "SVGFEDisplacementMapElement.webidl", + "SVGFEDistantLightElement.webidl", + "SVGFEDropShadowElement.webidl", + "SVGFEFloodElement.webidl", + "SVGFEFuncAElement.webidl", + "SVGFEFuncBElement.webidl", + "SVGFEFuncGElement.webidl", + "SVGFEFuncRElement.webidl", + "SVGFEGaussianBlurElement.webidl", + "SVGFEImageElement.webidl", + "SVGFEMergeElement.webidl", + "SVGFEMergeNodeElement.webidl", + "SVGFEMorphologyElement.webidl", + "SVGFEOffsetElement.webidl", + "SVGFEPointLightElement.webidl", + "SVGFESpecularLightingElement.webidl", + "SVGFESpotLightElement.webidl", + "SVGFETileElement.webidl", + "SVGFETurbulenceElement.webidl", + "SVGFilterElement.webidl", + "SVGFilterPrimitiveStandardAttributes.webidl", + "SVGFitToViewBox.webidl", + "SVGForeignObjectElement.webidl", + "SVGGElement.webidl", + "SVGGeometryElement.webidl", + "SVGGradientElement.webidl", + "SVGGraphicsElement.webidl", + "SVGImageElement.webidl", + "SVGLength.webidl", + "SVGLengthList.webidl", + "SVGLinearGradientElement.webidl", + "SVGLineElement.webidl", + "SVGMarkerElement.webidl", + "SVGMaskElement.webidl", + "SVGMatrix.webidl", + "SVGMetadataElement.webidl", + "SVGMPathElement.webidl", + "SVGNumber.webidl", + "SVGNumberList.webidl", + "SVGPathElement.webidl", + "SVGPathSeg.webidl", + "SVGPathSegList.webidl", + "SVGPatternElement.webidl", + "SVGPoint.webidl", + "SVGPointList.webidl", + "SVGPolygonElement.webidl", + "SVGPolylineElement.webidl", + "SVGPreserveAspectRatio.webidl", + "SVGRadialGradientElement.webidl", + "SVGRect.webidl", + "SVGRectElement.webidl", + "SVGScriptElement.webidl", + "SVGSetElement.webidl", + "SVGStopElement.webidl", + "SVGStringList.webidl", + "SVGStyleElement.webidl", + "SVGSVGElement.webidl", + "SVGSwitchElement.webidl", + "SVGSymbolElement.webidl", + "SVGTests.webidl", + "SVGTextContentElement.webidl", + "SVGTextElement.webidl", + "SVGTextPathElement.webidl", + "SVGTextPositioningElement.webidl", + "SVGTitleElement.webidl", + "SVGTransform.webidl", + "SVGTransformList.webidl", + "SVGTSpanElement.webidl", + "SVGUnitTypes.webidl", + "SVGURIReference.webidl", + "SVGUseElement.webidl", + "SVGViewElement.webidl", + "SVGZoomAndPan.webidl", + "TCPServerSocket.webidl", + "TCPServerSocketEvent.webidl", + "TCPSocket.webidl", + "TCPSocketErrorEvent.webidl", + "TCPSocketEvent.webidl", + "Text.webidl", + 
"TextClause.webidl", + "TextDecoder.webidl", + "TextEncoder.webidl", + "TextTrack.webidl", + "TextTrackCue.webidl", + "TextTrackCueList.webidl", + "TextTrackList.webidl", + "TimeEvent.webidl", + "TimeRanges.webidl", + "Touch.webidl", + "TouchEvent.webidl", + "TouchList.webidl", + "TransitionEvent.webidl", + "TreeColumn.webidl", + "TreeColumns.webidl", + "TreeContentView.webidl", + "TreeView.webidl", + "TreeWalker.webidl", + "U2F.webidl", + "UDPMessageEvent.webidl", + "UDPSocket.webidl", + "UIEvent.webidl", + "URL.webidl", + "URLSearchParams.webidl", + "ValidityState.webidl", + "VideoPlaybackQuality.webidl", + "VideoTrack.webidl", + "VideoTrackList.webidl", + "VisualViewport.webidl", + "VRDisplay.webidl", + "VRDisplayEvent.webidl", + "VRServiceTest.webidl", + "VTTCue.webidl", + "VTTRegion.webidl", + "WaveShaperNode.webidl", + "WebAuthentication.webidl", + "WebComponents.webidl", + "WebGL2RenderingContext.webidl", + "WebGLRenderingContext.webidl", + "WebGPU.webidl", + "WebSocket.webidl", + "WebXR.webidl", + "WheelEvent.webidl", + "WidevineCDMManifest.webidl", + "WindowOrWorkerGlobalScope.webidl", + "WindowRoot.webidl", + "Worker.webidl", + "WorkerDebuggerGlobalScope.webidl", + "WorkerGlobalScope.webidl", + "WorkerLocation.webidl", + "WorkerNavigator.webidl", + "Worklet.webidl", + "WorkletGlobalScope.webidl", + "XMLDocument.webidl", + "XMLHttpRequest.webidl", + "XMLHttpRequestEventTarget.webidl", + "XMLHttpRequestUpload.webidl", + "XMLSerializer.webidl", + "XPathEvaluator.webidl", + "XPathExpression.webidl", + "XPathNSResolver.webidl", + "XPathResult.webidl", + "XSLTProcessor.webidl", + "XULCommandEvent.webidl", + "XULElement.webidl", + "XULPopupElement.webidl", ] -if CONFIG['MOZ_WEBRTC']: +if CONFIG["MOZ_WEBRTC"]: WEBIDL_FILES += [ - 'PeerConnectionImpl.webidl', - 'PeerConnectionObserver.webidl', - 'PeerConnectionObserverEnums.webidl', - 'RTCCertificate.webidl', - 'RTCConfiguration.webidl', - 'RTCDataChannel.webidl', - 'RTCDtlsTransport.webidl', - 'RTCDTMFSender.webidl', - 'RTCIceCandidate.webidl', - 'RTCIdentityAssertion.webidl', - 'RTCIdentityProvider.webidl', - 'RTCPeerConnection.webidl', - 'RTCPeerConnectionStatic.webidl', - 'RTCRtpReceiver.webidl', - 'RTCRtpSender.webidl', - 'RTCRtpSources.webidl', - 'RTCRtpTransceiver.webidl', - 'RTCSessionDescription.webidl', - 'RTCStatsReport.webidl', - 'TransceiverImpl.webidl', - 'WebrtcDeprecated.webidl', - 'WebrtcGlobalInformation.webidl', + "PeerConnectionImpl.webidl", + "PeerConnectionObserver.webidl", + "PeerConnectionObserverEnums.webidl", + "RTCCertificate.webidl", + "RTCConfiguration.webidl", + "RTCDataChannel.webidl", + "RTCDtlsTransport.webidl", + "RTCDTMFSender.webidl", + "RTCIceCandidate.webidl", + "RTCIdentityAssertion.webidl", + "RTCIdentityProvider.webidl", + "RTCPeerConnection.webidl", + "RTCPeerConnectionStatic.webidl", + "RTCRtpReceiver.webidl", + "RTCRtpSender.webidl", + "RTCRtpSources.webidl", + "RTCRtpTransceiver.webidl", + "RTCSessionDescription.webidl", + "RTCStatsReport.webidl", + "TransceiverImpl.webidl", + "WebrtcDeprecated.webidl", + "WebrtcGlobalInformation.webidl", ] -if CONFIG['MOZ_WEBSPEECH']: +if CONFIG["MOZ_WEBSPEECH"]: WEBIDL_FILES += [ - 'SpeechGrammar.webidl', - 'SpeechGrammarList.webidl', - 'SpeechRecognition.webidl', - 'SpeechRecognitionAlternative.webidl', - 'SpeechRecognitionError.webidl', - 'SpeechRecognitionEvent.webidl', - 'SpeechRecognitionResult.webidl', - 'SpeechRecognitionResultList.webidl', - 'SpeechSynthesis.webidl', - 'SpeechSynthesisErrorEvent.webidl', - 'SpeechSynthesisEvent.webidl', - 
'SpeechSynthesisUtterance.webidl', - 'SpeechSynthesisVoice.webidl', + "SpeechGrammar.webidl", + "SpeechGrammarList.webidl", + "SpeechRecognition.webidl", + "SpeechRecognitionAlternative.webidl", + "SpeechRecognitionError.webidl", + "SpeechRecognitionEvent.webidl", + "SpeechRecognitionResult.webidl", + "SpeechRecognitionResultList.webidl", + "SpeechSynthesis.webidl", + "SpeechSynthesisErrorEvent.webidl", + "SpeechSynthesisEvent.webidl", + "SpeechSynthesisUtterance.webidl", + "SpeechSynthesisVoice.webidl", ] WEBIDL_FILES += [ - 'CloseEvent.webidl', - 'CustomEvent.webidl', - 'DeviceOrientationEvent.webidl', - 'HashChangeEvent.webidl', - 'PageTransitionEvent.webidl', - 'PopStateEvent.webidl', - 'PopupBlockedEvent.webidl', - 'ProgressEvent.webidl', - 'StyleSheetApplicableStateChangeEvent.webidl', + "CloseEvent.webidl", + "CustomEvent.webidl", + "DeviceOrientationEvent.webidl", + "HashChangeEvent.webidl", + "PageTransitionEvent.webidl", + "PopStateEvent.webidl", + "PopupBlockedEvent.webidl", + "ProgressEvent.webidl", + "StyleSheetApplicableStateChangeEvent.webidl", ] # We only expose our prefable test interfaces in debug builds, just to be on # the safe side. -if CONFIG['MOZ_DEBUG'] and CONFIG['ENABLE_TESTS']: - WEBIDL_FILES += ['TestFunctions.webidl', - 'TestInterfaceJS.webidl', - 'TestInterfaceJSDictionaries.webidl', - 'TestInterfaceJSMaplikeSetlikeIterable.webidl'] +if CONFIG["MOZ_DEBUG"] and CONFIG["ENABLE_TESTS"]: + WEBIDL_FILES += [ + "TestFunctions.webidl", + "TestInterfaceJS.webidl", + "TestInterfaceJSDictionaries.webidl", + "TestInterfaceJSMaplikeSetlikeIterable.webidl", + ] WEBIDL_FILES += [ - 'InstallTrigger.webidl', + "InstallTrigger.webidl", ] -if CONFIG['FUZZING']: +if CONFIG["FUZZING"]: WEBIDL_FILES += [ - 'FuzzingFunctions.webidl', + "FuzzingFunctions.webidl", ] GENERATED_EVENTS_WEBIDL_FILES = [ - 'AddonEvent.webidl', - 'AnimationPlaybackEvent.webidl', - 'BlobEvent.webidl', - 'CaretStateChangedEvent.webidl', - 'CloseEvent.webidl', - 'DeviceLightEvent.webidl', - 'DeviceOrientationEvent.webidl', - 'DeviceProximityEvent.webidl', - 'ErrorEvent.webidl', - 'FontFaceSetLoadEvent.webidl', - 'FormDataEvent.webidl', - 'FrameCrashedEvent.webidl', - 'GamepadAxisMoveEvent.webidl', - 'GamepadButtonEvent.webidl', - 'GamepadEvent.webidl', - 'GPUUncapturedErrorEvent.webidl', - 'HashChangeEvent.webidl', - 'HiddenPluginEvent.webidl', - 'ImageCaptureErrorEvent.webidl', - 'MediaQueryListEvent.webidl', - 'MediaRecorderErrorEvent.webidl', - 'MediaStreamEvent.webidl', - 'MediaStreamTrackEvent.webidl', - 'MIDIConnectionEvent.webidl', - 'OfflineAudioCompletionEvent.webidl', - 'PageTransitionEvent.webidl', - 'PerformanceEntryEvent.webidl', - 'PluginCrashedEvent.webidl', - 'PopStateEvent.webidl', - 'PopupBlockedEvent.webidl', - 'PopupPositionedEvent.webidl', - 'PositionStateEvent.webidl', - 'PresentationConnectionAvailableEvent.webidl', - 'PresentationConnectionCloseEvent.webidl', - 'ProgressEvent.webidl', - 'PromiseRejectionEvent.webidl', - 'ScrollViewChangeEvent.webidl', - 'SecurityPolicyViolationEvent.webidl', - 'StyleSheetApplicableStateChangeEvent.webidl', - 'SubmitEvent.webidl', - 'TCPServerSocketEvent.webidl', - 'TCPSocketErrorEvent.webidl', - 'TCPSocketEvent.webidl', - 'TrackEvent.webidl', - 'UDPMessageEvent.webidl', - 'UserProximityEvent.webidl', - 'WebGLContextEvent.webidl', - 'XRInputSourceEvent.webidl', - 'XRInputSourcesChangeEvent.webidl', - 'XRReferenceSpaceEvent.webidl', - 'XRSessionEvent.webidl', + "AddonEvent.webidl", + "AnimationPlaybackEvent.webidl", + "BlobEvent.webidl", + 
"CaretStateChangedEvent.webidl", + "CloseEvent.webidl", + "DeviceLightEvent.webidl", + "DeviceOrientationEvent.webidl", + "DeviceProximityEvent.webidl", + "ErrorEvent.webidl", + "FontFaceSetLoadEvent.webidl", + "FormDataEvent.webidl", + "FrameCrashedEvent.webidl", + "GamepadAxisMoveEvent.webidl", + "GamepadButtonEvent.webidl", + "GamepadEvent.webidl", + "GPUUncapturedErrorEvent.webidl", + "HashChangeEvent.webidl", + "HiddenPluginEvent.webidl", + "ImageCaptureErrorEvent.webidl", + "MediaQueryListEvent.webidl", + "MediaRecorderErrorEvent.webidl", + "MediaStreamEvent.webidl", + "MediaStreamTrackEvent.webidl", + "MIDIConnectionEvent.webidl", + "OfflineAudioCompletionEvent.webidl", + "PageTransitionEvent.webidl", + "PerformanceEntryEvent.webidl", + "PluginCrashedEvent.webidl", + "PopStateEvent.webidl", + "PopupBlockedEvent.webidl", + "PopupPositionedEvent.webidl", + "PositionStateEvent.webidl", + "PresentationConnectionAvailableEvent.webidl", + "PresentationConnectionCloseEvent.webidl", + "ProgressEvent.webidl", + "PromiseRejectionEvent.webidl", + "ScrollViewChangeEvent.webidl", + "SecurityPolicyViolationEvent.webidl", + "StyleSheetApplicableStateChangeEvent.webidl", + "SubmitEvent.webidl", + "TCPServerSocketEvent.webidl", + "TCPSocketErrorEvent.webidl", + "TCPSocketEvent.webidl", + "TrackEvent.webidl", + "UDPMessageEvent.webidl", + "UserProximityEvent.webidl", + "WebGLContextEvent.webidl", + "XRInputSourceEvent.webidl", + "XRInputSourcesChangeEvent.webidl", + "XRReferenceSpaceEvent.webidl", + "XRSessionEvent.webidl", ] -if CONFIG['MOZ_WEBRTC']: +if CONFIG["MOZ_WEBRTC"]: GENERATED_EVENTS_WEBIDL_FILES += [ - 'RTCDataChannelEvent.webidl', - 'RTCDTMFToneChangeEvent.webidl', - 'RTCPeerConnectionIceEvent.webidl', - 'RTCTrackEvent.webidl', + "RTCDataChannelEvent.webidl", + "RTCDTMFToneChangeEvent.webidl", + "RTCPeerConnectionIceEvent.webidl", + "RTCTrackEvent.webidl", ] -if CONFIG['MOZ_WEBSPEECH']: +if CONFIG["MOZ_WEBSPEECH"]: GENERATED_EVENTS_WEBIDL_FILES += [ - 'SpeechRecognitionEvent.webidl', - 'SpeechSynthesisErrorEvent.webidl', - 'SpeechSynthesisEvent.webidl', + "SpeechRecognitionEvent.webidl", + "SpeechSynthesisErrorEvent.webidl", + "SpeechSynthesisEvent.webidl", ] -if CONFIG['MOZ_BUILD_APP'] in ['browser', 'comm/mail', 'mobile/android', 'xulrunner']: +if CONFIG["MOZ_BUILD_APP"] in ["browser", "comm/mail", "mobile/android", "xulrunner"]: WEBIDL_FILES += [ - 'External.webidl', + "External.webidl", ] -if CONFIG['ACCESSIBILITY']: +if CONFIG["ACCESSIBILITY"]: WEBIDL_FILES += [ - 'AccessibleNode.webidl', + "AccessibleNode.webidl", ] diff --git a/dom/webshare/moz.build b/dom/webshare/moz.build index 42720eb7fde744..efb701c0f77142 100644 --- a/dom/webshare/moz.build +++ b/dom/webshare/moz.build @@ -7,5 +7,4 @@ with Files("**"): BUG_COMPONENT = ("Core", "DOM: Core & HTML") -MOCHITEST_MANIFESTS += ['test/mochitest/mochitest.ini'] - +MOCHITEST_MANIFESTS += ["test/mochitest/mochitest.ini"] diff --git a/dom/websocket/moz.build b/dom/websocket/moz.build index 45fa8bc1f64340..fbd2399e428b1a 100644 --- a/dom/websocket/moz.build +++ b/dom/websocket/moz.build @@ -8,26 +8,26 @@ with Files("**"): BUG_COMPONENT = ("Core", "DOM: Networking") EXPORTS.mozilla.dom += [ - 'WebSocket.h', + "WebSocket.h", ] UNIFIED_SOURCES += [ - 'WebSocket.cpp', + "WebSocket.cpp", ] LOCAL_INCLUDES += [ - '/dom/base', + "/dom/base", ] MOCHITEST_CHROME_MANIFESTS += [ - 'tests/chrome.ini', + "tests/chrome.ini", ] MOCHITEST_MANIFESTS += [ - 'tests/mochitest.ini', - 'tests/websocket_hybi/mochitest.ini', + "tests/mochitest.ini", + 
"tests/websocket_hybi/mochitest.ini", ] -include('/ipc/chromium/chromium-config.mozbuild') +include("/ipc/chromium/chromium-config.mozbuild") -FINAL_LIBRARY = 'xul' +FINAL_LIBRARY = "xul" diff --git a/dom/websocket/tests/file_websocket_basic_wsh.py b/dom/websocket/tests/file_websocket_basic_wsh.py index 0a93731f59d631..050e7ccc106ccb 100644 --- a/dom/websocket/tests/file_websocket_basic_wsh.py +++ b/dom/websocket/tests/file_websocket_basic_wsh.py @@ -5,27 +5,27 @@ def web_socket_do_extra_handshake(request): # must set request.ws_protocol to the selected version from ws_requested_protocols request.ws_protocol = request.ws_requested_protocols[0] - if (request.ws_protocol == 'error'): - raise ValueError('Error') + if request.ws_protocol == "error": + raise ValueError("Error") pass def web_socket_transfer_data(request): while True: line = msgutil.receive_message(request) - if line == 'protocol': + if line == "protocol": msgutil.send_message(request, request.ws_protocol) continue - if line == 'resource': + if line == "resource": msgutil.send_message(request, request.ws_resource) continue - if line == 'origin': + if line == "origin": msgutil.send_message(request, request.ws_origin) continue msgutil.send_message(request, line) - if line == 'end': + if line == "end": return diff --git a/dom/websocket/tests/file_websocket_permessage_deflate_disabled_wsh.py b/dom/websocket/tests/file_websocket_permessage_deflate_disabled_wsh.py index 81e4a1f879a4e1..c588ebc77cf0f9 100644 --- a/dom/websocket/tests/file_websocket_permessage_deflate_disabled_wsh.py +++ b/dom/websocket/tests/file_websocket_permessage_deflate_disabled_wsh.py @@ -6,14 +6,14 @@ def web_socket_do_extra_handshake(request): if request.ws_requested_extensions is not None: for extension_request in request.ws_requested_extensions: if extension_request.name() == "permessage-deflate": - raise ValueError('permessage-deflate should not be offered') + raise ValueError("permessage-deflate should not be offered") def web_socket_transfer_data(request): while True: rcvd = msgutil.receive_message(request) opcode = request.ws_stream.get_last_received_opcode() - if (opcode == common.OPCODE_BINARY): + if opcode == common.OPCODE_BINARY: msgutil.send_message(request, rcvd, binary=True) - elif (opcode == common.OPCODE_TEXT): + elif opcode == common.OPCODE_TEXT: msgutil.send_message(request, rcvd) diff --git a/dom/websocket/tests/file_websocket_permessage_deflate_params_wsh.py b/dom/websocket/tests/file_websocket_permessage_deflate_params_wsh.py index cb079df4d76a06..838756964539b7 100644 --- a/dom/websocket/tests/file_websocket_permessage_deflate_params_wsh.py +++ b/dom/websocket/tests/file_websocket_permessage_deflate_params_wsh.py @@ -12,14 +12,14 @@ def web_socket_do_extra_handshake(request): deflate_found = True if deflate_found is False: - raise ValueError('deflate extension processor not found') + raise ValueError("deflate extension processor not found") def web_socket_transfer_data(request): while True: rcvd = msgutil.receive_message(request) opcode = request.ws_stream.get_last_received_opcode() - if (opcode == common.OPCODE_BINARY): + if opcode == common.OPCODE_BINARY: msgutil.send_message(request, rcvd, binary=True) - elif (opcode == common.OPCODE_TEXT): + elif opcode == common.OPCODE_TEXT: msgutil.send_message(request, rcvd) diff --git a/dom/websocket/tests/file_websocket_permessage_deflate_rejected_wsh.py b/dom/websocket/tests/file_websocket_permessage_deflate_rejected_wsh.py index 7463b70c8522ec..e0a99467f3002d 100644 --- 
a/dom/websocket/tests/file_websocket_permessage_deflate_rejected_wsh.py +++ b/dom/websocket/tests/file_websocket_permessage_deflate_rejected_wsh.py @@ -12,14 +12,14 @@ def web_socket_do_extra_handshake(request): deflate_removed = True if deflate_removed is False: - raise ValueError('deflate extension processor not found') + raise ValueError("deflate extension processor not found") def web_socket_transfer_data(request): while True: rcvd = msgutil.receive_message(request) opcode = request.ws_stream.get_last_received_opcode() - if (opcode == common.OPCODE_BINARY): + if opcode == common.OPCODE_BINARY: msgutil.send_message(request, rcvd, binary=True) - elif (opcode == common.OPCODE_TEXT): + elif opcode == common.OPCODE_TEXT: msgutil.send_message(request, rcvd) diff --git a/dom/websocket/tests/file_websocket_permessage_deflate_wsh.py b/dom/websocket/tests/file_websocket_permessage_deflate_wsh.py index 22efecd11dd754..8075829481d1ca 100644 --- a/dom/websocket/tests/file_websocket_permessage_deflate_wsh.py +++ b/dom/websocket/tests/file_websocket_permessage_deflate_wsh.py @@ -11,14 +11,14 @@ def web_socket_do_extra_handshake(request): pmce_offered = True if pmce_offered is False: - raise ValueError('permessage-deflate not offered') + raise ValueError("permessage-deflate not offered") def web_socket_transfer_data(request): while True: rcvd = msgutil.receive_message(request) opcode = request.ws_stream.get_last_received_opcode() - if (opcode == common.OPCODE_BINARY): + if opcode == common.OPCODE_BINARY: msgutil.send_message(request, rcvd, binary=True) - elif (opcode == common.OPCODE_TEXT): + elif opcode == common.OPCODE_TEXT: msgutil.send_message(request, rcvd) diff --git a/dom/websocket/tests/file_websocket_wsh.py b/dom/websocket/tests/file_websocket_wsh.py index 1ea45fee4af49c..b9301276c00875 100644 --- a/dom/websocket/tests/file_websocket_wsh.py +++ b/dom/websocket/tests/file_websocket_wsh.py @@ -20,7 +20,7 @@ def web_socket_do_extra_handshake(request): elif request.ws_protocol == "test-10": time.sleep(3) elif request.ws_protocol == "test-19": - raise ValueError('Aborting (test-19)') + raise ValueError("Aborting (test-19)") elif request.ws_protocol == "test-20" or request.ws_protocol == "test-17": time.sleep(3) elif request.ws_protocol == "test-22": @@ -34,6 +34,7 @@ def web_socket_do_extra_handshake(request): else: pass + # Behave according to recommendation of RFC 6455, section # 5.5.1: # "When sending a Close frame in response, the endpoint typically echos the # status code it received." 
@@ -55,15 +56,15 @@ def web_socket_transfer_data(request): resp = "wrong message" if msgutil.receive_message(request) == "1": resp = "2" - msgutil.send_message(request, resp.decode('utf-8')) + msgutil.send_message(request, resp.decode("utf-8")) resp = "wrong message" if msgutil.receive_message(request) == "3": resp = "4" - msgutil.send_message(request, resp.decode('utf-8')) + msgutil.send_message(request, resp.decode("utf-8")) resp = "wrong message" if msgutil.receive_message(request) == "5": resp = "あいうえお" - msgutil.send_message(request, resp.decode('utf-8')) + msgutil.send_message(request, resp.decode("utf-8")) msgutil.close_connection(request) elif request.ws_protocol == "test-7": msgutil.send_message(request, "test-7 data") @@ -73,20 +74,22 @@ def web_socket_transfer_data(request): resp = "wrong message" if msgutil.receive_message(request) == "client data": resp = "server data" - msgutil.send_message(request, resp.decode('utf-8')) + msgutil.send_message(request, resp.decode("utf-8")) elif request.ws_protocol == "test-12": msg = msgutil.receive_message(request) - if msg == u'a\ufffdb': + if msg == u"a\ufffdb": # converted unpaired surrogate in UTF-16 to UTF-8 OK msgutil.send_message(request, "SUCCESS") else: - msgutil.send_message(request, "FAIL got '" + msg - + "' instead of string with replacement char'") + msgutil.send_message( + request, + "FAIL got '" + msg + "' instead of string with replacement char'", + ) elif request.ws_protocol == "test-13": # first one binary message containing the byte 0x61 ('a') - request.connection.write('\xff\x01\x61') + request.connection.write("\xff\x01\x61") # after a bad utf8 message - request.connection.write('\x01\x61\xff') + request.connection.write("\x01\x61\xff") msgutil.close_connection(request) elif request.ws_protocol == "test-14": msgutil.close_connection(request) @@ -105,7 +108,7 @@ def web_socket_transfer_data(request): resp = "wrong message" if msgutil.receive_message(request) == "client data": resp = "server data" - msgutil.send_message(request, resp.decode('utf-8')) + msgutil.send_message(request, resp.decode("utf-8")) time.sleep(2) msgutil.close_connection(request) elif request.ws_protocol == "test-20": @@ -133,17 +136,13 @@ def web_socket_transfer_data(request): request.ws_stream.close_connection(test37code, test37reason) elif request.ws_protocol == "test-42": # Echo back 3 messages - msgutil.send_message(request, - msgutil.receive_message(request)) - msgutil.send_message(request, - msgutil.receive_message(request)) - msgutil.send_message(request, - msgutil.receive_message(request)) + msgutil.send_message(request, msgutil.receive_message(request)) + msgutil.send_message(request, msgutil.receive_message(request)) + msgutil.send_message(request, msgutil.receive_message(request)) elif request.ws_protocol == "test-44": rcv = msgutil.receive_message(request) # check we received correct binary msg - if len(rcv) == 3 \ - and ord(rcv[0]) == 5 and ord(rcv[1]) == 0 and ord(rcv[2]) == 7: + if len(rcv) == 3 and ord(rcv[0]) == 5 and ord(rcv[1]) == 0 and ord(rcv[2]) == 7: # reply with binary msg 0x04 msgutil.send_message(request, struct.pack("cc", chr(0), chr(4)), True, True) else: @@ -155,7 +154,9 @@ def web_socket_transfer_data(request): # send back same blob as binary msg msgutil.send_message(request, rcv, True, True) else: - msgutil.send_message(request, "incorrect binary msg received: '" + rcv + "'") + msgutil.send_message( + request, "incorrect binary msg received: '" + rcv + "'" + ) elif request.ws_protocol == "test-46": 
msgutil.send_message(request, "client must drop this if close was called") diff --git a/dom/websocket/tests/websocket_hybi/file_binary-frames_wsh.py b/dom/websocket/tests/websocket_hybi/file_binary-frames_wsh.py index 2f9f379726b4fe..830367e846708f 100644 --- a/dom/websocket/tests/websocket_hybi/file_binary-frames_wsh.py +++ b/dom/websocket/tests/websocket_hybi/file_binary-frames_wsh.py @@ -7,7 +7,7 @@ def web_socket_do_extra_handshake(request): def web_socket_transfer_data(request): - messages_to_send = ['Hello, world!', '', all_distinct_bytes()] + messages_to_send = ["Hello, world!", "", all_distinct_bytes()] for message in messages_to_send: # FIXME: Should use better API to send binary messages when pywebsocket supports it. header = stream.create_header(common.OPCODE_BINARY, len(message), 1, 0, 0, 0, 0) @@ -15,4 +15,4 @@ def web_socket_transfer_data(request): def all_distinct_bytes(): - return ''.join([chr(i) for i in range(256)]) + return "".join([chr(i) for i in range(256)]) diff --git a/dom/websocket/tests/websocket_hybi/file_check-binary-messages_wsh.py b/dom/websocket/tests/websocket_hybi/file_check-binary-messages_wsh.py index 62122024c41bfa..1d5bc49dfa2251 100644 --- a/dom/websocket/tests/websocket_hybi/file_check-binary-messages_wsh.py +++ b/dom/websocket/tests/websocket_hybi/file_check-binary-messages_wsh.py @@ -6,16 +6,20 @@ def web_socket_do_extra_handshake(request): def web_socket_transfer_data(request): - expected_messages = ['Hello, world!', '', all_distinct_bytes()] + expected_messages = ["Hello, world!", "", all_distinct_bytes()] for test_number, expected_message in enumerate(expected_messages): message = msgutil.receive_message(request) if type(message) == str and message == expected_message: - msgutil.send_message(request, 'PASS: Message #{:d}.'.format(test_number)) + msgutil.send_message(request, "PASS: Message #{:d}.".format(test_number)) else: - msgutil.send_message(request, 'FAIL: Message #{:d}: Received unexpected message: {!r}' - .format(test_number, message)) + msgutil.send_message( + request, + "FAIL: Message #{:d}: Received unexpected message: {!r}".format( + test_number, message + ), + ) def all_distinct_bytes(): - return ''.join([chr(i) for i in range(256)]) + return "".join([chr(i) for i in range(256)]) diff --git a/dom/workers/moz.build b/dom/workers/moz.build index f7ab879db3a3e7..0aa4dacff95934 100644 --- a/dom/workers/moz.build +++ b/dom/workers/moz.build @@ -7,100 +7,100 @@ with Files("**"): BUG_COMPONENT = ("Core", "DOM: Workers") -DIRS += ['remoteworkers', 'sharedworkers'] +DIRS += ["remoteworkers", "sharedworkers"] # Public stuff. EXPORTS.mozilla.dom += [ - 'ChromeWorker.h', - 'JSExecutionManager.h', - 'Worker.h', - 'WorkerCommon.h', - 'WorkerDebugger.h', - 'WorkerDebuggerManager.h', - 'WorkerError.h', - 'WorkerLoadInfo.h', - 'WorkerLocation.h', - 'WorkerNavigator.h', - 'WorkerPrivate.h', - 'WorkerRef.h', - 'WorkerRunnable.h', - 'WorkerScope.h', - 'WorkerStatus.h', + "ChromeWorker.h", + "JSExecutionManager.h", + "Worker.h", + "WorkerCommon.h", + "WorkerDebugger.h", + "WorkerDebuggerManager.h", + "WorkerError.h", + "WorkerLoadInfo.h", + "WorkerLocation.h", + "WorkerNavigator.h", + "WorkerPrivate.h", + "WorkerRef.h", + "WorkerRunnable.h", + "WorkerScope.h", + "WorkerStatus.h", ] # Private stuff. 
EXPORTS.mozilla.dom.workerinternals += [ - 'JSSettings.h', - 'Queue.h', - 'RuntimeService.h', - 'ScriptLoader.h', + "JSSettings.h", + "Queue.h", + "RuntimeService.h", + "ScriptLoader.h", ] -XPIDL_MODULE = 'dom_workers' +XPIDL_MODULE = "dom_workers" XPIDL_SOURCES += [ - 'nsIWorkerDebugger.idl', - 'nsIWorkerDebuggerManager.idl', + "nsIWorkerDebugger.idl", + "nsIWorkerDebuggerManager.idl", ] UNIFIED_SOURCES += [ - 'ChromeWorker.cpp', - 'ChromeWorkerScope.cpp', - 'JSExecutionManager.cpp', - 'MessageEventRunnable.cpp', - 'Principal.cpp', - 'RegisterBindings.cpp', - 'RuntimeService.cpp', - 'ScriptLoader.cpp', - 'Worker.cpp', - 'WorkerCSPEventListener.cpp', - 'WorkerDebugger.cpp', - 'WorkerDebuggerManager.cpp', - 'WorkerError.cpp', - 'WorkerEventTarget.cpp', - 'WorkerLoadInfo.cpp', - 'WorkerLocation.cpp', - 'WorkerNavigator.cpp', - 'WorkerPrivate.cpp', - 'WorkerRef.cpp', - 'WorkerRunnable.cpp', - 'WorkerScope.cpp', - 'WorkerThread.cpp', + "ChromeWorker.cpp", + "ChromeWorkerScope.cpp", + "JSExecutionManager.cpp", + "MessageEventRunnable.cpp", + "Principal.cpp", + "RegisterBindings.cpp", + "RuntimeService.cpp", + "ScriptLoader.cpp", + "Worker.cpp", + "WorkerCSPEventListener.cpp", + "WorkerDebugger.cpp", + "WorkerDebuggerManager.cpp", + "WorkerError.cpp", + "WorkerEventTarget.cpp", + "WorkerLoadInfo.cpp", + "WorkerLocation.cpp", + "WorkerNavigator.cpp", + "WorkerPrivate.cpp", + "WorkerRef.cpp", + "WorkerRunnable.cpp", + "WorkerScope.cpp", + "WorkerThread.cpp", ] LOCAL_INCLUDES += [ - '/caps', - '/dom/base', - '/dom/bindings', - '/dom/system', - '/dom/workers/remoteworkers', - '/js/xpconnect/loader', - '/netwerk/base', - '/xpcom/build', - '/xpcom/threads', + "/caps", + "/dom/base", + "/dom/bindings", + "/dom/system", + "/dom/workers/remoteworkers", + "/js/xpconnect/loader", + "/netwerk/base", + "/xpcom/build", + "/xpcom/threads", ] -if CONFIG['MOZ_WIDGET_TOOLKIT'] == 'cocoa': +if CONFIG["MOZ_WIDGET_TOOLKIT"] == "cocoa": LOCAL_INCLUDES += [ - '/xpcom/base', + "/xpcom/base", ] -include('/ipc/chromium/chromium-config.mozbuild') +include("/ipc/chromium/chromium-config.mozbuild") -FINAL_LIBRARY = 'xul' +FINAL_LIBRARY = "xul" MOCHITEST_MANIFESTS += [ - 'test/mochitest.ini', + "test/mochitest.ini", ] MOCHITEST_CHROME_MANIFESTS += [ - 'test/chrome.ini', + "test/chrome.ini", ] -XPCSHELL_TESTS_MANIFESTS += ['test/xpcshell/xpcshell.ini'] +XPCSHELL_TESTS_MANIFESTS += ["test/xpcshell/xpcshell.ini"] -BROWSER_CHROME_MANIFESTS += ['test/browser.ini'] +BROWSER_CHROME_MANIFESTS += ["test/browser.ini"] -if CONFIG['CC_TYPE'] in ('clang', 'gcc'): - CXXFLAGS += ['-Wno-error=shadow'] +if CONFIG["CC_TYPE"] in ("clang", "gcc"): + CXXFLAGS += ["-Wno-error=shadow"] diff --git a/dom/workers/remoteworkers/moz.build b/dom/workers/remoteworkers/moz.build index a62533356f6edc..9983b7dd1015c1 100644 --- a/dom/workers/remoteworkers/moz.build +++ b/dom/workers/remoteworkers/moz.build @@ -5,41 +5,41 @@ # file, You can obtain one at http://mozilla.org/MPL/2.0/. 
EXPORTS.mozilla.dom += [ - 'RemoteWorkerChild.h', - 'RemoteWorkerController.h', - 'RemoteWorkerControllerChild.h', - 'RemoteWorkerControllerParent.h', - 'RemoteWorkerManager.h', - 'RemoteWorkerParent.h', - 'RemoteWorkerService.h', - 'RemoteWorkerServiceChild.h', - 'RemoteWorkerServiceParent.h', + "RemoteWorkerChild.h", + "RemoteWorkerController.h", + "RemoteWorkerControllerChild.h", + "RemoteWorkerControllerParent.h", + "RemoteWorkerManager.h", + "RemoteWorkerParent.h", + "RemoteWorkerService.h", + "RemoteWorkerServiceChild.h", + "RemoteWorkerServiceParent.h", ] UNIFIED_SOURCES += [ - 'RemoteWorkerChild.cpp', - 'RemoteWorkerController.cpp', - 'RemoteWorkerControllerChild.cpp', - 'RemoteWorkerControllerParent.cpp', - 'RemoteWorkerManager.cpp', - 'RemoteWorkerParent.cpp', - 'RemoteWorkerService.cpp', - 'RemoteWorkerServiceChild.cpp', - 'RemoteWorkerServiceParent.cpp', + "RemoteWorkerChild.cpp", + "RemoteWorkerController.cpp", + "RemoteWorkerControllerChild.cpp", + "RemoteWorkerControllerParent.cpp", + "RemoteWorkerManager.cpp", + "RemoteWorkerParent.cpp", + "RemoteWorkerService.cpp", + "RemoteWorkerServiceChild.cpp", + "RemoteWorkerServiceParent.cpp", ] LOCAL_INCLUDES += [ - '/dom/serviceworkers', - '/xpcom/build', + "/dom/serviceworkers", + "/xpcom/build", ] IPDL_SOURCES += [ - 'PRemoteWorker.ipdl', - 'PRemoteWorkerController.ipdl', - 'PRemoteWorkerService.ipdl', - 'RemoteWorkerTypes.ipdlh', + "PRemoteWorker.ipdl", + "PRemoteWorkerController.ipdl", + "PRemoteWorkerService.ipdl", + "RemoteWorkerTypes.ipdlh", ] -include('/ipc/chromium/chromium-config.mozbuild') +include("/ipc/chromium/chromium-config.mozbuild") -FINAL_LIBRARY = 'xul' +FINAL_LIBRARY = "xul" diff --git a/dom/workers/sharedworkers/moz.build b/dom/workers/sharedworkers/moz.build index 95d3a71e725285..2b83bc9525a8c0 100644 --- a/dom/workers/sharedworkers/moz.build +++ b/dom/workers/sharedworkers/moz.build @@ -5,24 +5,24 @@ # file, You can obtain one at http://mozilla.org/MPL/2.0/. 
EXPORTS.mozilla.dom += [ - 'SharedWorker.h', - 'SharedWorkerChild.h', - 'SharedWorkerManager.h', - 'SharedWorkerParent.h', + "SharedWorker.h", + "SharedWorkerChild.h", + "SharedWorkerManager.h", + "SharedWorkerParent.h", ] UNIFIED_SOURCES += [ - 'SharedWorker.cpp', - 'SharedWorkerChild.cpp', - 'SharedWorkerManager.cpp', - 'SharedWorkerParent.cpp', - 'SharedWorkerService.cpp', + "SharedWorker.cpp", + "SharedWorkerChild.cpp", + "SharedWorkerManager.cpp", + "SharedWorkerParent.cpp", + "SharedWorkerService.cpp", ] IPDL_SOURCES += [ - 'PSharedWorker.ipdl', + "PSharedWorker.ipdl", ] -include('/ipc/chromium/chromium-config.mozbuild') +include("/ipc/chromium/chromium-config.mozbuild") -FINAL_LIBRARY = 'xul' +FINAL_LIBRARY = "xul" diff --git a/dom/workers/test/marionette/test_service_workers_at_startup.py b/dom/workers/test/marionette/test_service_workers_at_startup.py index dcc6559579396d..4496c7408c4f61 100644 --- a/dom/workers/test/marionette/test_service_workers_at_startup.py +++ b/dom/workers/test/marionette/test_service_workers_at_startup.py @@ -18,11 +18,13 @@ def tearDown(self): super(ServiceWorkerAtStartupTestCase, self).tearDown() def install_service_worker(self): - install_url = self.marionette.absolute_url("serviceworker/install_serviceworker.html") + install_url = self.marionette.absolute_url( + "serviceworker/install_serviceworker.html" + ) self.marionette.navigate(install_url) Wait(self.marionette).until( lambda _: self.is_service_worker_registered, - message="Wait the service worker to be installed" + message="Wait the service worker to be installed", ) def test_registered_service_worker_after_restart(self): @@ -31,7 +33,7 @@ def test_registered_service_worker_after_restart(self): # (Bug 1665184). Wait(self.marionette, timeout=10).until( lambda _: self.profile_serviceworker_txt_exists, - message="Wait service workers to be stored in the profile" + message="Wait service workers to be stored in the profile", ) # Quit and start a new session to simulate a full browser restart @@ -44,7 +46,7 @@ def test_registered_service_worker_after_restart(self): Wait(self.marionette).until( lambda _: self.is_service_worker_registered, - message="Wait the service worker to be registered after restart" + message="Wait the service worker to be registered after restart", ) self.assertTrue(self.is_service_worker_registered) @@ -55,7 +57,8 @@ def profile_serviceworker_txt_exists(self): @property def is_service_worker_registered(self): with self.marionette.using_context("chrome"): - return self.marionette.execute_script(""" + return self.marionette.execute_script( + """ let swm = Cc["@mozilla.org/serviceworkers/manager;1"].getService( Ci.nsIServiceWorkerManager ); @@ -74,4 +77,6 @@ def is_service_worker_registered(self): } } return false; - """, script_args=(self.marionette.absolute_url(""),)) + """, + script_args=(self.marionette.absolute_url(""),), + ) diff --git a/dom/worklet/moz.build b/dom/worklet/moz.build index dc427cab538419..706921ff53eed9 100644 --- a/dom/worklet/moz.build +++ b/dom/worklet/moz.build @@ -8,27 +8,27 @@ with Files("**"): BUG_COMPONENT = ("Core", "DOM: Core & HTML") EXPORTS.mozilla.dom += [ - 'Worklet.h', - 'WorkletGlobalScope.h', - 'WorkletImpl.h', - 'WorkletPrincipals.h', - 'WorkletThread.h', + "Worklet.h", + "WorkletGlobalScope.h", + "WorkletImpl.h", + "WorkletPrincipals.h", + "WorkletThread.h", ] UNIFIED_SOURCES += [ - 'Worklet.cpp', - 'WorkletGlobalScope.cpp', - 'WorkletImpl.cpp', - 'WorkletPrincipals.cpp', - 'WorkletThread.cpp', + "Worklet.cpp", + "WorkletGlobalScope.cpp", + 
"WorkletImpl.cpp", + "WorkletPrincipals.cpp", + "WorkletThread.cpp", ] LOCAL_INCLUDES += [ - '/js/xpconnect/src', + "/js/xpconnect/src", ] -include('/ipc/chromium/chromium-config.mozbuild') +include("/ipc/chromium/chromium-config.mozbuild") -MOCHITEST_MANIFESTS += ['tests/mochitest.ini'] +MOCHITEST_MANIFESTS += ["tests/mochitest.ini"] -FINAL_LIBRARY = 'xul' +FINAL_LIBRARY = "xul" diff --git a/dom/xhr/moz.build b/dom/xhr/moz.build index bef29099c28572..1c9b5c63fb53e0 100644 --- a/dom/xhr/moz.build +++ b/dom/xhr/moz.build @@ -8,32 +8,32 @@ with Files("**"): BUG_COMPONENT = ("Core", "DOM: Networking") EXPORTS.mozilla.dom += [ - 'XMLHttpRequest.h', - 'XMLHttpRequestEventTarget.h', - 'XMLHttpRequestMainThread.h', - 'XMLHttpRequestString.h', - 'XMLHttpRequestUpload.h', + "XMLHttpRequest.h", + "XMLHttpRequestEventTarget.h", + "XMLHttpRequestMainThread.h", + "XMLHttpRequestString.h", + "XMLHttpRequestUpload.h", ] UNIFIED_SOURCES += [ - 'XMLHttpRequest.cpp', - 'XMLHttpRequestEventTarget.cpp', - 'XMLHttpRequestMainThread.cpp', - 'XMLHttpRequestString.cpp', - 'XMLHttpRequestUpload.cpp', - 'XMLHttpRequestWorker.cpp', + "XMLHttpRequest.cpp", + "XMLHttpRequestEventTarget.cpp", + "XMLHttpRequestMainThread.cpp", + "XMLHttpRequestString.cpp", + "XMLHttpRequestUpload.cpp", + "XMLHttpRequestWorker.cpp", ] LOCAL_INCLUDES += [ - '/dom/base', - '/dom/file', - '/netwerk/base', + "/dom/base", + "/dom/file", + "/netwerk/base", ] -MOCHITEST_MANIFESTS += [ 'tests/mochitest.ini' ] +MOCHITEST_MANIFESTS += ["tests/mochitest.ini"] -BROWSER_CHROME_MANIFESTS += [ 'tests/browser.ini' ] +BROWSER_CHROME_MANIFESTS += ["tests/browser.ini"] -include('/ipc/chromium/chromium-config.mozbuild') +include("/ipc/chromium/chromium-config.mozbuild") -FINAL_LIBRARY = 'xul' +FINAL_LIBRARY = "xul" diff --git a/dom/xml/moz.build b/dom/xml/moz.build index 000038d7408cac..6b9a2c5576f3a5 100644 --- a/dom/xml/moz.build +++ b/dom/xml/moz.build @@ -7,44 +7,44 @@ with Files("**"): BUG_COMPONENT = ("Core", "XML") -DIRS += ['resources'] +DIRS += ["resources"] -MOCHITEST_MANIFESTS += ['test/mochitest.ini'] +MOCHITEST_MANIFESTS += ["test/mochitest.ini"] EXPORTS += [ - 'nsIXMLContentSink.h', + "nsIXMLContentSink.h", ] EXPORTS.mozilla.dom += [ - 'CDATASection.h', - 'nsXMLElement.h', - 'ProcessingInstruction.h', - 'XMLDocument.h', - 'XMLStylesheetProcessingInstruction.h', + "CDATASection.h", + "nsXMLElement.h", + "ProcessingInstruction.h", + "XMLDocument.h", + "XMLStylesheetProcessingInstruction.h", ] UNIFIED_SOURCES += [ - 'CDATASection.cpp', - 'nsXMLContentSink.cpp', - 'nsXMLElement.cpp', - 'nsXMLFragmentContentSink.cpp', - 'nsXMLPrettyPrinter.cpp', - 'ProcessingInstruction.cpp', - 'XMLDocument.cpp', - 'XMLStylesheetProcessingInstruction.cpp', + "CDATASection.cpp", + "nsXMLContentSink.cpp", + "nsXMLElement.cpp", + "nsXMLFragmentContentSink.cpp", + "nsXMLPrettyPrinter.cpp", + "ProcessingInstruction.cpp", + "XMLDocument.cpp", + "XMLStylesheetProcessingInstruction.cpp", ] -FINAL_LIBRARY = 'xul' +FINAL_LIBRARY = "xul" LOCAL_INCLUDES += [ - '/caps', - '/dom/base', - '/dom/html', - '/dom/xslt/base', - '/dom/xul', - '/layout/style', + "/caps", + "/dom/base", + "/dom/html", + "/dom/xslt/base", + "/dom/xul", + "/layout/style", ] RESOURCE_FILES.dtd += [ - 'htmlmathml-f.ent', + "htmlmathml-f.ent", ] diff --git a/dom/xml/resources/moz.build b/dom/xml/resources/moz.build index eb4454d28f88bf..d988c0ff9b162c 100644 --- a/dom/xml/resources/moz.build +++ b/dom/xml/resources/moz.build @@ -4,4 +4,4 @@ # License, v. 2.0. 
If a copy of the MPL was not distributed with this # file, You can obtain one at http://mozilla.org/MPL/2.0/. -JAR_MANIFESTS += ['jar.mn'] \ No newline at end of file +JAR_MANIFESTS += ["jar.mn"] diff --git a/dom/xslt/base/moz.build b/dom/xslt/base/moz.build index 701e1cc3fe0817..f3f5d67e41bfe3 100644 --- a/dom/xslt/base/moz.build +++ b/dom/xslt/base/moz.build @@ -5,22 +5,22 @@ # file, You can obtain one at http://mozilla.org/MPL/2.0/. UNIFIED_SOURCES += [ - 'txDouble.cpp', - 'txExpandedName.cpp', - 'txExpandedNameMap.cpp', - 'txList.cpp', - 'txNamespaceMap.cpp', - 'txURIUtils.cpp', + "txDouble.cpp", + "txExpandedName.cpp", + "txExpandedNameMap.cpp", + "txList.cpp", + "txNamespaceMap.cpp", + "txURIUtils.cpp", ] LOCAL_INCLUDES += [ - '..', - '../xml', - '../xpath', - '../xslt', + "..", + "../xml", + "../xpath", + "../xslt", ] -FINAL_LIBRARY = 'xul' +FINAL_LIBRARY = "xul" -if CONFIG['CC_TYPE'] in ('clang', 'gcc'): - CXXFLAGS += ['-Wno-error=shadow'] +if CONFIG["CC_TYPE"] in ("clang", "gcc"): + CXXFLAGS += ["-Wno-error=shadow"] diff --git a/dom/xslt/moz.build b/dom/xslt/moz.build index c9b5839c95fa0e..034a58ecc6cadb 100644 --- a/dom/xslt/moz.build +++ b/dom/xslt/moz.build @@ -8,15 +8,15 @@ with Files("**"): BUG_COMPONENT = ("Core", "XSLT") EXPORTS += [ - 'nsIDocumentTransformer.h', + "nsIDocumentTransformer.h", ] DIRS += [ - 'base', - 'xml', - 'xpath', - 'xslt', + "base", + "xml", + "xpath", + "xslt", ] -if CONFIG['ENABLE_TESTS']: - MOCHITEST_MANIFESTS += ['tests/mochitest/mochitest.ini'] +if CONFIG["ENABLE_TESTS"]: + MOCHITEST_MANIFESTS += ["tests/mochitest/mochitest.ini"] diff --git a/dom/xslt/xml/moz.build b/dom/xslt/xml/moz.build index f20a27bba3492f..a8f33bd35038e6 100644 --- a/dom/xslt/xml/moz.build +++ b/dom/xslt/xml/moz.build @@ -5,15 +5,15 @@ # file, You can obtain one at http://mozilla.org/MPL/2.0/. UNIFIED_SOURCES += [ - 'txXMLParser.cpp', - 'txXMLUtils.cpp', + "txXMLParser.cpp", + "txXMLUtils.cpp", ] LOCAL_INCLUDES += [ - '../base', - '../xpath', - '../xslt', - '/dom/base', + "../base", + "../xpath", + "../xslt", + "/dom/base", ] -FINAL_LIBRARY = 'xul' +FINAL_LIBRARY = "xul" diff --git a/dom/xslt/xpath/moz.build b/dom/xslt/xpath/moz.build index 664d3dbc16808e..483a7d25959916 100644 --- a/dom/xslt/xpath/moz.build +++ b/dom/xslt/xpath/moz.build @@ -5,57 +5,57 @@ # file, You can obtain one at http://mozilla.org/MPL/2.0/. 
EXPORTS.mozilla.dom += [ - 'txIXPathContext.h', - 'XPathEvaluator.h', - 'XPathExpression.h', - 'XPathResult.h', + "txIXPathContext.h", + "XPathEvaluator.h", + "XPathExpression.h", + "XPathResult.h", ] UNIFIED_SOURCES += [ - 'txBooleanExpr.cpp', - 'txBooleanResult.cpp', - 'txCoreFunctionCall.cpp', - 'txErrorExpr.cpp', - 'txExpr.cpp', - 'txExprLexer.cpp', - 'txExprParser.cpp', - 'txFilterExpr.cpp', - 'txForwardContext.cpp', - 'txFunctionCall.cpp', - 'txLiteralExpr.cpp', - 'txLocationStep.cpp', - 'txMozillaXPathTreeWalker.cpp', - 'txNamedAttributeStep.cpp', - 'txNameTest.cpp', - 'txNodeSet.cpp', - 'txNodeSetContext.cpp', - 'txNodeTypeTest.cpp', - 'txNumberExpr.cpp', - 'txNumberResult.cpp', - 'txPathExpr.cpp', - 'txPredicatedNodeTest.cpp', - 'txPredicateList.cpp', - 'txRelationalExpr.cpp', - 'txResultRecycler.cpp', - 'txRootExpr.cpp', - 'txStringResult.cpp', - 'txUnaryExpr.cpp', - 'txUnionExpr.cpp', - 'txUnionNodeTest.cpp', - 'txVariableRefExpr.cpp', - 'txXPathOptimizer.cpp', - 'XPathEvaluator.cpp', - 'XPathExpression.cpp', - 'XPathResult.cpp', + "txBooleanExpr.cpp", + "txBooleanResult.cpp", + "txCoreFunctionCall.cpp", + "txErrorExpr.cpp", + "txExpr.cpp", + "txExprLexer.cpp", + "txExprParser.cpp", + "txFilterExpr.cpp", + "txForwardContext.cpp", + "txFunctionCall.cpp", + "txLiteralExpr.cpp", + "txLocationStep.cpp", + "txMozillaXPathTreeWalker.cpp", + "txNamedAttributeStep.cpp", + "txNameTest.cpp", + "txNodeSet.cpp", + "txNodeSetContext.cpp", + "txNodeTypeTest.cpp", + "txNumberExpr.cpp", + "txNumberResult.cpp", + "txPathExpr.cpp", + "txPredicatedNodeTest.cpp", + "txPredicateList.cpp", + "txRelationalExpr.cpp", + "txResultRecycler.cpp", + "txRootExpr.cpp", + "txStringResult.cpp", + "txUnaryExpr.cpp", + "txUnionExpr.cpp", + "txUnionNodeTest.cpp", + "txVariableRefExpr.cpp", + "txXPathOptimizer.cpp", + "XPathEvaluator.cpp", + "XPathExpression.cpp", + "XPathResult.cpp", ] LOCAL_INCLUDES += [ - '../base', - '../xml', - '../xslt', + "../base", + "../xml", + "../xslt", ] -FINAL_LIBRARY = 'xul' +FINAL_LIBRARY = "xul" -if CONFIG['CC_TYPE'] in ('clang', 'gcc'): - CXXFLAGS += ['-Wno-error=shadow'] +if CONFIG["CC_TYPE"] in ("clang", "gcc"): + CXXFLAGS += ["-Wno-error=shadow"] diff --git a/dom/xslt/xslt/moz.build b/dom/xslt/xslt/moz.build index cb57433bbcb6ed..ab1dc7009635d2 100644 --- a/dom/xslt/xslt/moz.build +++ b/dom/xslt/xslt/moz.build @@ -4,62 +4,60 @@ # License, v. 2.0. If a copy of the MPL was not distributed with this # file, You can obtain one at http://mozilla.org/MPL/2.0/. 
-EXPORTS.mozilla.dom += [ - 'txMozillaXSLTProcessor.h' -] +EXPORTS.mozilla.dom += ["txMozillaXSLTProcessor.h"] -XPIDL_MODULE = 'dom_xslt' +XPIDL_MODULE = "dom_xslt" XPIDL_SOURCES += [ - 'txIEXSLTFunctions.idl', + "txIEXSLTFunctions.idl", ] UNIFIED_SOURCES += [ - 'txBufferingHandler.cpp', - 'txCurrentFunctionCall.cpp', - 'txDocumentFunctionCall.cpp', - 'txExecutionState.cpp', - 'txEXSLTFunctions.cpp', - 'txFormatNumberFunctionCall.cpp', - 'txGenerateIdFunctionCall.cpp', - 'txInstructions.cpp', - 'txKeyFunctionCall.cpp', - 'txMozillaStylesheetCompiler.cpp', - 'txMozillaTextOutput.cpp', - 'txMozillaXMLOutput.cpp', - 'txMozillaXSLTProcessor.cpp', - 'txNodeSorter.cpp', - 'txOutputFormat.cpp', - 'txPatternOptimizer.cpp', - 'txPatternParser.cpp', - 'txRtfHandler.cpp', - 'txStylesheet.cpp', - 'txStylesheetCompileHandlers.cpp', - 'txStylesheetCompiler.cpp', - 'txTextHandler.cpp', - 'txToplevelItems.cpp', - 'txUnknownHandler.cpp', - 'txXPathResultComparator.cpp', - 'txXSLTEnvironmentFunctionCall.cpp', - 'txXSLTNumber.cpp', - 'txXSLTNumberCounters.cpp', - 'txXSLTPatterns.cpp', - 'txXSLTProcessor.cpp', + "txBufferingHandler.cpp", + "txCurrentFunctionCall.cpp", + "txDocumentFunctionCall.cpp", + "txExecutionState.cpp", + "txEXSLTFunctions.cpp", + "txFormatNumberFunctionCall.cpp", + "txGenerateIdFunctionCall.cpp", + "txInstructions.cpp", + "txKeyFunctionCall.cpp", + "txMozillaStylesheetCompiler.cpp", + "txMozillaTextOutput.cpp", + "txMozillaXMLOutput.cpp", + "txMozillaXSLTProcessor.cpp", + "txNodeSorter.cpp", + "txOutputFormat.cpp", + "txPatternOptimizer.cpp", + "txPatternParser.cpp", + "txRtfHandler.cpp", + "txStylesheet.cpp", + "txStylesheetCompileHandlers.cpp", + "txStylesheetCompiler.cpp", + "txTextHandler.cpp", + "txToplevelItems.cpp", + "txUnknownHandler.cpp", + "txXPathResultComparator.cpp", + "txXSLTEnvironmentFunctionCall.cpp", + "txXSLTNumber.cpp", + "txXSLTNumberCounters.cpp", + "txXSLTPatterns.cpp", + "txXSLTProcessor.cpp", ] EXTRA_JS_MODULES += [ - 'txEXSLTRegExFunctions.jsm', + "txEXSLTRegExFunctions.jsm", ] LOCAL_INCLUDES += [ - '../base', - '../xml', - '../xpath', - '/dom/base', - '/js/xpconnect/src', + "../base", + "../xml", + "../xpath", + "/dom/base", + "/js/xpconnect/src", ] -FINAL_LIBRARY = 'xul' +FINAL_LIBRARY = "xul" -if CONFIG['CC_TYPE'] in ('clang', 'gcc'): - CXXFLAGS += ['-Wno-error=shadow'] +if CONFIG["CC_TYPE"] in ("clang", "gcc"): + CXXFLAGS += ["-Wno-error=shadow"] diff --git a/dom/xul/moz.build b/dom/xul/moz.build index 290b18bc9d1cdf..155fece3899bd8 100644 --- a/dom/xul/moz.build +++ b/dom/xul/moz.build @@ -7,83 +7,83 @@ with Files("**"): BUG_COMPONENT = ("Core", "XUL") -if CONFIG['MOZ_BUILD_APP'] == 'browser': - DEFINES['MOZ_BREAK_XUL_OVERLAYS'] = True +if CONFIG["MOZ_BUILD_APP"] == "browser": + DEFINES["MOZ_BREAK_XUL_OVERLAYS"] = True -MOCHITEST_CHROME_MANIFESTS += ['test/chrome.ini'] -MOCHITEST_MANIFESTS += ['test/mochitest.ini'] +MOCHITEST_CHROME_MANIFESTS += ["test/chrome.ini"] +MOCHITEST_MANIFESTS += ["test/mochitest.ini"] -if CONFIG['MOZ_XUL']: +if CONFIG["MOZ_XUL"]: EXPORTS += [ - 'nsXULCommandDispatcher.h', - 'nsXULElement.h', - 'nsXULPrototypeDocument.h', - 'nsXULSortService.h', + "nsXULCommandDispatcher.h", + "nsXULElement.h", + "nsXULPrototypeDocument.h", + "nsXULSortService.h", ] EXPORTS.mozilla.dom += [ - 'XULBroadcastManager.h', - 'XULFrameElement.h', - 'XULMenuElement.h', - 'XULPersist.h', - 'XULPopupElement.h', - 'XULTextElement.h', - 'XULTooltipElement.h', - 'XULTreeElement.h', + "XULBroadcastManager.h", + "XULFrameElement.h", + "XULMenuElement.h", + 
"XULPersist.h", + "XULPopupElement.h", + "XULTextElement.h", + "XULTooltipElement.h", + "XULTreeElement.h", ] UNIFIED_SOURCES += [ - 'nsXULCommandDispatcher.cpp', - 'nsXULContentSink.cpp', - 'nsXULContentUtils.cpp', - 'nsXULElement.cpp', - 'nsXULPopupListener.cpp', - 'nsXULPrototypeCache.cpp', - 'nsXULPrototypeDocument.cpp', - 'nsXULSortService.cpp', - 'XULBroadcastManager.cpp', - 'XULFrameElement.cpp', - 'XULMenuElement.cpp', - 'XULPersist.cpp', - 'XULPopupElement.cpp', - 'XULTextElement.cpp', - 'XULTooltipElement.cpp', - 'XULTreeElement.cpp', + "nsXULCommandDispatcher.cpp", + "nsXULContentSink.cpp", + "nsXULContentUtils.cpp", + "nsXULElement.cpp", + "nsXULPopupListener.cpp", + "nsXULPrototypeCache.cpp", + "nsXULPrototypeDocument.cpp", + "nsXULSortService.cpp", + "XULBroadcastManager.cpp", + "XULFrameElement.cpp", + "XULMenuElement.cpp", + "XULPersist.cpp", + "XULPopupElement.cpp", + "XULTextElement.cpp", + "XULTooltipElement.cpp", + "XULTreeElement.cpp", ] XPIDL_SOURCES += [ - 'nsIBrowserController.idl', - 'nsIController.idl', - 'nsIControllers.idl', + "nsIBrowserController.idl", + "nsIController.idl", + "nsIControllers.idl", ] -XPIDL_MODULE = 'xul' +XPIDL_MODULE = "xul" EXPORTS.mozilla.dom += [ - 'ChromeObserver.h', + "ChromeObserver.h", ] UNIFIED_SOURCES += [ - 'ChromeObserver.cpp', - 'nsXULControllers.cpp', + "ChromeObserver.cpp", + "nsXULControllers.cpp", ] LOCAL_INCLUDES += [ - '/docshell/base', - '/dom/base', - '/dom/html', - '/dom/xml', - '/layout/base', - '/layout/generic', - '/layout/style', - '/layout/xul', - '/layout/xul/tree', + "/docshell/base", + "/dom/base", + "/dom/html", + "/dom/xml", + "/layout/base", + "/layout/generic", + "/layout/style", + "/layout/xul", + "/layout/xul/tree", ] -include('/ipc/chromium/chromium-config.mozbuild') +include("/ipc/chromium/chromium-config.mozbuild") -FINAL_LIBRARY = 'xul' +FINAL_LIBRARY = "xul" -if CONFIG['CC_TYPE'] in ('clang', 'gcc'): - CXXFLAGS += ['-Wno-error=shadow'] +if CONFIG["CC_TYPE"] in ("clang", "gcc"): + CXXFLAGS += ["-Wno-error=shadow"] diff --git a/editor/composer/moz.build b/editor/composer/moz.build index 62746156f0484b..48335a27046b7e 100644 --- a/editor/composer/moz.build +++ b/editor/composer/moz.build @@ -4,60 +4,60 @@ # License, v. 2.0. If a copy of the MPL was not distributed with this # file, You can obtain one at http://mozilla.org/MPL/2.0/. -MOCHITEST_MANIFESTS += ['test/mochitest.ini'] +MOCHITEST_MANIFESTS += ["test/mochitest.ini"] -MOCHITEST_CHROME_MANIFESTS += ['test/chrome.ini'] +MOCHITEST_CHROME_MANIFESTS += ["test/chrome.ini"] XPIDL_SOURCES += [ - 'nsIEditingSession.idl', + "nsIEditingSession.idl", ] -XPIDL_MODULE = 'composer' +XPIDL_MODULE = "composer" UNIFIED_SOURCES += [ - 'ComposerCommandsUpdater.cpp', - 'nsEditingSession.cpp', + "ComposerCommandsUpdater.cpp", + "nsEditingSession.cpp", ] EXPORTS += [ - 'nsEditingSession.h', + "nsEditingSession.h", ] EXPORTS.mozilla += [ - 'ComposerCommandsUpdater.h', + "ComposerCommandsUpdater.h", ] # Needed because we include HTMLEditor.h which indirectly includes Document.h LOCAL_INCLUDES += [ - '/dom/base', - '/dom/html', # For nsHTMLDocument - '/editor/spellchecker', # nsComposeTxtSrvFilter.h - '/layout/style', # For things nsHTMLDocument includes. + "/dom/base", + "/dom/html", # For nsHTMLDocument + "/editor/spellchecker", # nsComposeTxtSrvFilter.h + "/layout/style", # For things nsHTMLDocument includes. 
] -FINAL_LIBRARY = 'xul' +FINAL_LIBRARY = "xul" RESOURCE_FILES += [ - 'res/EditorOverride.css', - 'res/grabber.gif', - 'res/table-add-column-after-active.gif', - 'res/table-add-column-after-hover.gif', - 'res/table-add-column-after.gif', - 'res/table-add-column-before-active.gif', - 'res/table-add-column-before-hover.gif', - 'res/table-add-column-before.gif', - 'res/table-add-row-after-active.gif', - 'res/table-add-row-after-hover.gif', - 'res/table-add-row-after.gif', - 'res/table-add-row-before-active.gif', - 'res/table-add-row-before-hover.gif', - 'res/table-add-row-before.gif', - 'res/table-remove-column-active.gif', - 'res/table-remove-column-hover.gif', - 'res/table-remove-column.gif', - 'res/table-remove-row-active.gif', - 'res/table-remove-row-hover.gif', - 'res/table-remove-row.gif', + "res/EditorOverride.css", + "res/grabber.gif", + "res/table-add-column-after-active.gif", + "res/table-add-column-after-hover.gif", + "res/table-add-column-after.gif", + "res/table-add-column-before-active.gif", + "res/table-add-column-before-hover.gif", + "res/table-add-column-before.gif", + "res/table-add-row-after-active.gif", + "res/table-add-row-after-hover.gif", + "res/table-add-row-after.gif", + "res/table-add-row-before-active.gif", + "res/table-add-row-before-hover.gif", + "res/table-add-row-before.gif", + "res/table-remove-column-active.gif", + "res/table-remove-column-hover.gif", + "res/table-remove-column.gif", + "res/table-remove-row-active.gif", + "res/table-remove-row-hover.gif", + "res/table-remove-row.gif", ] -if CONFIG['CC_TYPE'] in ('clang', 'gcc'): - CXXFLAGS += ['-Wno-error=shadow'] +if CONFIG["CC_TYPE"] in ("clang", "gcc"): + CXXFLAGS += ["-Wno-error=shadow"] diff --git a/editor/libeditor/moz.build b/editor/libeditor/moz.build index 2a8886f8d05df0..35517650c056d2 100644 --- a/editor/libeditor/moz.build +++ b/editor/libeditor/moz.build @@ -5,93 +5,93 @@ # file, You can obtain one at http://mozilla.org/MPL/2.0/. 
MOCHITEST_MANIFESTS += [ - 'tests/browserscope/mochitest.ini', - 'tests/mochitest.ini', + "tests/browserscope/mochitest.ini", + "tests/mochitest.ini", ] -MOCHITEST_CHROME_MANIFESTS += ['tests/chrome.ini'] +MOCHITEST_CHROME_MANIFESTS += ["tests/chrome.ini"] -BROWSER_CHROME_MANIFESTS += ['tests/browser.ini'] +BROWSER_CHROME_MANIFESTS += ["tests/browser.ini"] EXPORTS.mozilla += [ - 'ChangeStyleTransaction.h', - 'CSSEditUtils.h', - 'EditAction.h', - 'EditorBase.h', - 'EditorCommands.h', - 'EditorController.h', - 'EditorDOMPoint.h', - 'EditorUtils.h', - 'EditTransactionBase.h', - 'HTMLEditor.h', - 'HTMLEditorController.h', - 'ManualNAC.h', - 'SelectionState.h', - 'TextEditor.h', - 'TypeInState.h', + "ChangeStyleTransaction.h", + "CSSEditUtils.h", + "EditAction.h", + "EditorBase.h", + "EditorCommands.h", + "EditorController.h", + "EditorDOMPoint.h", + "EditorUtils.h", + "EditTransactionBase.h", + "HTMLEditor.h", + "HTMLEditorController.h", + "ManualNAC.h", + "SelectionState.h", + "TextEditor.h", + "TypeInState.h", ] UNIFIED_SOURCES += [ - 'ChangeAttributeTransaction.cpp', - 'ChangeStyleTransaction.cpp', - 'CompositionTransaction.cpp', - 'CreateElementTransaction.cpp', - 'CSSEditUtils.cpp', - 'DeleteNodeTransaction.cpp', - 'DeleteRangeTransaction.cpp', - 'DeleteTextTransaction.cpp', - 'EditAggregateTransaction.cpp', - 'EditorBase.cpp', - 'EditorCommands.cpp', - 'EditorController.cpp', - 'EditorEventListener.cpp', - 'EditorUtils.cpp', - 'EditTransactionBase.cpp', - 'HTMLAbsPositionEditor.cpp', - 'HTMLAnonymousNodeEditor.cpp', - 'HTMLEditor.cpp', - 'HTMLEditorCommands.cpp', - 'HTMLEditorController.cpp', - 'HTMLEditorDataTransfer.cpp', - 'HTMLEditorDocumentCommands.cpp', - 'HTMLEditorEventListener.cpp', - 'HTMLEditorObjectResizer.cpp', - 'HTMLEditSubActionHandler.cpp', - 'HTMLEditUtils.cpp', - 'HTMLInlineTableEditor.cpp', - 'HTMLStyleEditor.cpp', - 'HTMLTableEditor.cpp', - 'InsertNodeTransaction.cpp', - 'InsertTextTransaction.cpp', - 'InternetCiter.cpp', - 'JoinNodeTransaction.cpp', - 'PlaceholderTransaction.cpp', - 'ReplaceTextTransaction.cpp', - 'SelectionState.cpp', - 'SplitNodeTransaction.cpp', - 'TextEditor.cpp', - 'TextEditorDataTransfer.cpp', - 'TextEditSubActionHandler.cpp', - 'TypeInState.cpp', - 'WSRunObject.cpp', + "ChangeAttributeTransaction.cpp", + "ChangeStyleTransaction.cpp", + "CompositionTransaction.cpp", + "CreateElementTransaction.cpp", + "CSSEditUtils.cpp", + "DeleteNodeTransaction.cpp", + "DeleteRangeTransaction.cpp", + "DeleteTextTransaction.cpp", + "EditAggregateTransaction.cpp", + "EditorBase.cpp", + "EditorCommands.cpp", + "EditorController.cpp", + "EditorEventListener.cpp", + "EditorUtils.cpp", + "EditTransactionBase.cpp", + "HTMLAbsPositionEditor.cpp", + "HTMLAnonymousNodeEditor.cpp", + "HTMLEditor.cpp", + "HTMLEditorCommands.cpp", + "HTMLEditorController.cpp", + "HTMLEditorDataTransfer.cpp", + "HTMLEditorDocumentCommands.cpp", + "HTMLEditorEventListener.cpp", + "HTMLEditorObjectResizer.cpp", + "HTMLEditSubActionHandler.cpp", + "HTMLEditUtils.cpp", + "HTMLInlineTableEditor.cpp", + "HTMLStyleEditor.cpp", + "HTMLTableEditor.cpp", + "InsertNodeTransaction.cpp", + "InsertTextTransaction.cpp", + "InternetCiter.cpp", + "JoinNodeTransaction.cpp", + "PlaceholderTransaction.cpp", + "ReplaceTextTransaction.cpp", + "SelectionState.cpp", + "SplitNodeTransaction.cpp", + "TextEditor.cpp", + "TextEditorDataTransfer.cpp", + "TextEditSubActionHandler.cpp", + "TypeInState.cpp", + "WSRunObject.cpp", ] LOCAL_INCLUDES += [ - '/dom/base', - '/dom/html', - '/extensions/spellcheck/src', - 
'/layout/generic', - '/layout/style', - '/layout/tables', - '/layout/xul', + "/dom/base", + "/dom/html", + "/extensions/spellcheck/src", + "/layout/generic", + "/layout/style", + "/layout/tables", + "/layout/xul", ] -include('/ipc/chromium/chromium-config.mozbuild') +include("/ipc/chromium/chromium-config.mozbuild") -FINAL_LIBRARY = 'xul' +FINAL_LIBRARY = "xul" -if CONFIG['CC_TYPE'] in ('clang', 'gcc'): - CXXFLAGS += ['-Wno-error=shadow'] +if CONFIG["CC_TYPE"] in ("clang", "gcc"): + CXXFLAGS += ["-Wno-error=shadow"] with Files("tests/*1151186*"): BUG_COMPONENT = ("Core", "DOM: UI Events & Focus Handling") diff --git a/editor/moz.build b/editor/moz.build index 79f8ec575d1736..d356f21e77e359 100644 --- a/editor/moz.build +++ b/editor/moz.build @@ -5,31 +5,31 @@ # file, You can obtain one at http://mozilla.org/MPL/2.0/. DIRS += [ - 'libeditor', - 'spellchecker', - 'txmgr', - 'composer', + "libeditor", + "spellchecker", + "txmgr", + "composer", ] XPIDL_SOURCES += [ - 'nsIDocumentStateListener.idl', - 'nsIEditActionListener.idl', - 'nsIEditor.idl', - 'nsIEditorMailSupport.idl', - 'nsIEditorObserver.idl', - 'nsIEditorSpellCheck.idl', - 'nsIHTMLAbsPosEditor.idl', - 'nsIHTMLEditor.idl', - 'nsIHTMLInlineTableEditor.idl', - 'nsIHTMLObjectResizer.idl', - 'nsITableEditor.idl', + "nsIDocumentStateListener.idl", + "nsIEditActionListener.idl", + "nsIEditor.idl", + "nsIEditorMailSupport.idl", + "nsIEditorObserver.idl", + "nsIEditorSpellCheck.idl", + "nsIHTMLAbsPosEditor.idl", + "nsIHTMLEditor.idl", + "nsIHTMLInlineTableEditor.idl", + "nsIHTMLObjectResizer.idl", + "nsITableEditor.idl", ] -XPIDL_MODULE = 'editor' +XPIDL_MODULE = "editor" TESTING_JS_MODULES += [ - 'AsyncSpellCheckTestHelper.jsm', + "AsyncSpellCheckTestHelper.jsm", ] -with Files('**'): - BUG_COMPONENT = ('Core', 'DOM: Editor') +with Files("**"): + BUG_COMPONENT = ("Core", "DOM: Editor") diff --git a/editor/spellchecker/moz.build b/editor/spellchecker/moz.build index ee5b677b3da82b..d7ba9dfa6dbf67 100644 --- a/editor/spellchecker/moz.build +++ b/editor/spellchecker/moz.build @@ -4,24 +4,24 @@ # License, v. 2.0. If a copy of the MPL was not distributed with this # file, You can obtain one at http://mozilla.org/MPL/2.0/. -MOCHITEST_MANIFESTS += ['tests/mochitest.ini'] +MOCHITEST_MANIFESTS += ["tests/mochitest.ini"] XPIDL_SOURCES += [ - 'nsIInlineSpellChecker.idl', + "nsIInlineSpellChecker.idl", ] -XPIDL_MODULE = 'txtsvc' +XPIDL_MODULE = "txtsvc" EXPORTS.mozilla += [ - 'EditorSpellCheck.h', - 'TextServicesDocument.h', + "EditorSpellCheck.h", + "TextServicesDocument.h", ] UNIFIED_SOURCES += [ - 'EditorSpellCheck.cpp', - 'FilteredContentIterator.cpp', - 'nsComposeTxtSrvFilter.cpp', - 'TextServicesDocument.cpp', + "EditorSpellCheck.cpp", + "FilteredContentIterator.cpp", + "nsComposeTxtSrvFilter.cpp", + "TextServicesDocument.cpp", ] -FINAL_LIBRARY = 'xul' +FINAL_LIBRARY = "xul" diff --git a/editor/txmgr/moz.build b/editor/txmgr/moz.build index fe2ff825e38307..6e675021341759 100644 --- a/editor/txmgr/moz.build +++ b/editor/txmgr/moz.build @@ -4,29 +4,29 @@ # License, v. 2.0. If a copy of the MPL was not distributed with this # file, You can obtain one at http://mozilla.org/MPL/2.0/. 
-TEST_DIRS += ['tests'] +TEST_DIRS += ["tests"] XPIDL_SOURCES += [ - 'nsITransaction.idl', - 'nsITransactionListener.idl', - 'nsITransactionManager.idl', + "nsITransaction.idl", + "nsITransactionListener.idl", + "nsITransactionManager.idl", ] -XPIDL_MODULE = 'txmgr' +XPIDL_MODULE = "txmgr" EXPORTS += [ - 'nsTransactionManagerCID.h', + "nsTransactionManagerCID.h", ] EXPORTS.mozilla += [ - 'TransactionManager.h', - 'TransactionStack.h', + "TransactionManager.h", + "TransactionStack.h", ] UNIFIED_SOURCES += [ - 'TransactionItem.cpp', - 'TransactionManager.cpp', - 'TransactionStack.cpp', + "TransactionItem.cpp", + "TransactionManager.cpp", + "TransactionStack.cpp", ] -FINAL_LIBRARY = 'xul' +FINAL_LIBRARY = "xul" diff --git a/editor/txmgr/tests/moz.build b/editor/txmgr/tests/moz.build index 4fa18921d1db6e..9a8985dbc4aaaf 100644 --- a/editor/txmgr/tests/moz.build +++ b/editor/txmgr/tests/moz.build @@ -5,6 +5,6 @@ # file, You can obtain one at http://mozilla.org/MPL/2.0/. SOURCES += [ - 'TestTXMgr.cpp', + "TestTXMgr.cpp", ] -FINAL_LIBRARY = 'xul-gtest' +FINAL_LIBRARY = "xul-gtest" diff --git a/extensions/auth/moz.build b/extensions/auth/moz.build index 50234ad62b9b63..ac2a23fcf01b83 100644 --- a/extensions/auth/moz.build +++ b/extensions/auth/moz.build @@ -5,35 +5,35 @@ # file, You can obtain one at http://mozilla.org/MPL/2.0/. UNIFIED_SOURCES += [ - 'nsAuthGSSAPI.cpp', + "nsAuthGSSAPI.cpp", ] SOURCES += [ - 'nsAuthSASL.cpp', - 'nsHttpNegotiateAuth.cpp', # contains constants whose names conflict with constants in other files - 'nsIAuthModule.cpp', # includes windows.h recursively which conflicts with TimeStamp.h + "nsAuthSASL.cpp", + "nsHttpNegotiateAuth.cpp", # contains constants whose names conflict with constants in other files + "nsIAuthModule.cpp", # includes windows.h recursively which conflicts with TimeStamp.h ] -if CONFIG['OS_ARCH'] == 'WINNT': +if CONFIG["OS_ARCH"] == "WINNT": SOURCES += [ - 'nsAuthSSPI.cpp', + "nsAuthSSPI.cpp", ] - DEFINES['USE_SSPI'] = True + DEFINES["USE_SSPI"] = True else: UNIFIED_SOURCES += [ - 'nsAuthSambaNTLM.cpp', + "nsAuthSambaNTLM.cpp", ] LOCAL_INCLUDES += [ - '/netwerk/dns', # For nsDNSService2.h - '/security/manager/ssl', + "/netwerk/dns", # For nsDNSService2.h + "/security/manager/ssl", ] -FINAL_LIBRARY = 'xul' +FINAL_LIBRARY = "xul" -with Files('**'): - BUG_COMPONENT = ('Core', 'Networking') +with Files("**"): + BUG_COMPONENT = ("Core", "Networking") -include('/tools/fuzzing/libfuzzer-config.mozbuild') +include("/tools/fuzzing/libfuzzer-config.mozbuild") -include('/ipc/chromium/chromium-config.mozbuild') +include("/ipc/chromium/chromium-config.mozbuild") diff --git a/extensions/moz.build b/extensions/moz.build index 2d002650fe58dc..3e1ff5d7d1cbc2 100644 --- a/extensions/moz.build +++ b/extensions/moz.build @@ -4,7 +4,7 @@ # License, v. 2.0. If a copy of the MPL was not distributed with this # file, You can obtain one at http://mozilla.org/MPL/2.0/. -DIRS += CONFIG['MOZ_EXTENSIONS'] +DIRS += CONFIG["MOZ_EXTENSIONS"] -with Files('**'): - BUG_COMPONENT = ('Core', 'General') +with Files("**"): + BUG_COMPONENT = ("Core", "General") diff --git a/extensions/permissions/moz.build b/extensions/permissions/moz.build index e687407cb9b759..57ed3309bcfe1b 100644 --- a/extensions/permissions/moz.build +++ b/extensions/permissions/moz.build @@ -4,39 +4,39 @@ # License, v. 2.0. If a copy of the MPL was not distributed with this # file, You can obtain one at http://mozilla.org/MPL/2.0/. 
-TEST_DIRS += ['test'] +TEST_DIRS += ["test"] TESTING_JS_MODULES += [ - 'test/PermissionTestUtils.jsm', + "test/PermissionTestUtils.jsm", ] EXPORTS.mozilla += [ - 'Permission.h', - 'PermissionDelegateHandler.h', - 'PermissionDelegateIPCUtils.h', - 'PermissionManager.h', + "Permission.h", + "PermissionDelegateHandler.h", + "PermissionDelegateIPCUtils.h", + "PermissionManager.h", ] UNIFIED_SOURCES += [ - 'Permission.cpp', - 'PermissionDelegateHandler.cpp', - 'PermissionManager.cpp', + "Permission.cpp", + "PermissionDelegateHandler.cpp", + "PermissionManager.cpp", ] XPCOM_MANIFESTS += [ - 'components.conf', + "components.conf", ] LOCAL_INCLUDES += [ - '/caps', + "/caps", ] -include('/ipc/chromium/chromium-config.mozbuild') +include("/ipc/chromium/chromium-config.mozbuild") -FINAL_LIBRARY = 'xul' +FINAL_LIBRARY = "xul" -if CONFIG['CC_TYPE'] in ('clang', 'gcc'): - CXXFLAGS += ['-Wno-error=shadow'] +if CONFIG["CC_TYPE"] in ("clang", "gcc"): + CXXFLAGS += ["-Wno-error=shadow"] -with Files('**'): - BUG_COMPONENT = ('Core', 'Permission Manager') +with Files("**"): + BUG_COMPONENT = ("Core", "Permission Manager") diff --git a/extensions/permissions/test/gtest/moz.build b/extensions/permissions/test/gtest/moz.build index cd890cb13a8967..132c384597abbf 100644 --- a/extensions/permissions/test/gtest/moz.build +++ b/extensions/permissions/test/gtest/moz.build @@ -5,8 +5,7 @@ # file, You can obtain one at http://mozilla.org/MPL/2.0/. UNIFIED_SOURCES += [ - 'PermissionManagerTest.cpp', + "PermissionManagerTest.cpp", ] -FINAL_LIBRARY = 'xul-gtest' - +FINAL_LIBRARY = "xul-gtest" diff --git a/extensions/permissions/test/moz.build b/extensions/permissions/test/moz.build index d1b3cdd58e7f48..5c684595756ace 100644 --- a/extensions/permissions/test/moz.build +++ b/extensions/permissions/test/moz.build @@ -5,12 +5,11 @@ # file, You can obtain one at http://mozilla.org/MPL/2.0/. TEST_DIRS += [ - 'gtest', + "gtest", ] XPCSHELL_TESTS_MANIFESTS += [ - 'unit/xpcshell.ini', + "unit/xpcshell.ini", ] -BROWSER_CHROME_MANIFESTS += ['browser.ini'] - +BROWSER_CHROME_MANIFESTS += ["browser.ini"] diff --git a/extensions/pref/autoconfig/moz.build b/extensions/pref/autoconfig/moz.build index 4027ba61b07a36..0ef8aad2a9698a 100644 --- a/extensions/pref/autoconfig/moz.build +++ b/extensions/pref/autoconfig/moz.build @@ -4,8 +4,8 @@ # License, v. 2.0. If a copy of the MPL was not distributed with this # file, You can obtain one at http://mozilla.org/MPL/2.0/. -DIRS += ['src'] +DIRS += ["src"] -XPCSHELL_TESTS_MANIFESTS += ['test/unit/xpcshell.ini'] +XPCSHELL_TESTS_MANIFESTS += ["test/unit/xpcshell.ini"] -MARIONETTE_UNIT_MANIFESTS += ['test/marionette/manifest.ini'] +MARIONETTE_UNIT_MANIFESTS += ["test/marionette/manifest.ini"] diff --git a/extensions/pref/autoconfig/src/moz.build b/extensions/pref/autoconfig/src/moz.build index cb697f0c6615e9..12722cf6d7a24f 100644 --- a/extensions/pref/autoconfig/src/moz.build +++ b/extensions/pref/autoconfig/src/moz.build @@ -5,17 +5,17 @@ # file, You can obtain one at http://mozilla.org/MPL/2.0/. 
UNIFIED_SOURCES += [ - 'nsAutoConfig.cpp', - 'nsJSConfigTriggers.cpp', - 'nsReadConfig.cpp', + "nsAutoConfig.cpp", + "nsJSConfigTriggers.cpp", + "nsReadConfig.cpp", ] XPCOM_MANIFESTS += [ - 'components.conf', + "components.conf", ] -FINAL_LIBRARY = 'xul' +FINAL_LIBRARY = "xul" FINAL_TARGET_FILES.defaults.autoconfig += [ - 'prefcalls.js', + "prefcalls.js", ] diff --git a/extensions/pref/autoconfig/test/marionette/test_autoconfig.py b/extensions/pref/autoconfig/test/marionette/test_autoconfig.py index 98bee6eec61f26..2cf520f86af494 100644 --- a/extensions/pref/autoconfig/test/marionette/test_autoconfig.py +++ b/extensions/pref/autoconfig/test/marionette/test_autoconfig.py @@ -11,34 +11,41 @@ class TestAutoConfig(MarionetteTestCase): - def tearDown(self): self.marionette.quit(clean=True) - if hasattr(self, 'pref_file'): + if hasattr(self, "pref_file"): os.remove(self.pref_file) - if hasattr(self, 'autoconfig_file'): + if hasattr(self, "autoconfig_file"): os.remove(self.autoconfig_file) super(TestAutoConfig, self).tearDown() def pref_has_user_value(self, pref): with self.marionette.using_context("chrome"): - return self.marionette.execute_script(""" + return self.marionette.execute_script( + """ return Services.prefs.prefHasUserValue(arguments[0]); - """, script_args=(pref,)) + """, + script_args=(pref,), + ) def pref_is_locked(self, pref): with self.marionette.using_context("chrome"): - return self.marionette.execute_script(""" + return self.marionette.execute_script( + """ return Services.prefs.prefIsLocked(arguments[0]); - """, script_args=(pref,)) + """, + script_args=(pref,), + ) def test_autoconfig(self): with self.marionette.using_context("chrome"): - self.exe_dir = self.marionette.execute_script(""" + self.exe_dir = self.marionette.execute_script( + """ return Services.dirsvc.get("GreD", Ci.nsIFile).path; - """) + """ + ) self.marionette.quit() @@ -51,26 +58,46 @@ def test_autoconfig(self): self.marionette.start_session() with self.marionette.using_context("chrome"): - self.assertTrue(self.pref_has_user_value("_autoconfig_.test.userpref"), - "Pref should have user value") - - self.assertEqual(self.marionette.get_pref("_autoconfig_.test.userpref"), - "userpref", "User pref should be set") - - self.assertEqual(self.marionette.get_pref("_autoconfig_.test.defaultpref", True), - "defaultpref", "Default pref should be set") - - self.assertTrue(self.pref_is_locked("_autoconfig_.test.lockpref"), - "Pref should be locked") - - self.assertEqual(self.marionette.get_pref("_autoconfig_.test.lockpref"), - "lockpref", "Locked pref should be set") - - self.assertFalse(self.pref_is_locked("_autoconfig_.test.unlockpref"), - "Pref should be unlocked") - - self.assertEqual(self.marionette.get_pref("_autoconfig_.test.unlockpref"), - "unlockpref", "Unlocked pref should be set") - - self.assertFalse(self.pref_has_user_value("_autoconfig_.test.clearpref"), - "Pref should be cleared") + self.assertTrue( + self.pref_has_user_value("_autoconfig_.test.userpref"), + "Pref should have user value", + ) + + self.assertEqual( + self.marionette.get_pref("_autoconfig_.test.userpref"), + "userpref", + "User pref should be set", + ) + + self.assertEqual( + self.marionette.get_pref("_autoconfig_.test.defaultpref", True), + "defaultpref", + "Default pref should be set", + ) + + self.assertTrue( + self.pref_is_locked("_autoconfig_.test.lockpref"), + "Pref should be locked", + ) + + self.assertEqual( + self.marionette.get_pref("_autoconfig_.test.lockpref"), + "lockpref", + "Locked pref should be set", + ) + + 
self.assertFalse( + self.pref_is_locked("_autoconfig_.test.unlockpref"), + "Pref should be unlocked", + ) + + self.assertEqual( + self.marionette.get_pref("_autoconfig_.test.unlockpref"), + "unlockpref", + "Unlocked pref should be set", + ) + + self.assertFalse( + self.pref_has_user_value("_autoconfig_.test.clearpref"), + "Pref should be cleared", + ) diff --git a/extensions/pref/moz.build b/extensions/pref/moz.build index ab1f7770b91f6b..b0b3670440a016 100644 --- a/extensions/pref/moz.build +++ b/extensions/pref/moz.build @@ -5,7 +5,7 @@ # file, You can obtain one at http://mozilla.org/MPL/2.0/. -DIRS += ['autoconfig'] +DIRS += ["autoconfig"] -with Files('**'): - BUG_COMPONENT = ('Core', 'AutoConfig (Mission Control Desktop)') +with Files("**"): + BUG_COMPONENT = ("Core", "AutoConfig (Mission Control Desktop)") diff --git a/extensions/spellcheck/hunspell/glue/moz.build b/extensions/spellcheck/hunspell/glue/moz.build index 963adaced4a7b9..7d5a110d7d83ed 100644 --- a/extensions/spellcheck/hunspell/glue/moz.build +++ b/extensions/spellcheck/hunspell/glue/moz.build @@ -5,38 +5,38 @@ # file, You can obtain one at http://mozilla.org/MPL/2.0/. UNIFIED_SOURCES += [ - 'mozHunspell.cpp', - 'mozHunspellFileMgrHost.cpp', - 'RemoteSpellCheckEngineChild.cpp', - 'RemoteSpellCheckEngineParent.cpp', + "mozHunspell.cpp", + "mozHunspellFileMgrHost.cpp", + "RemoteSpellCheckEngineChild.cpp", + "RemoteSpellCheckEngineParent.cpp", ] -DEFINES['HUNSPELL_STATIC'] = True +DEFINES["HUNSPELL_STATIC"] = True -FINAL_LIBRARY = 'xul' +FINAL_LIBRARY = "xul" LOCAL_INCLUDES += [ - '../src', - '/dom/base', - '/extensions/spellcheck/src', + "../src", + "/dom/base", + "/extensions/spellcheck/src", ] -include('/ipc/chromium/chromium-config.mozbuild') -include('common.mozbuild') +include("/ipc/chromium/chromium-config.mozbuild") +include("common.mozbuild") HunspellIncludes() IPDL_SOURCES = [ - 'PRemoteSpellcheckEngine.ipdl', + "PRemoteSpellcheckEngine.ipdl", ] EXPORTS.mozilla += [ - 'RemoteSpellCheckEngineChild.h', - 'RemoteSpellCheckEngineParent.h', + "RemoteSpellCheckEngineChild.h", + "RemoteSpellCheckEngineParent.h", ] -if CONFIG['CC_TYPE'] in ('clang', 'clang-cl'): +if CONFIG["CC_TYPE"] in ("clang", "clang-cl"): CXXFLAGS += [ # We force-include mozHunspellAllocator.h from third-party code. - '-Wno-undefined-var-template', + "-Wno-undefined-var-template", ] diff --git a/extensions/spellcheck/hunspell/moz.build b/extensions/spellcheck/hunspell/moz.build index 972970dfaac1a5..1c31aea6ff18d3 100644 --- a/extensions/spellcheck/hunspell/moz.build +++ b/extensions/spellcheck/hunspell/moz.build @@ -4,7 +4,7 @@ # License, v. 2.0. If a copy of the MPL was not distributed with this # file, You can obtain one at http://mozilla.org/MPL/2.0/. -DIRS += ['glue', 'src'] +DIRS += ["glue", "src"] -if CONFIG['ENABLE_TESTS']: - XPCSHELL_TESTS_MANIFESTS += ['tests/unit/xpcshell.ini'] +if CONFIG["ENABLE_TESTS"]: + XPCSHELL_TESTS_MANIFESTS += ["tests/unit/xpcshell.ini"] diff --git a/extensions/spellcheck/idl/moz.build b/extensions/spellcheck/idl/moz.build index 595a8e24eb9941..caed8a2d58de81 100644 --- a/extensions/spellcheck/idl/moz.build +++ b/extensions/spellcheck/idl/moz.build @@ -5,9 +5,8 @@ # file, You can obtain one at http://mozilla.org/MPL/2.0/. 
XPIDL_SOURCES += [ - 'mozIPersonalDictionary.idl', - 'mozISpellCheckingEngine.idl', + "mozIPersonalDictionary.idl", + "mozISpellCheckingEngine.idl", ] -XPIDL_MODULE = 'spellchecker' - +XPIDL_MODULE = "spellchecker" diff --git a/extensions/spellcheck/locales/moz.build b/extensions/spellcheck/locales/moz.build index 13b5a7443b23c2..aae8e8ca67554f 100644 --- a/extensions/spellcheck/locales/moz.build +++ b/extensions/spellcheck/locales/moz.build @@ -5,6 +5,6 @@ # file, You can obtain one at http://mozilla.org/MPL/2.0/. LOCALIZED_FILES.dictionaries += [ - 'en-US/hunspell/*.aff', - 'en-US/hunspell/*.dic', + "en-US/hunspell/*.aff", + "en-US/hunspell/*.dic", ] diff --git a/extensions/spellcheck/moz.build b/extensions/spellcheck/moz.build index acaf73ab3f921d..7ed9b411161822 100644 --- a/extensions/spellcheck/moz.build +++ b/extensions/spellcheck/moz.build @@ -4,14 +4,14 @@ # License, v. 2.0. If a copy of the MPL was not distributed with this # file, You can obtain one at http://mozilla.org/MPL/2.0/. -DIRS += ['idl', 'hunspell', 'src'] +DIRS += ["idl", "hunspell", "src"] -if CONFIG['MOZ_WIDGET_TOOLKIT'] != 'android': +if CONFIG["MOZ_WIDGET_TOOLKIT"] != "android": # GeckoView disables hunspell spellchecker. - DIRS += ['locales'] + DIRS += ["locales"] -MOCHITEST_CHROME_MANIFESTS += ['tests/chrome/chrome.ini'] -MOCHITEST_MANIFESTS += ['tests/mochitest/mochitest.ini'] +MOCHITEST_CHROME_MANIFESTS += ["tests/chrome/chrome.ini"] +MOCHITEST_MANIFESTS += ["tests/mochitest/mochitest.ini"] -with Files('**'): - BUG_COMPONENT = ('Core', 'Spelling checker') +with Files("**"): + BUG_COMPONENT = ("Core", "Spelling checker") diff --git a/extensions/spellcheck/src/moz.build b/extensions/spellcheck/src/moz.build index 1fa03d1c566b66..dcc1d54f74d01e 100644 --- a/extensions/spellcheck/src/moz.build +++ b/extensions/spellcheck/src/moz.build @@ -4,30 +4,30 @@ # License, v. 2.0. If a copy of the MPL was not distributed with this # file, You can obtain one at http://mozilla.org/MPL/2.0/. -include('/ipc/chromium/chromium-config.mozbuild') +include("/ipc/chromium/chromium-config.mozbuild") UNIFIED_SOURCES += [ - 'mozEnglishWordUtils.cpp', - 'mozInlineSpellChecker.cpp', - 'mozInlineSpellWordUtil.cpp', - 'mozPersonalDictionary.cpp', - 'mozSpellChecker.cpp', + "mozEnglishWordUtils.cpp", + "mozInlineSpellChecker.cpp", + "mozInlineSpellWordUtil.cpp", + "mozPersonalDictionary.cpp", + "mozSpellChecker.cpp", ] XPCOM_MANIFESTS += [ - 'components.conf', + "components.conf", ] -FINAL_LIBRARY = 'xul' +FINAL_LIBRARY = "xul" LOCAL_INCLUDES += [ - '../hunspell/glue', - '../hunspell/src', - '/dom/base', + "../hunspell/glue", + "../hunspell/src", + "/dom/base", ] EXPORTS.mozilla += [ - 'mozInlineSpellChecker.h', - 'mozSpellChecker.h', + "mozInlineSpellChecker.h", + "mozSpellChecker.h", ] -if CONFIG['CC_TYPE'] in ('clang', 'gcc'): - CXXFLAGS += ['-Wno-error=shadow'] +if CONFIG["CC_TYPE"] in ("clang", "gcc"): + CXXFLAGS += ["-Wno-error=shadow"] diff --git a/extensions/universalchardet/moz.build b/extensions/universalchardet/moz.build index 874d696f957565..2d4b7badfc2031 100644 --- a/extensions/universalchardet/moz.build +++ b/extensions/universalchardet/moz.build @@ -4,7 +4,7 @@ # License, v. 2.0. If a copy of the MPL was not distributed with this # file, You can obtain one at http://mozilla.org/MPL/2.0/. 
-TEST_DIRS += ['tests'] +TEST_DIRS += ["tests"] -with Files('**'): - BUG_COMPONENT = ('Core', 'Internationalization') +with Files("**"): + BUG_COMPONENT = ("Core", "Internationalization") diff --git a/extensions/universalchardet/tests/moz.build b/extensions/universalchardet/tests/moz.build index 47f2ae4322f81d..1a7d5281ea3f09 100644 --- a/extensions/universalchardet/tests/moz.build +++ b/extensions/universalchardet/tests/moz.build @@ -4,5 +4,4 @@ # License, v. 2.0. If a copy of the MPL was not distributed with this # file, You can obtain one at http://mozilla.org/MPL/2.0/. -MOCHITEST_CHROME_MANIFESTS += ['chrome.ini'] - +MOCHITEST_CHROME_MANIFESTS += ["chrome.ini"] diff --git a/gfx/2d/moz.build b/gfx/2d/moz.build index f850d45ee07fa0..ed40639264c977 100644 --- a/gfx/2d/moz.build +++ b/gfx/2d/moz.build @@ -5,242 +5,242 @@ # file, You can obtain one at http://mozilla.org/MPL/2.0/. EXPORTS.mozilla += [ - 'GenericRefCounted.h', + "GenericRefCounted.h", ] EXPORTS.mozilla.gfx += [ - '2D.h', - 'BaseCoord.h', - 'BaseMargin.h', - 'BasePoint.h', - 'BasePoint3D.h', - 'BasePoint4D.h', - 'BaseRect.h', - 'BaseSize.h', - 'BezierUtils.h', - 'Blur.h', - 'BorrowedContext.h', - 'Coord.h', - 'CriticalSection.h', - 'DataSurfaceHelpers.h', - 'DrawEventRecorder.h', - 'DrawTargetOffset.h', - 'DrawTargetRecording.h', - 'DrawTargetTiled.h', - 'DrawTargetWrapAndRecord.h', - 'Filters.h', - 'FontVariation.h', - 'Helpers.h', - 'HelpersCairo.h', - 'InlineTranslator.h', - 'IterableArena.h', - 'Logging.h', - 'LoggingConstants.h', - 'Matrix.h', - 'MatrixFwd.h', - 'NumericTools.h', - 'PathHelpers.h', - 'PatternHelpers.h', - 'Point.h', - 'Polygon.h', - 'Quaternion.h', - 'RecordedEvent.h', - 'RecordingTypes.h', - 'Rect.h', - 'RectAbsolute.h', - 'Scale.h', - 'ScaleFactor.h', - 'ScaleFactors2D.h', - 'SourceSurfaceCairo.h', - 'SourceSurfaceCapture.h', - 'SourceSurfaceRawData.h', - 'StackArray.h', - 'Swizzle.h', - 'Tools.h', - 'Triangle.h', - 'Types.h', - 'UserData.h', + "2D.h", + "BaseCoord.h", + "BaseMargin.h", + "BasePoint.h", + "BasePoint3D.h", + "BasePoint4D.h", + "BaseRect.h", + "BaseSize.h", + "BezierUtils.h", + "Blur.h", + "BorrowedContext.h", + "Coord.h", + "CriticalSection.h", + "DataSurfaceHelpers.h", + "DrawEventRecorder.h", + "DrawTargetOffset.h", + "DrawTargetRecording.h", + "DrawTargetTiled.h", + "DrawTargetWrapAndRecord.h", + "Filters.h", + "FontVariation.h", + "Helpers.h", + "HelpersCairo.h", + "InlineTranslator.h", + "IterableArena.h", + "Logging.h", + "LoggingConstants.h", + "Matrix.h", + "MatrixFwd.h", + "NumericTools.h", + "PathHelpers.h", + "PatternHelpers.h", + "Point.h", + "Polygon.h", + "Quaternion.h", + "RecordedEvent.h", + "RecordingTypes.h", + "Rect.h", + "RectAbsolute.h", + "Scale.h", + "ScaleFactor.h", + "ScaleFactors2D.h", + "SourceSurfaceCairo.h", + "SourceSurfaceCapture.h", + "SourceSurfaceRawData.h", + "StackArray.h", + "Swizzle.h", + "Tools.h", + "Triangle.h", + "Types.h", + "UserData.h", ] -EXPORTS.mozilla.gfx += ['ssse3-scaler.h'] +EXPORTS.mozilla.gfx += ["ssse3-scaler.h"] -if CONFIG['MOZ_WIDGET_TOOLKIT'] in ('cocoa', 'uikit'): +if CONFIG["MOZ_WIDGET_TOOLKIT"] in ("cocoa", "uikit"): EXPORTS.mozilla.gfx += [ - 'MacIOSurface.h', - 'UnscaledFontMac.h', + "MacIOSurface.h", + "UnscaledFontMac.h", ] UNIFIED_SOURCES += [ - 'NativeFontResourceMac.cpp', - 'ScaledFontMac.cpp', + "NativeFontResourceMac.cpp", + "ScaledFontMac.cpp", ] -elif CONFIG['MOZ_WIDGET_TOOLKIT'] == 'windows': +elif CONFIG["MOZ_WIDGET_TOOLKIT"] == "windows": EXPORTS.mozilla.gfx += [ - 'dw-extra.h', - 'UnscaledFontDWrite.h', - 
'UnscaledFontGDI.h', + "dw-extra.h", + "UnscaledFontDWrite.h", + "UnscaledFontGDI.h", ] SOURCES += [ - 'ConicGradientEffectD2D1.cpp', - 'DrawTargetD2D1.cpp', - 'ExtendInputEffectD2D1.cpp', - 'FilterNodeD2D1.cpp', - 'NativeFontResourceDWrite.cpp', - 'NativeFontResourceGDI.cpp', - 'PathD2D.cpp', - 'RadialGradientEffectD2D1.cpp', - 'ScaledFontDWrite.cpp', - 'ScaledFontWin.cpp', - 'SourceSurfaceD2D1.cpp', + "ConicGradientEffectD2D1.cpp", + "DrawTargetD2D1.cpp", + "ExtendInputEffectD2D1.cpp", + "FilterNodeD2D1.cpp", + "NativeFontResourceDWrite.cpp", + "NativeFontResourceGDI.cpp", + "PathD2D.cpp", + "RadialGradientEffectD2D1.cpp", + "ScaledFontDWrite.cpp", + "ScaledFontWin.cpp", + "SourceSurfaceD2D1.cpp", ] - DEFINES['WIN32'] = True + DEFINES["WIN32"] = True -if CONFIG['MOZ_WIDGET_TOOLKIT'] in ('android', 'gtk'): +if CONFIG["MOZ_WIDGET_TOOLKIT"] in ("android", "gtk"): EXPORTS.mozilla.gfx += [ - 'UnscaledFontFreeType.h', + "UnscaledFontFreeType.h", ] SOURCES += [ - 'NativeFontResourceFreeType.cpp', - 'UnscaledFontFreeType.cpp', + "NativeFontResourceFreeType.cpp", + "UnscaledFontFreeType.cpp", ] -if CONFIG['MOZ_WIDGET_TOOLKIT'] == 'gtk': +if CONFIG["MOZ_WIDGET_TOOLKIT"] == "gtk": SOURCES += [ - 'ScaledFontFontconfig.cpp', + "ScaledFontFontconfig.cpp", ] -if CONFIG['MOZ_WIDGET_TOOLKIT'] == 'android': +if CONFIG["MOZ_WIDGET_TOOLKIT"] == "android": SOURCES += [ - 'ScaledFontFreeType.cpp', + "ScaledFontFreeType.cpp", ] -if CONFIG['MOZ_ENABLE_SKIA']: +if CONFIG["MOZ_ENABLE_SKIA"]: SOURCES += [ - 'ConvolutionFilter.cpp', - 'DrawTargetSkia.cpp', - 'PathSkia.cpp', - 'SourceSurfaceSkia.cpp', + "ConvolutionFilter.cpp", + "DrawTargetSkia.cpp", + "PathSkia.cpp", + "SourceSurfaceSkia.cpp", ] - if CONFIG['CC_TYPE'] == 'clang': + if CONFIG["CC_TYPE"] == "clang": # Suppress warnings from Skia header files. - SOURCES['DrawTargetSkia.cpp'].flags += ['-Wno-implicit-fallthrough'] - SOURCES['PathSkia.cpp'].flags += ['-Wno-implicit-fallthrough'] - SOURCES['SourceSurfaceSkia.cpp'].flags += ['-Wno-implicit-fallthrough'] + SOURCES["DrawTargetSkia.cpp"].flags += ["-Wno-implicit-fallthrough"] + SOURCES["PathSkia.cpp"].flags += ["-Wno-implicit-fallthrough"] + SOURCES["SourceSurfaceSkia.cpp"].flags += ["-Wno-implicit-fallthrough"] EXPORTS.mozilla.gfx += [ - 'ConvolutionFilter.h', - 'HelpersSkia.h', + "ConvolutionFilter.h", + "HelpersSkia.h", ] # Are we targeting x86 or x64? If so, build SSE2 files. -if CONFIG['INTEL_ARCHITECTURE']: +if CONFIG["INTEL_ARCHITECTURE"]: SOURCES += [ - 'BlurSSE2.cpp', - 'FilterProcessingSSE2.cpp', - 'ImageScalingSSE2.cpp', - 'ssse3-scaler.c', - 'SwizzleAVX2.cpp', - 'SwizzleSSE2.cpp', - 'SwizzleSSSE3.cpp', + "BlurSSE2.cpp", + "FilterProcessingSSE2.cpp", + "ImageScalingSSE2.cpp", + "ssse3-scaler.c", + "SwizzleAVX2.cpp", + "SwizzleSSE2.cpp", + "SwizzleSSSE3.cpp", ] - DEFINES['USE_SSE2'] = True + DEFINES["USE_SSE2"] = True # The file uses SSE2 intrinsics, so it needs special compile flags on some # compilers. 
- SOURCES['BlurSSE2.cpp'].flags += CONFIG['SSE2_FLAGS'] - SOURCES['FilterProcessingSSE2.cpp'].flags += CONFIG['SSE2_FLAGS'] - SOURCES['ImageScalingSSE2.cpp'].flags += CONFIG['SSE2_FLAGS'] - SOURCES['SwizzleAVX2.cpp'].flags += ['-mavx2'] - SOURCES['SwizzleSSE2.cpp'].flags += CONFIG['SSE2_FLAGS'] - SOURCES['SwizzleSSSE3.cpp'].flags += CONFIG['SSSE3_FLAGS'] - SOURCES['ssse3-scaler.c'].flags += CONFIG['SSSE3_FLAGS'] -elif CONFIG['CPU_ARCH'].startswith('mips'): + SOURCES["BlurSSE2.cpp"].flags += CONFIG["SSE2_FLAGS"] + SOURCES["FilterProcessingSSE2.cpp"].flags += CONFIG["SSE2_FLAGS"] + SOURCES["ImageScalingSSE2.cpp"].flags += CONFIG["SSE2_FLAGS"] + SOURCES["SwizzleAVX2.cpp"].flags += ["-mavx2"] + SOURCES["SwizzleSSE2.cpp"].flags += CONFIG["SSE2_FLAGS"] + SOURCES["SwizzleSSSE3.cpp"].flags += CONFIG["SSSE3_FLAGS"] + SOURCES["ssse3-scaler.c"].flags += CONFIG["SSSE3_FLAGS"] +elif CONFIG["CPU_ARCH"].startswith("mips"): SOURCES += [ - 'BlurLS3.cpp', + "BlurLS3.cpp", ] UNIFIED_SOURCES += [ - 'BezierUtils.cpp', - 'Blur.cpp', - 'BufferEdgePad.cpp', - 'BufferUnrotate.cpp', - 'CaptureCommandList.cpp', - 'DataSourceSurface.cpp', - 'DataSurfaceHelpers.cpp', - 'DrawEventRecorder.cpp', - 'DrawTarget.cpp', - 'DrawTargetCairo.cpp', - 'DrawTargetCapture.cpp', - 'DrawTargetDual.cpp', - 'DrawTargetOffset.cpp', - 'DrawTargetRecording.cpp', - 'DrawTargetTiled.cpp', - 'DrawTargetWrapAndRecord.cpp', - 'FilterNodeCapture.cpp', - 'FilterNodeSoftware.cpp', - 'FilterProcessing.cpp', - 'FilterProcessingScalar.cpp', - 'ImageScaling.cpp', - 'Matrix.cpp', - 'NativeFontResource.cpp', - 'Path.cpp', - 'PathCairo.cpp', - 'PathCapture.cpp', - 'PathHelpers.cpp', - 'PathRecording.cpp', - 'Quaternion.cpp', - 'RecordedEvent.cpp', - 'Scale.cpp', - 'ScaledFontBase.cpp', - 'SFNTData.cpp', - 'SourceSurfaceCairo.cpp', - 'SourceSurfaceCapture.cpp', - 'SourceSurfaceRawData.cpp', - 'Swizzle.cpp', - 'Types.cpp', + "BezierUtils.cpp", + "Blur.cpp", + "BufferEdgePad.cpp", + "BufferUnrotate.cpp", + "CaptureCommandList.cpp", + "DataSourceSurface.cpp", + "DataSurfaceHelpers.cpp", + "DrawEventRecorder.cpp", + "DrawTarget.cpp", + "DrawTargetCairo.cpp", + "DrawTargetCapture.cpp", + "DrawTargetDual.cpp", + "DrawTargetOffset.cpp", + "DrawTargetRecording.cpp", + "DrawTargetTiled.cpp", + "DrawTargetWrapAndRecord.cpp", + "FilterNodeCapture.cpp", + "FilterNodeSoftware.cpp", + "FilterProcessing.cpp", + "FilterProcessingScalar.cpp", + "ImageScaling.cpp", + "Matrix.cpp", + "NativeFontResource.cpp", + "Path.cpp", + "PathCairo.cpp", + "PathCapture.cpp", + "PathHelpers.cpp", + "PathRecording.cpp", + "Quaternion.cpp", + "RecordedEvent.cpp", + "Scale.cpp", + "ScaledFontBase.cpp", + "SFNTData.cpp", + "SourceSurfaceCairo.cpp", + "SourceSurfaceCapture.cpp", + "SourceSurfaceRawData.cpp", + "Swizzle.cpp", + "Types.cpp", ] SOURCES += [ - 'Factory.cpp', # Need to suppress warnings in Skia header files. - 'InlineTranslator.cpp', + "Factory.cpp", # Need to suppress warnings in Skia header files. 
+ "InlineTranslator.cpp", ] -if CONFIG['CC_TYPE'] == 'clang': - SOURCES['Factory.cpp'].flags += ['-Wno-implicit-fallthrough'] +if CONFIG["CC_TYPE"] == "clang": + SOURCES["Factory.cpp"].flags += ["-Wno-implicit-fallthrough"] -if CONFIG['CC_TYPE'] in ('clang', 'gcc'): - CXXFLAGS += ['-Wno-error=shadow'] +if CONFIG["CC_TYPE"] in ("clang", "gcc"): + CXXFLAGS += ["-Wno-error=shadow"] -if CONFIG['MOZ_WIDGET_TOOLKIT'] == 'cocoa': +if CONFIG["MOZ_WIDGET_TOOLKIT"] == "cocoa": EXPORTS.mozilla.gfx += [ - 'QuartzSupport.h', + "QuartzSupport.h", ] SOURCES += [ - 'MacIOSurface.cpp', - 'QuartzSupport.mm', + "MacIOSurface.cpp", + "QuartzSupport.mm", ] -if CONFIG['CPU_ARCH'] == 'aarch64' or CONFIG['BUILD_ARM_NEON']: +if CONFIG["CPU_ARCH"] == "aarch64" or CONFIG["BUILD_ARM_NEON"]: SOURCES += [ - 'BlurNEON.cpp', - 'LuminanceNEON.cpp', - 'SwizzleNEON.cpp', + "BlurNEON.cpp", + "LuminanceNEON.cpp", + "SwizzleNEON.cpp", ] - DEFINES['USE_NEON'] = True - SOURCES['BlurNEON.cpp'].flags += CONFIG['NEON_FLAGS'] - SOURCES['LuminanceNEON.cpp'].flags += CONFIG['NEON_FLAGS'] - SOURCES['SwizzleNEON.cpp'].flags += CONFIG['NEON_FLAGS'] + DEFINES["USE_NEON"] = True + SOURCES["BlurNEON.cpp"].flags += CONFIG["NEON_FLAGS"] + SOURCES["LuminanceNEON.cpp"].flags += CONFIG["NEON_FLAGS"] + SOURCES["SwizzleNEON.cpp"].flags += CONFIG["NEON_FLAGS"] -include('/ipc/chromium/chromium-config.mozbuild') +include("/ipc/chromium/chromium-config.mozbuild") -FINAL_LIBRARY = 'xul' +FINAL_LIBRARY = "xul" -for var in ('USE_CAIRO', 'MOZ2D_HAS_MOZ_CAIRO'): +for var in ("USE_CAIRO", "MOZ2D_HAS_MOZ_CAIRO"): DEFINES[var] = True -if CONFIG['MOZ_WIDGET_TOOLKIT'] in ('android', 'gtk'): - DEFINES['MOZ_ENABLE_FREETYPE'] = True +if CONFIG["MOZ_WIDGET_TOOLKIT"] in ("android", "gtk"): + DEFINES["MOZ_ENABLE_FREETYPE"] = True -CXXFLAGS += CONFIG['MOZ_CAIRO_CFLAGS'] +CXXFLAGS += CONFIG["MOZ_CAIRO_CFLAGS"] -if CONFIG['MOZ_WIDGET_TOOLKIT'] in ('android', 'gtk'): - CXXFLAGS += CONFIG['CAIRO_FT_CFLAGS'] +if CONFIG["MOZ_WIDGET_TOOLKIT"] in ("android", "gtk"): + CXXFLAGS += CONFIG["CAIRO_FT_CFLAGS"] -LOCAL_INCLUDES += CONFIG['SKIA_INCLUDES'] +LOCAL_INCLUDES += CONFIG["SKIA_INCLUDES"] diff --git a/gfx/angle/moz.build b/gfx/angle/moz.build index 32131a6ac60ffe..08c5b40380dee0 100644 --- a/gfx/angle/moz.build +++ b/gfx/angle/moz.build @@ -4,13 +4,13 @@ # License, v. 2.0. If a copy of the MPL was not distributed with this # file, You can obtain one at http://mozilla.org/MPL/2.0/. 
-DIRS += [ 'targets/translator' ] +DIRS += ["targets/translator"] # Only build libEGL/libGLESv2 on Windows -if CONFIG['MOZ_WIDGET_TOOLKIT'] == 'windows': - DIRS += [ 'targets/libEGL', 'targets/libGLESv2' ] +if CONFIG["MOZ_WIDGET_TOOLKIT"] == "windows": + DIRS += ["targets/libEGL", "targets/libGLESv2"] -if CONFIG['MOZ_ANGLE_RENDERER'] and CONFIG['MOZ_D3DCOMPILER_VISTA_DLL_PATH']: +if CONFIG["MOZ_ANGLE_RENDERER"] and CONFIG["MOZ_D3DCOMPILER_VISTA_DLL_PATH"]: FINAL_TARGET_FILES += [ - '%' + CONFIG['MOZ_D3DCOMPILER_VISTA_DLL_PATH'], + "%" + CONFIG["MOZ_D3DCOMPILER_VISTA_DLL_PATH"], ] diff --git a/gfx/angle/targets/angle_common/moz.build b/gfx/angle/targets/angle_common/moz.build index 1231b748982529..ae7cf4aa60d469 100644 --- a/gfx/angle/targets/angle_common/moz.build +++ b/gfx/angle/targets/angle_common/moz.build @@ -1,43 +1,43 @@ # Generated by update-angle.py -include('../../moz.build.common') +include("../../moz.build.common") -#DEFINES['ANGLE_IS_64_BIT_CPU'] = True -#DEFINES['CERT_CHAIN_PARA_HAS_EXTRA_FIELDS'] = True -DEFINES['CR_CLANG_REVISION'] = '"n332890-c2443155-1"' -DEFINES['DYNAMIC_ANNOTATIONS_ENABLED'] = '0' -#DEFINES['NDEBUG'] = True -DEFINES['NOMINMAX'] = True -DEFINES['NTDDI_VERSION'] = 'NTDDI_WIN10_RS2' -#DEFINES['NVALGRIND'] = True -#DEFINES['PSAPI_VERSION'] = '2' -DEFINES['UNICODE'] = True -#DEFINES['USE_AURA'] = '1' -#DEFINES['WIN32'] = True -#DEFINES['WIN32_LEAN_AND_MEAN'] = True -#DEFINES['WINAPI_FAMILY'] = 'WINAPI_FAMILY_DESKTOP_APP' -DEFINES['WINVER'] = '0x0A00' -DEFINES['_ATL_NO_OPENGL'] = True -DEFINES['_CRT_RAND_S'] = True -DEFINES['_CRT_SECURE_NO_DEPRECATE'] = True -DEFINES['_HAS_EXCEPTIONS'] = '0' -#DEFINES['_HAS_NODISCARD'] = True -DEFINES['_SCL_SECURE_NO_DEPRECATE'] = True -DEFINES['_SECURE_ATL'] = True -DEFINES['_UNICODE'] = True -#DEFINES['_USING_V110_SDK71_'] = True -#DEFINES['_WIN32_WINNT'] = '0x0A00' -#DEFINES['_WINDOWS'] = True -#DEFINES['__STD_C'] = True +# DEFINES['ANGLE_IS_64_BIT_CPU'] = True +# DEFINES['CERT_CHAIN_PARA_HAS_EXTRA_FIELDS'] = True +DEFINES["CR_CLANG_REVISION"] = '"n332890-c2443155-1"' +DEFINES["DYNAMIC_ANNOTATIONS_ENABLED"] = "0" +# DEFINES['NDEBUG'] = True +DEFINES["NOMINMAX"] = True +DEFINES["NTDDI_VERSION"] = "NTDDI_WIN10_RS2" +# DEFINES['NVALGRIND'] = True +# DEFINES['PSAPI_VERSION'] = '2' +DEFINES["UNICODE"] = True +# DEFINES['USE_AURA'] = '1' +# DEFINES['WIN32'] = True +# DEFINES['WIN32_LEAN_AND_MEAN'] = True +# DEFINES['WINAPI_FAMILY'] = 'WINAPI_FAMILY_DESKTOP_APP' +DEFINES["WINVER"] = "0x0A00" +DEFINES["_ATL_NO_OPENGL"] = True +DEFINES["_CRT_RAND_S"] = True +DEFINES["_CRT_SECURE_NO_DEPRECATE"] = True +DEFINES["_HAS_EXCEPTIONS"] = "0" +# DEFINES['_HAS_NODISCARD'] = True +DEFINES["_SCL_SECURE_NO_DEPRECATE"] = True +DEFINES["_SECURE_ATL"] = True +DEFINES["_UNICODE"] = True +# DEFINES['_USING_V110_SDK71_'] = True +# DEFINES['_WIN32_WINNT'] = '0x0A00' +# DEFINES['_WINDOWS'] = True +# DEFINES['__STD_C'] = True LOCAL_INCLUDES += [ - '../../checkout/include/', - '../../checkout/out/gen/angle/', - '../../checkout/src/', - '../../checkout/src/common/third_party/base/', + "../../checkout/include/", + "../../checkout/out/gen/angle/", + "../../checkout/src/", + "../../checkout/src/common/third_party/base/", ] -#CXXFLAGS += [ +# CXXFLAGS += [ # '-D__DATE__=', # '-D__TIME__=', # '-D__TIMESTAMP__=', @@ -144,53 +144,53 @@ LOCAL_INCLUDES += [ # '/Zc:inline', # '/Zc:sizedDealloc-', # '/Zc:twoPhase', -#] +# ] SOURCES += [ - '../../checkout/src/common/aligned_memory.cpp', - '../../checkout/src/common/android_util.cpp', - 
'../../checkout/src/common/angleutils.cpp', - '../../checkout/src/common/debug.cpp', - '../../checkout/src/common/event_tracer.cpp', - '../../checkout/src/common/Float16ToFloat32.cpp', - '../../checkout/src/common/mathutil.cpp', - '../../checkout/src/common/matrix_utils.cpp', - '../../checkout/src/common/MemoryBuffer.cpp', - '../../checkout/src/common/PackedEGLEnums_autogen.cpp', - '../../checkout/src/common/PackedEnums.cpp', - '../../checkout/src/common/PackedGLEnums_autogen.cpp', - '../../checkout/src/common/PoolAlloc.cpp', - '../../checkout/src/common/string_utils.cpp', - '../../checkout/src/common/system_utils.cpp', - '../../checkout/src/common/third_party/base/anglebase/sha1.cc', - '../../checkout/src/common/third_party/smhasher/src/PMurHash.cpp', - '../../checkout/src/common/third_party/xxhash/xxhash.c', - '../../checkout/src/common/tls.cpp', - '../../checkout/src/common/uniform_type_info_autogen.cpp', - '../../checkout/src/common/utilities.cpp', + "../../checkout/src/common/aligned_memory.cpp", + "../../checkout/src/common/android_util.cpp", + "../../checkout/src/common/angleutils.cpp", + "../../checkout/src/common/debug.cpp", + "../../checkout/src/common/event_tracer.cpp", + "../../checkout/src/common/Float16ToFloat32.cpp", + "../../checkout/src/common/mathutil.cpp", + "../../checkout/src/common/matrix_utils.cpp", + "../../checkout/src/common/MemoryBuffer.cpp", + "../../checkout/src/common/PackedEGLEnums_autogen.cpp", + "../../checkout/src/common/PackedEnums.cpp", + "../../checkout/src/common/PackedGLEnums_autogen.cpp", + "../../checkout/src/common/PoolAlloc.cpp", + "../../checkout/src/common/string_utils.cpp", + "../../checkout/src/common/system_utils.cpp", + "../../checkout/src/common/third_party/base/anglebase/sha1.cc", + "../../checkout/src/common/third_party/smhasher/src/PMurHash.cpp", + "../../checkout/src/common/third_party/xxhash/xxhash.c", + "../../checkout/src/common/tls.cpp", + "../../checkout/src/common/uniform_type_info_autogen.cpp", + "../../checkout/src/common/utilities.cpp", ] -if CONFIG['OS_ARCH'] != 'WINNT': +if CONFIG["OS_ARCH"] != "WINNT": SOURCES += [ - '../../checkout/src/common/system_utils_posix.cpp', + "../../checkout/src/common/system_utils_posix.cpp", ] -if CONFIG['OS_ARCH'] == 'Darwin': +if CONFIG["OS_ARCH"] == "Darwin": SOURCES += [ - '../../checkout/src/common/system_utils_mac.cpp', + "../../checkout/src/common/system_utils_mac.cpp", ] -if CONFIG['OS_ARCH'] == 'WINNT': +if CONFIG["OS_ARCH"] == "WINNT": SOURCES += [ - '../../checkout/src/common/system_utils_win.cpp', + "../../checkout/src/common/system_utils_win.cpp", ] -if CONFIG['OS_ARCH'] not in ('Darwin', 'WINNT'): +if CONFIG["OS_ARCH"] not in ("Darwin", "WINNT"): SOURCES += [ - '../../checkout/src/common/system_utils_linux.cpp', + "../../checkout/src/common/system_utils_linux.cpp", ] -#LDFLAGS += [ +# LDFLAGS += [ # '--color-diagnostics', # '/DEBUG:GHASH', # '/FIXED:NO', @@ -204,6 +204,6 @@ if CONFIG['OS_ARCH'] not in ('Darwin', 'WINNT'): # '/PROFILE', # '/TIMESTAMP:1575176400', # '/WX', -#] +# ] -Library('angle_common') \ No newline at end of file +Library("angle_common") diff --git a/gfx/angle/targets/angle_gpu_info_util/moz.build b/gfx/angle/targets/angle_gpu_info_util/moz.build index b95c326d8ae984..48994d7693c7c6 100644 --- a/gfx/angle/targets/angle_gpu_info_util/moz.build +++ b/gfx/angle/targets/angle_gpu_info_util/moz.build @@ -1,45 +1,45 @@ # Generated by update-angle.py -include('../../moz.build.common') +include("../../moz.build.common") -DEFINES['ANGLE_ENABLE_D3D11'] = True 
-DEFINES['ANGLE_ENABLE_D3D9'] = True -#DEFINES['ANGLE_IS_64_BIT_CPU'] = True -#DEFINES['CERT_CHAIN_PARA_HAS_EXTRA_FIELDS'] = True -DEFINES['CR_CLANG_REVISION'] = '"n332890-c2443155-1"' -DEFINES['DYNAMIC_ANNOTATIONS_ENABLED'] = '0' -#DEFINES['NDEBUG'] = True -DEFINES['NOMINMAX'] = True -DEFINES['NTDDI_VERSION'] = 'NTDDI_WIN10_RS2' -#DEFINES['NVALGRIND'] = True -#DEFINES['PSAPI_VERSION'] = '2' -DEFINES['UNICODE'] = True -#DEFINES['USE_AURA'] = '1' -#DEFINES['WIN32'] = True -#DEFINES['WIN32_LEAN_AND_MEAN'] = True -#DEFINES['WINAPI_FAMILY'] = 'WINAPI_FAMILY_DESKTOP_APP' -DEFINES['WINVER'] = '0x0A00' -DEFINES['_ATL_NO_OPENGL'] = True -DEFINES['_CRT_RAND_S'] = True -DEFINES['_CRT_SECURE_NO_DEPRECATE'] = True -DEFINES['_HAS_EXCEPTIONS'] = '0' -#DEFINES['_HAS_NODISCARD'] = True -DEFINES['_SCL_SECURE_NO_DEPRECATE'] = True -DEFINES['_SECURE_ATL'] = True -DEFINES['_UNICODE'] = True -#DEFINES['_USING_V110_SDK71_'] = True -#DEFINES['_WIN32_WINNT'] = '0x0A00' -#DEFINES['_WINDOWS'] = True -#DEFINES['__STD_C'] = True +DEFINES["ANGLE_ENABLE_D3D11"] = True +DEFINES["ANGLE_ENABLE_D3D9"] = True +# DEFINES['ANGLE_IS_64_BIT_CPU'] = True +# DEFINES['CERT_CHAIN_PARA_HAS_EXTRA_FIELDS'] = True +DEFINES["CR_CLANG_REVISION"] = '"n332890-c2443155-1"' +DEFINES["DYNAMIC_ANNOTATIONS_ENABLED"] = "0" +# DEFINES['NDEBUG'] = True +DEFINES["NOMINMAX"] = True +DEFINES["NTDDI_VERSION"] = "NTDDI_WIN10_RS2" +# DEFINES['NVALGRIND'] = True +# DEFINES['PSAPI_VERSION'] = '2' +DEFINES["UNICODE"] = True +# DEFINES['USE_AURA'] = '1' +# DEFINES['WIN32'] = True +# DEFINES['WIN32_LEAN_AND_MEAN'] = True +# DEFINES['WINAPI_FAMILY'] = 'WINAPI_FAMILY_DESKTOP_APP' +DEFINES["WINVER"] = "0x0A00" +DEFINES["_ATL_NO_OPENGL"] = True +DEFINES["_CRT_RAND_S"] = True +DEFINES["_CRT_SECURE_NO_DEPRECATE"] = True +DEFINES["_HAS_EXCEPTIONS"] = "0" +# DEFINES['_HAS_NODISCARD'] = True +DEFINES["_SCL_SECURE_NO_DEPRECATE"] = True +DEFINES["_SECURE_ATL"] = True +DEFINES["_UNICODE"] = True +# DEFINES['_USING_V110_SDK71_'] = True +# DEFINES['_WIN32_WINNT'] = '0x0A00' +# DEFINES['_WINDOWS'] = True +# DEFINES['__STD_C'] = True LOCAL_INCLUDES += [ - '../../checkout/include/', - '../../checkout/out/gen/angle/', - '../../checkout/src/', - '../../checkout/src/common/third_party/base/', + "../../checkout/include/", + "../../checkout/out/gen/angle/", + "../../checkout/src/", + "../../checkout/src/common/third_party/base/", ] -#CXXFLAGS += [ +# CXXFLAGS += [ # '-D__DATE__=', # '-D__TIME__=', # '-D__TIMESTAMP__=', @@ -146,31 +146,31 @@ LOCAL_INCLUDES += [ # '/Zc:inline', # '/Zc:sizedDealloc-', # '/Zc:twoPhase', -#] +# ] SOURCES += [ - '../../checkout/src/gpu_info_util/SystemInfo.cpp', + "../../checkout/src/gpu_info_util/SystemInfo.cpp", ] -if CONFIG['OS_ARCH'] == 'WINNT': +if CONFIG["OS_ARCH"] == "WINNT": SOURCES += [ - '../../checkout/src/gpu_info_util/SystemInfo_win.cpp', + "../../checkout/src/gpu_info_util/SystemInfo_win.cpp", ] USE_LIBS += [ - 'angle_common', + "angle_common", ] DIRS += [ - '../angle_common', + "../angle_common", ] OS_LIBS += [ - 'dxgi', - 'setupapi', + "dxgi", + "setupapi", ] -#LDFLAGS += [ +# LDFLAGS += [ # '--color-diagnostics', # '/DEBUG:GHASH', # '/FIXED:NO', @@ -184,6 +184,6 @@ OS_LIBS += [ # '/PROFILE', # '/TIMESTAMP:1575176400', # '/WX', -#] +# ] -Library('angle_gpu_info_util') \ No newline at end of file +Library("angle_gpu_info_util") diff --git a/gfx/angle/targets/angle_image_util/moz.build b/gfx/angle/targets/angle_image_util/moz.build index 48a129bfe90da2..0c7ad5db0a2351 100644 --- a/gfx/angle/targets/angle_image_util/moz.build +++ 
b/gfx/angle/targets/angle_image_util/moz.build @@ -1,43 +1,43 @@ # Generated by update-angle.py -include('../../moz.build.common') +include("../../moz.build.common") -#DEFINES['ANGLE_IS_64_BIT_CPU'] = True -#DEFINES['CERT_CHAIN_PARA_HAS_EXTRA_FIELDS'] = True -DEFINES['CR_CLANG_REVISION'] = '"n332890-c2443155-1"' -DEFINES['DYNAMIC_ANNOTATIONS_ENABLED'] = '0' -#DEFINES['NDEBUG'] = True -DEFINES['NOMINMAX'] = True -DEFINES['NTDDI_VERSION'] = 'NTDDI_WIN10_RS2' -#DEFINES['NVALGRIND'] = True -#DEFINES['PSAPI_VERSION'] = '2' -DEFINES['UNICODE'] = True -#DEFINES['USE_AURA'] = '1' -#DEFINES['WIN32'] = True -#DEFINES['WIN32_LEAN_AND_MEAN'] = True -#DEFINES['WINAPI_FAMILY'] = 'WINAPI_FAMILY_DESKTOP_APP' -DEFINES['WINVER'] = '0x0A00' -DEFINES['_ATL_NO_OPENGL'] = True -DEFINES['_CRT_RAND_S'] = True -DEFINES['_CRT_SECURE_NO_DEPRECATE'] = True -DEFINES['_HAS_EXCEPTIONS'] = '0' -#DEFINES['_HAS_NODISCARD'] = True -DEFINES['_SCL_SECURE_NO_DEPRECATE'] = True -DEFINES['_SECURE_ATL'] = True -DEFINES['_UNICODE'] = True -#DEFINES['_USING_V110_SDK71_'] = True -#DEFINES['_WIN32_WINNT'] = '0x0A00' -#DEFINES['_WINDOWS'] = True -#DEFINES['__STD_C'] = True +# DEFINES['ANGLE_IS_64_BIT_CPU'] = True +# DEFINES['CERT_CHAIN_PARA_HAS_EXTRA_FIELDS'] = True +DEFINES["CR_CLANG_REVISION"] = '"n332890-c2443155-1"' +DEFINES["DYNAMIC_ANNOTATIONS_ENABLED"] = "0" +# DEFINES['NDEBUG'] = True +DEFINES["NOMINMAX"] = True +DEFINES["NTDDI_VERSION"] = "NTDDI_WIN10_RS2" +# DEFINES['NVALGRIND'] = True +# DEFINES['PSAPI_VERSION'] = '2' +DEFINES["UNICODE"] = True +# DEFINES['USE_AURA'] = '1' +# DEFINES['WIN32'] = True +# DEFINES['WIN32_LEAN_AND_MEAN'] = True +# DEFINES['WINAPI_FAMILY'] = 'WINAPI_FAMILY_DESKTOP_APP' +DEFINES["WINVER"] = "0x0A00" +DEFINES["_ATL_NO_OPENGL"] = True +DEFINES["_CRT_RAND_S"] = True +DEFINES["_CRT_SECURE_NO_DEPRECATE"] = True +DEFINES["_HAS_EXCEPTIONS"] = "0" +# DEFINES['_HAS_NODISCARD'] = True +DEFINES["_SCL_SECURE_NO_DEPRECATE"] = True +DEFINES["_SECURE_ATL"] = True +DEFINES["_UNICODE"] = True +# DEFINES['_USING_V110_SDK71_'] = True +# DEFINES['_WIN32_WINNT'] = '0x0A00' +# DEFINES['_WINDOWS'] = True +# DEFINES['__STD_C'] = True LOCAL_INCLUDES += [ - '../../checkout/include/', - '../../checkout/out/gen/angle/', - '../../checkout/src/', - '../../checkout/src/common/third_party/base/', + "../../checkout/include/", + "../../checkout/out/gen/angle/", + "../../checkout/src/", + "../../checkout/src/common/third_party/base/", ] -#CXXFLAGS += [ +# CXXFLAGS += [ # '-D__DATE__=', # '-D__TIME__=', # '-D__TIMESTAMP__=', @@ -144,24 +144,24 @@ LOCAL_INCLUDES += [ # '/Zc:inline', # '/Zc:sizedDealloc-', # '/Zc:twoPhase', -#] +# ] SOURCES += [ - '../../checkout/src/image_util/copyimage.cpp', - '../../checkout/src/image_util/imageformats.cpp', - '../../checkout/src/image_util/loadimage.cpp', - '../../checkout/src/image_util/loadimage_etc.cpp', + "../../checkout/src/image_util/copyimage.cpp", + "../../checkout/src/image_util/imageformats.cpp", + "../../checkout/src/image_util/loadimage.cpp", + "../../checkout/src/image_util/loadimage_etc.cpp", ] USE_LIBS += [ - 'angle_common', + "angle_common", ] DIRS += [ - '../angle_common', + "../angle_common", ] -#LDFLAGS += [ +# LDFLAGS += [ # '--color-diagnostics', # '/DEBUG:GHASH', # '/FIXED:NO', @@ -175,6 +175,6 @@ DIRS += [ # '/PROFILE', # '/TIMESTAMP:1575176400', # '/WX', -#] +# ] -Library('angle_image_util') \ No newline at end of file +Library("angle_image_util") diff --git a/gfx/angle/targets/libEGL/moz.build b/gfx/angle/targets/libEGL/moz.build index f857aab0cfcba8..a07c1fe01944de 100644 
--- a/gfx/angle/targets/libEGL/moz.build +++ b/gfx/angle/targets/libEGL/moz.build @@ -1,51 +1,51 @@ # Generated by update-angle.py -include('../../moz.build.common') +include("../../moz.build.common") -#DEFINES['ANGLE_EGL_LIBRARY_NAME'] = '"libEGL"' -DEFINES['ANGLE_GLESV2_LIBRARY_NAME'] = '"libGLESv2"' -#DEFINES['ANGLE_IS_64_BIT_CPU'] = True -DEFINES['ANGLE_USE_EGL_LOADER'] = True -#DEFINES['CERT_CHAIN_PARA_HAS_EXTRA_FIELDS'] = True -DEFINES['CR_CLANG_REVISION'] = '"n332890-c2443155-1"' -DEFINES['DYNAMIC_ANNOTATIONS_ENABLED'] = '0' -DEFINES['EGLAPI'] = '' -DEFINES['EGL_EGLEXT_PROTOTYPES'] = True -DEFINES['EGL_EGL_PROTOTYPES'] = '1' -DEFINES['GL_GLES_PROTOTYPES'] = '1' -DEFINES['GL_GLEXT_PROTOTYPES'] = True -DEFINES['LIBEGL_IMPLEMENTATION'] = True -#DEFINES['NDEBUG'] = True -DEFINES['NOMINMAX'] = True -DEFINES['NTDDI_VERSION'] = 'NTDDI_WIN10_RS2' -#DEFINES['NVALGRIND'] = True -#DEFINES['PSAPI_VERSION'] = '2' -DEFINES['UNICODE'] = True -#DEFINES['USE_AURA'] = '1' -#DEFINES['WIN32'] = True -#DEFINES['WIN32_LEAN_AND_MEAN'] = True -#DEFINES['WINAPI_FAMILY'] = 'WINAPI_FAMILY_DESKTOP_APP' -DEFINES['WINVER'] = '0x0A00' -DEFINES['_ATL_NO_OPENGL'] = True -DEFINES['_CRT_RAND_S'] = True -DEFINES['_CRT_SECURE_NO_DEPRECATE'] = True -DEFINES['_HAS_EXCEPTIONS'] = '0' -#DEFINES['_HAS_NODISCARD'] = True -DEFINES['_SCL_SECURE_NO_DEPRECATE'] = True -DEFINES['_SECURE_ATL'] = True -DEFINES['_UNICODE'] = True -#DEFINES['_USING_V110_SDK71_'] = True -#DEFINES['_WIN32_WINNT'] = '0x0A00' -#DEFINES['_WINDOWS'] = True -#DEFINES['__STD_C'] = True +# DEFINES['ANGLE_EGL_LIBRARY_NAME'] = '"libEGL"' +DEFINES["ANGLE_GLESV2_LIBRARY_NAME"] = '"libGLESv2"' +# DEFINES['ANGLE_IS_64_BIT_CPU'] = True +DEFINES["ANGLE_USE_EGL_LOADER"] = True +# DEFINES['CERT_CHAIN_PARA_HAS_EXTRA_FIELDS'] = True +DEFINES["CR_CLANG_REVISION"] = '"n332890-c2443155-1"' +DEFINES["DYNAMIC_ANNOTATIONS_ENABLED"] = "0" +DEFINES["EGLAPI"] = "" +DEFINES["EGL_EGLEXT_PROTOTYPES"] = True +DEFINES["EGL_EGL_PROTOTYPES"] = "1" +DEFINES["GL_GLES_PROTOTYPES"] = "1" +DEFINES["GL_GLEXT_PROTOTYPES"] = True +DEFINES["LIBEGL_IMPLEMENTATION"] = True +# DEFINES['NDEBUG'] = True +DEFINES["NOMINMAX"] = True +DEFINES["NTDDI_VERSION"] = "NTDDI_WIN10_RS2" +# DEFINES['NVALGRIND'] = True +# DEFINES['PSAPI_VERSION'] = '2' +DEFINES["UNICODE"] = True +# DEFINES['USE_AURA'] = '1' +# DEFINES['WIN32'] = True +# DEFINES['WIN32_LEAN_AND_MEAN'] = True +# DEFINES['WINAPI_FAMILY'] = 'WINAPI_FAMILY_DESKTOP_APP' +DEFINES["WINVER"] = "0x0A00" +DEFINES["_ATL_NO_OPENGL"] = True +DEFINES["_CRT_RAND_S"] = True +DEFINES["_CRT_SECURE_NO_DEPRECATE"] = True +DEFINES["_HAS_EXCEPTIONS"] = "0" +# DEFINES['_HAS_NODISCARD'] = True +DEFINES["_SCL_SECURE_NO_DEPRECATE"] = True +DEFINES["_SECURE_ATL"] = True +DEFINES["_UNICODE"] = True +# DEFINES['_USING_V110_SDK71_'] = True +# DEFINES['_WIN32_WINNT'] = '0x0A00' +# DEFINES['_WINDOWS'] = True +# DEFINES['__STD_C'] = True LOCAL_INCLUDES += [ - '../../checkout/include/', - '../../checkout/out/gen/angle/', - '../../checkout/src/', + "../../checkout/include/", + "../../checkout/out/gen/angle/", + "../../checkout/src/", ] -#CXXFLAGS += [ +# CXXFLAGS += [ # '-D__DATE__=', # '-D__TIME__=', # '-D__TIMESTAMP__=', @@ -152,53 +152,53 @@ LOCAL_INCLUDES += [ # '/Zc:inline', # '/Zc:sizedDealloc-', # '/Zc:twoPhase', -#] +# ] SOURCES += [ - '../../checkout/src/common/system_utils.cpp', - '../../checkout/src/libEGL/egl_loader_autogen.cpp', - '../../checkout/src/libEGL/libEGL.cpp', + "../../checkout/src/common/system_utils.cpp", + 
"../../checkout/src/libEGL/egl_loader_autogen.cpp", + "../../checkout/src/libEGL/libEGL.cpp", ] -if CONFIG['OS_ARCH'] == 'WINNT': +if CONFIG["OS_ARCH"] == "WINNT": SOURCES += [ - '../../checkout/src/common/system_utils_win.cpp', + "../../checkout/src/common/system_utils_win.cpp", ] USE_LIBS += [ - 'libGLESv2', + "libGLESv2", ] DIRS += [ - '../libGLESv2', + "../libGLESv2", ] OS_LIBS += [ - 'advapi32', - 'comdlg32', - 'dbghelp', - 'delayimp', - 'dnsapi', - 'gdi32', - 'kernel32', - 'msimg32', - 'odbc32', - 'odbccp32', - 'ole32', - 'oleaut32', - 'shell32', - 'shlwapi', - 'user32', - 'usp10', - 'uuid', - 'version', - 'wininet', - 'winmm', - 'winspool', - 'ws2_32', + "advapi32", + "comdlg32", + "dbghelp", + "delayimp", + "dnsapi", + "gdi32", + "kernel32", + "msimg32", + "odbc32", + "odbccp32", + "ole32", + "oleaut32", + "shell32", + "shlwapi", + "user32", + "usp10", + "uuid", + "version", + "wininet", + "winmm", + "winspool", + "ws2_32", ] -#LDFLAGS += [ +# LDFLAGS += [ # '--color-diagnostics', # '/DEBUG:GHASH', # '/DYNAMICBASE', @@ -218,8 +218,8 @@ OS_LIBS += [ # '/SUBSYSTEM:CONSOLE,5.02', # '/TIMESTAMP:1575176400', # '/WX', -#] +# ] -DEFFILE = '../../checkout/src/libEGL/libEGL.def' -RCFILE = '../../checkout/src/libEGL/libEGL.rc' -GeckoSharedLibrary('libEGL', linkage=None) \ No newline at end of file +DEFFILE = "../../checkout/src/libEGL/libEGL.def" +RCFILE = "../../checkout/src/libEGL/libEGL.rc" +GeckoSharedLibrary("libEGL", linkage=None) diff --git a/gfx/angle/targets/libGLESv2/moz.build b/gfx/angle/targets/libGLESv2/moz.build index fda0b90ce3dd23..cdfc081bd141f2 100644 --- a/gfx/angle/targets/libGLESv2/moz.build +++ b/gfx/angle/targets/libGLESv2/moz.build @@ -1,54 +1,54 @@ # Generated by update-angle.py -include('../../moz.build.common') +include("../../moz.build.common") -DEFINES['ANGLE_CAPTURE_ENABLED'] = '0' -DEFINES['ANGLE_ENABLE_D3D11'] = True -DEFINES['ANGLE_ENABLE_D3D9'] = True -#DEFINES['ANGLE_IS_64_BIT_CPU'] = True -#DEFINES['CERT_CHAIN_PARA_HAS_EXTRA_FIELDS'] = True -DEFINES['CR_CLANG_REVISION'] = '"n332890-c2443155-1"' -DEFINES['DYNAMIC_ANNOTATIONS_ENABLED'] = '0' -DEFINES['EGL_EGLEXT_PROTOTYPES'] = True -DEFINES['EGL_EGL_PROTOTYPES'] = '1' -DEFINES['GL_API'] = '' -DEFINES['GL_APICALL'] = '' -DEFINES['GL_GLES_PROTOTYPES'] = '1' -DEFINES['GL_GLEXT_PROTOTYPES'] = True -DEFINES['LIBANGLE_IMPLEMENTATION'] = True -DEFINES['LIBGLESV2_IMPLEMENTATION'] = True -#DEFINES['NDEBUG'] = True -DEFINES['NOMINMAX'] = True -DEFINES['NTDDI_VERSION'] = 'NTDDI_WIN10_RS2' -#DEFINES['NVALGRIND'] = True -#DEFINES['PSAPI_VERSION'] = '2' -DEFINES['UNICODE'] = True -#DEFINES['USE_AURA'] = '1' -#DEFINES['WIN32'] = True -#DEFINES['WIN32_LEAN_AND_MEAN'] = True -#DEFINES['WINAPI_FAMILY'] = 'WINAPI_FAMILY_DESKTOP_APP' -DEFINES['WINVER'] = '0x0A00' -DEFINES['_ATL_NO_OPENGL'] = True -DEFINES['_CRT_RAND_S'] = True -DEFINES['_CRT_SECURE_NO_DEPRECATE'] = True -DEFINES['_HAS_EXCEPTIONS'] = '0' -#DEFINES['_HAS_NODISCARD'] = True -DEFINES['_SCL_SECURE_NO_DEPRECATE'] = True -DEFINES['_SECURE_ATL'] = True -DEFINES['_UNICODE'] = True -#DEFINES['_USING_V110_SDK71_'] = True -#DEFINES['_WIN32_WINNT'] = '0x0A00' -#DEFINES['_WINDOWS'] = True -#DEFINES['__STD_C'] = True +DEFINES["ANGLE_CAPTURE_ENABLED"] = "0" +DEFINES["ANGLE_ENABLE_D3D11"] = True +DEFINES["ANGLE_ENABLE_D3D9"] = True +# DEFINES['ANGLE_IS_64_BIT_CPU'] = True +# DEFINES['CERT_CHAIN_PARA_HAS_EXTRA_FIELDS'] = True +DEFINES["CR_CLANG_REVISION"] = '"n332890-c2443155-1"' +DEFINES["DYNAMIC_ANNOTATIONS_ENABLED"] = "0" +DEFINES["EGL_EGLEXT_PROTOTYPES"] = True 
+DEFINES["EGL_EGL_PROTOTYPES"] = "1" +DEFINES["GL_API"] = "" +DEFINES["GL_APICALL"] = "" +DEFINES["GL_GLES_PROTOTYPES"] = "1" +DEFINES["GL_GLEXT_PROTOTYPES"] = True +DEFINES["LIBANGLE_IMPLEMENTATION"] = True +DEFINES["LIBGLESV2_IMPLEMENTATION"] = True +# DEFINES['NDEBUG'] = True +DEFINES["NOMINMAX"] = True +DEFINES["NTDDI_VERSION"] = "NTDDI_WIN10_RS2" +# DEFINES['NVALGRIND'] = True +# DEFINES['PSAPI_VERSION'] = '2' +DEFINES["UNICODE"] = True +# DEFINES['USE_AURA'] = '1' +# DEFINES['WIN32'] = True +# DEFINES['WIN32_LEAN_AND_MEAN'] = True +# DEFINES['WINAPI_FAMILY'] = 'WINAPI_FAMILY_DESKTOP_APP' +DEFINES["WINVER"] = "0x0A00" +DEFINES["_ATL_NO_OPENGL"] = True +DEFINES["_CRT_RAND_S"] = True +DEFINES["_CRT_SECURE_NO_DEPRECATE"] = True +DEFINES["_HAS_EXCEPTIONS"] = "0" +# DEFINES['_HAS_NODISCARD'] = True +DEFINES["_SCL_SECURE_NO_DEPRECATE"] = True +DEFINES["_SECURE_ATL"] = True +DEFINES["_UNICODE"] = True +# DEFINES['_USING_V110_SDK71_'] = True +# DEFINES['_WIN32_WINNT'] = '0x0A00' +# DEFINES['_WINDOWS'] = True +# DEFINES['__STD_C'] = True LOCAL_INCLUDES += [ - '../../checkout/include/', - '../../checkout/out/gen/angle/', - '../../checkout/src/', - '../../checkout/src/common/third_party/base/', + "../../checkout/include/", + "../../checkout/out/gen/angle/", + "../../checkout/src/", + "../../checkout/src/common/third_party/base/", ] -#CXXFLAGS += [ +# CXXFLAGS += [ # '-D__DATE__=', # '-D__TIME__=', # '-D__TIMESTAMP__=', @@ -156,239 +156,239 @@ LOCAL_INCLUDES += [ # '/Zc:inline', # '/Zc:sizedDealloc-', # '/Zc:twoPhase', -#] +# ] SOURCES += [ - '../../checkout/src/libANGLE/angletypes.cpp', - '../../checkout/src/libANGLE/AttributeMap.cpp', - '../../checkout/src/libANGLE/BlobCache.cpp', - '../../checkout/src/libANGLE/Buffer.cpp', - '../../checkout/src/libANGLE/Caps.cpp', - '../../checkout/src/libANGLE/Compiler.cpp', - '../../checkout/src/libANGLE/Config.cpp', - '../../checkout/src/libANGLE/Context.cpp', - '../../checkout/src/libANGLE/Context_gl.cpp', - '../../checkout/src/libANGLE/Context_gles_1_0.cpp', - '../../checkout/src/libANGLE/Debug.cpp', - '../../checkout/src/libANGLE/Device.cpp', - '../../checkout/src/libANGLE/Display.cpp', - '../../checkout/src/libANGLE/EGLSync.cpp', - '../../checkout/src/libANGLE/Error.cpp', - '../../checkout/src/libANGLE/es3_copy_conversion_table_autogen.cpp', - '../../checkout/src/libANGLE/Fence.cpp', - '../../checkout/src/libANGLE/format_map_autogen.cpp', - '../../checkout/src/libANGLE/formatutils.cpp', - '../../checkout/src/libANGLE/Framebuffer.cpp', - '../../checkout/src/libANGLE/FramebufferAttachment.cpp', - '../../checkout/src/libANGLE/GLES1Renderer.cpp', - '../../checkout/src/libANGLE/GLES1State.cpp', - '../../checkout/src/libANGLE/HandleAllocator.cpp', - '../../checkout/src/libANGLE/HandleRangeAllocator.cpp', - '../../checkout/src/libANGLE/Image.cpp', - '../../checkout/src/libANGLE/ImageIndex.cpp', - '../../checkout/src/libANGLE/IndexRangeCache.cpp', - '../../checkout/src/libANGLE/LoggingAnnotator.cpp', - '../../checkout/src/libANGLE/MemoryObject.cpp', - '../../checkout/src/libANGLE/MemoryProgramCache.cpp', - '../../checkout/src/libANGLE/Observer.cpp', - '../../checkout/src/libANGLE/Path.cpp', - '../../checkout/src/libANGLE/Platform.cpp', - '../../checkout/src/libANGLE/Program.cpp', - '../../checkout/src/libANGLE/ProgramLinkedResources.cpp', - '../../checkout/src/libANGLE/ProgramPipeline.cpp', - '../../checkout/src/libANGLE/Query.cpp', - '../../checkout/src/libANGLE/queryconversions.cpp', - '../../checkout/src/libANGLE/queryutils.cpp', - 
'../../checkout/src/libANGLE/Renderbuffer.cpp', - '../../checkout/src/libANGLE/renderer/ContextImpl.cpp', - '../../checkout/src/libANGLE/renderer/d3d/BufferD3D.cpp', - '../../checkout/src/libANGLE/renderer/d3d/CompilerD3D.cpp', - '../../checkout/src/libANGLE/renderer/d3d/d3d11/Blit11.cpp', - '../../checkout/src/libANGLE/renderer/d3d/d3d11/Buffer11.cpp', - '../../checkout/src/libANGLE/renderer/d3d/d3d11/Clear11.cpp', - '../../checkout/src/libANGLE/renderer/d3d/d3d11/Context11.cpp', - '../../checkout/src/libANGLE/renderer/d3d/d3d11/converged/CompositorNativeWindow11.cpp', - '../../checkout/src/libANGLE/renderer/d3d/d3d11/DebugAnnotator11.cpp', - '../../checkout/src/libANGLE/renderer/d3d/d3d11/dxgi_format_map_autogen.cpp', - '../../checkout/src/libANGLE/renderer/d3d/d3d11/dxgi_support_table_autogen.cpp', - '../../checkout/src/libANGLE/renderer/d3d/d3d11/ExternalImageSiblingImpl11.cpp', - '../../checkout/src/libANGLE/renderer/d3d/d3d11/Fence11.cpp', - '../../checkout/src/libANGLE/renderer/d3d/d3d11/formatutils11.cpp', - '../../checkout/src/libANGLE/renderer/d3d/d3d11/Framebuffer11.cpp', - '../../checkout/src/libANGLE/renderer/d3d/d3d11/Image11.cpp', - '../../checkout/src/libANGLE/renderer/d3d/d3d11/IndexBuffer11.cpp', - '../../checkout/src/libANGLE/renderer/d3d/d3d11/InputLayoutCache.cpp', - '../../checkout/src/libANGLE/renderer/d3d/d3d11/MappedSubresourceVerifier11.cpp', - '../../checkout/src/libANGLE/renderer/d3d/d3d11/PixelTransfer11.cpp', - '../../checkout/src/libANGLE/renderer/d3d/d3d11/Program11.cpp', - '../../checkout/src/libANGLE/renderer/d3d/d3d11/ProgramPipeline11.cpp', - '../../checkout/src/libANGLE/renderer/d3d/d3d11/Query11.cpp', - '../../checkout/src/libANGLE/renderer/d3d/d3d11/Renderer11.cpp', - '../../checkout/src/libANGLE/renderer/d3d/d3d11/renderer11_utils.cpp', - '../../checkout/src/libANGLE/renderer/d3d/d3d11/RenderStateCache.cpp', - '../../checkout/src/libANGLE/renderer/d3d/d3d11/RenderTarget11.cpp', - '../../checkout/src/libANGLE/renderer/d3d/d3d11/ResourceManager11.cpp', - '../../checkout/src/libANGLE/renderer/d3d/d3d11/ShaderExecutable11.cpp', - '../../checkout/src/libANGLE/renderer/d3d/d3d11/StateManager11.cpp', - '../../checkout/src/libANGLE/renderer/d3d/d3d11/StreamProducerD3DTexture.cpp', - '../../checkout/src/libANGLE/renderer/d3d/d3d11/SwapChain11.cpp', - '../../checkout/src/libANGLE/renderer/d3d/d3d11/texture_format_table.cpp', - '../../checkout/src/libANGLE/renderer/d3d/d3d11/texture_format_table_autogen.cpp', - '../../checkout/src/libANGLE/renderer/d3d/d3d11/TextureStorage11.cpp', - '../../checkout/src/libANGLE/renderer/d3d/d3d11/TransformFeedback11.cpp', - '../../checkout/src/libANGLE/renderer/d3d/d3d11/Trim11.cpp', - '../../checkout/src/libANGLE/renderer/d3d/d3d11/VertexArray11.cpp', - '../../checkout/src/libANGLE/renderer/d3d/d3d11/VertexBuffer11.cpp', - '../../checkout/src/libANGLE/renderer/d3d/d3d11/win32/NativeWindow11Win32.cpp', - '../../checkout/src/libANGLE/renderer/d3d/d3d9/Blit9.cpp', - '../../checkout/src/libANGLE/renderer/d3d/d3d9/Buffer9.cpp', - '../../checkout/src/libANGLE/renderer/d3d/d3d9/Context9.cpp', - '../../checkout/src/libANGLE/renderer/d3d/d3d9/DebugAnnotator9.cpp', - '../../checkout/src/libANGLE/renderer/d3d/d3d9/Fence9.cpp', - '../../checkout/src/libANGLE/renderer/d3d/d3d9/formatutils9.cpp', - '../../checkout/src/libANGLE/renderer/d3d/d3d9/Framebuffer9.cpp', - '../../checkout/src/libANGLE/renderer/d3d/d3d9/Image9.cpp', - '../../checkout/src/libANGLE/renderer/d3d/d3d9/IndexBuffer9.cpp', - 
'../../checkout/src/libANGLE/renderer/d3d/d3d9/NativeWindow9.cpp', - '../../checkout/src/libANGLE/renderer/d3d/d3d9/Query9.cpp', - '../../checkout/src/libANGLE/renderer/d3d/d3d9/Renderer9.cpp', - '../../checkout/src/libANGLE/renderer/d3d/d3d9/renderer9_utils.cpp', - '../../checkout/src/libANGLE/renderer/d3d/d3d9/RenderTarget9.cpp', - '../../checkout/src/libANGLE/renderer/d3d/d3d9/ShaderExecutable9.cpp', - '../../checkout/src/libANGLE/renderer/d3d/d3d9/StateManager9.cpp', - '../../checkout/src/libANGLE/renderer/d3d/d3d9/SwapChain9.cpp', - '../../checkout/src/libANGLE/renderer/d3d/d3d9/TextureStorage9.cpp', - '../../checkout/src/libANGLE/renderer/d3d/d3d9/VertexBuffer9.cpp', - '../../checkout/src/libANGLE/renderer/d3d/d3d9/VertexDeclarationCache.cpp', - '../../checkout/src/libANGLE/renderer/d3d/DeviceD3D.cpp', - '../../checkout/src/libANGLE/renderer/d3d/DisplayD3D.cpp', - '../../checkout/src/libANGLE/renderer/d3d/DynamicHLSL.cpp', - '../../checkout/src/libANGLE/renderer/d3d/DynamicImage2DHLSL.cpp', - '../../checkout/src/libANGLE/renderer/d3d/EGLImageD3D.cpp', - '../../checkout/src/libANGLE/renderer/d3d/FramebufferD3D.cpp', - '../../checkout/src/libANGLE/renderer/d3d/HLSLCompiler.cpp', - '../../checkout/src/libANGLE/renderer/d3d/ImageD3D.cpp', - '../../checkout/src/libANGLE/renderer/d3d/IndexBuffer.cpp', - '../../checkout/src/libANGLE/renderer/d3d/IndexDataManager.cpp', - '../../checkout/src/libANGLE/renderer/d3d/NativeWindowD3D.cpp', - '../../checkout/src/libANGLE/renderer/d3d/ProgramD3D.cpp', - '../../checkout/src/libANGLE/renderer/d3d/RenderbufferD3D.cpp', - '../../checkout/src/libANGLE/renderer/d3d/RendererD3D.cpp', - '../../checkout/src/libANGLE/renderer/d3d/RenderTargetD3D.cpp', - '../../checkout/src/libANGLE/renderer/d3d/ShaderD3D.cpp', - '../../checkout/src/libANGLE/renderer/d3d/ShaderExecutableD3D.cpp', - '../../checkout/src/libANGLE/renderer/d3d/SurfaceD3D.cpp', - '../../checkout/src/libANGLE/renderer/d3d/SwapChainD3D.cpp', - '../../checkout/src/libANGLE/renderer/d3d/TextureD3D.cpp', - '../../checkout/src/libANGLE/renderer/d3d/VertexBuffer.cpp', - '../../checkout/src/libANGLE/renderer/d3d/VertexDataManager.cpp', - '../../checkout/src/libANGLE/renderer/DeviceImpl.cpp', - '../../checkout/src/libANGLE/renderer/DisplayImpl.cpp', - '../../checkout/src/libANGLE/renderer/driver_utils.cpp', - '../../checkout/src/libANGLE/renderer/Format_table_autogen.cpp', - '../../checkout/src/libANGLE/renderer/load_functions_table_autogen.cpp', - '../../checkout/src/libANGLE/renderer/QueryImpl.cpp', - '../../checkout/src/libANGLE/renderer/renderer_utils.cpp', - '../../checkout/src/libANGLE/renderer/ShaderImpl.cpp', - '../../checkout/src/libANGLE/renderer/SurfaceImpl.cpp', - '../../checkout/src/libANGLE/renderer/TextureImpl.cpp', - '../../checkout/src/libANGLE/ResourceManager.cpp', - '../../checkout/src/libANGLE/Sampler.cpp', - '../../checkout/src/libANGLE/Semaphore.cpp', - '../../checkout/src/libANGLE/Shader.cpp', - '../../checkout/src/libANGLE/State.cpp', - '../../checkout/src/libANGLE/Stream.cpp', - '../../checkout/src/libANGLE/Surface.cpp', - '../../checkout/src/libANGLE/Texture.cpp', - '../../checkout/src/libANGLE/Thread.cpp', - '../../checkout/src/libANGLE/TransformFeedback.cpp', - '../../checkout/src/libANGLE/Uniform.cpp', - '../../checkout/src/libANGLE/validationEGL.cpp', - '../../checkout/src/libANGLE/validationES.cpp', - '../../checkout/src/libANGLE/validationES1.cpp', - '../../checkout/src/libANGLE/validationES2.cpp', - '../../checkout/src/libANGLE/validationES3.cpp', - 
'../../checkout/src/libANGLE/validationES31.cpp', - '../../checkout/src/libANGLE/validationGL1.cpp', - '../../checkout/src/libANGLE/validationGL11.cpp', - '../../checkout/src/libANGLE/validationGL12.cpp', - '../../checkout/src/libANGLE/validationGL13.cpp', - '../../checkout/src/libANGLE/validationGL14.cpp', - '../../checkout/src/libANGLE/validationGL15.cpp', - '../../checkout/src/libANGLE/validationGL2.cpp', - '../../checkout/src/libANGLE/validationGL21.cpp', - '../../checkout/src/libANGLE/validationGL3.cpp', - '../../checkout/src/libANGLE/validationGL31.cpp', - '../../checkout/src/libANGLE/validationGL32.cpp', - '../../checkout/src/libANGLE/validationGL33.cpp', - '../../checkout/src/libANGLE/validationGL4.cpp', - '../../checkout/src/libANGLE/validationGL41.cpp', - '../../checkout/src/libANGLE/validationGL42.cpp', - '../../checkout/src/libANGLE/validationGL43.cpp', - '../../checkout/src/libANGLE/validationGL44.cpp', - '../../checkout/src/libANGLE/validationGL45.cpp', - '../../checkout/src/libANGLE/validationGL46.cpp', - '../../checkout/src/libANGLE/VaryingPacking.cpp', - '../../checkout/src/libANGLE/VertexArray.cpp', - '../../checkout/src/libANGLE/VertexAttribute.cpp', - '../../checkout/src/libANGLE/WorkerThread.cpp', - '../../checkout/src/libGLESv2/entry_points_egl.cpp', - '../../checkout/src/libGLESv2/entry_points_egl_ext.cpp', - '../../checkout/src/libGLESv2/entry_points_gles_1_0_autogen.cpp', - '../../checkout/src/libGLESv2/entry_points_gles_2_0_autogen.cpp', - '../../checkout/src/libGLESv2/entry_points_gles_3_0_autogen.cpp', - '../../checkout/src/libGLESv2/entry_points_gles_3_1_autogen.cpp', - '../../checkout/src/libGLESv2/entry_points_gles_ext_autogen.cpp', - '../../checkout/src/libGLESv2/global_state.cpp', - '../../checkout/src/libGLESv2/libGLESv2_autogen.cpp', - '../../checkout/src/libGLESv2/proc_table_egl_autogen.cpp', - '../../checkout/src/third_party/systeminfo/SystemInfo.cpp', + "../../checkout/src/libANGLE/angletypes.cpp", + "../../checkout/src/libANGLE/AttributeMap.cpp", + "../../checkout/src/libANGLE/BlobCache.cpp", + "../../checkout/src/libANGLE/Buffer.cpp", + "../../checkout/src/libANGLE/Caps.cpp", + "../../checkout/src/libANGLE/Compiler.cpp", + "../../checkout/src/libANGLE/Config.cpp", + "../../checkout/src/libANGLE/Context.cpp", + "../../checkout/src/libANGLE/Context_gl.cpp", + "../../checkout/src/libANGLE/Context_gles_1_0.cpp", + "../../checkout/src/libANGLE/Debug.cpp", + "../../checkout/src/libANGLE/Device.cpp", + "../../checkout/src/libANGLE/Display.cpp", + "../../checkout/src/libANGLE/EGLSync.cpp", + "../../checkout/src/libANGLE/Error.cpp", + "../../checkout/src/libANGLE/es3_copy_conversion_table_autogen.cpp", + "../../checkout/src/libANGLE/Fence.cpp", + "../../checkout/src/libANGLE/format_map_autogen.cpp", + "../../checkout/src/libANGLE/formatutils.cpp", + "../../checkout/src/libANGLE/Framebuffer.cpp", + "../../checkout/src/libANGLE/FramebufferAttachment.cpp", + "../../checkout/src/libANGLE/GLES1Renderer.cpp", + "../../checkout/src/libANGLE/GLES1State.cpp", + "../../checkout/src/libANGLE/HandleAllocator.cpp", + "../../checkout/src/libANGLE/HandleRangeAllocator.cpp", + "../../checkout/src/libANGLE/Image.cpp", + "../../checkout/src/libANGLE/ImageIndex.cpp", + "../../checkout/src/libANGLE/IndexRangeCache.cpp", + "../../checkout/src/libANGLE/LoggingAnnotator.cpp", + "../../checkout/src/libANGLE/MemoryObject.cpp", + "../../checkout/src/libANGLE/MemoryProgramCache.cpp", + "../../checkout/src/libANGLE/Observer.cpp", + "../../checkout/src/libANGLE/Path.cpp", + 
"../../checkout/src/libANGLE/Platform.cpp", + "../../checkout/src/libANGLE/Program.cpp", + "../../checkout/src/libANGLE/ProgramLinkedResources.cpp", + "../../checkout/src/libANGLE/ProgramPipeline.cpp", + "../../checkout/src/libANGLE/Query.cpp", + "../../checkout/src/libANGLE/queryconversions.cpp", + "../../checkout/src/libANGLE/queryutils.cpp", + "../../checkout/src/libANGLE/Renderbuffer.cpp", + "../../checkout/src/libANGLE/renderer/ContextImpl.cpp", + "../../checkout/src/libANGLE/renderer/d3d/BufferD3D.cpp", + "../../checkout/src/libANGLE/renderer/d3d/CompilerD3D.cpp", + "../../checkout/src/libANGLE/renderer/d3d/d3d11/Blit11.cpp", + "../../checkout/src/libANGLE/renderer/d3d/d3d11/Buffer11.cpp", + "../../checkout/src/libANGLE/renderer/d3d/d3d11/Clear11.cpp", + "../../checkout/src/libANGLE/renderer/d3d/d3d11/Context11.cpp", + "../../checkout/src/libANGLE/renderer/d3d/d3d11/converged/CompositorNativeWindow11.cpp", + "../../checkout/src/libANGLE/renderer/d3d/d3d11/DebugAnnotator11.cpp", + "../../checkout/src/libANGLE/renderer/d3d/d3d11/dxgi_format_map_autogen.cpp", + "../../checkout/src/libANGLE/renderer/d3d/d3d11/dxgi_support_table_autogen.cpp", + "../../checkout/src/libANGLE/renderer/d3d/d3d11/ExternalImageSiblingImpl11.cpp", + "../../checkout/src/libANGLE/renderer/d3d/d3d11/Fence11.cpp", + "../../checkout/src/libANGLE/renderer/d3d/d3d11/formatutils11.cpp", + "../../checkout/src/libANGLE/renderer/d3d/d3d11/Framebuffer11.cpp", + "../../checkout/src/libANGLE/renderer/d3d/d3d11/Image11.cpp", + "../../checkout/src/libANGLE/renderer/d3d/d3d11/IndexBuffer11.cpp", + "../../checkout/src/libANGLE/renderer/d3d/d3d11/InputLayoutCache.cpp", + "../../checkout/src/libANGLE/renderer/d3d/d3d11/MappedSubresourceVerifier11.cpp", + "../../checkout/src/libANGLE/renderer/d3d/d3d11/PixelTransfer11.cpp", + "../../checkout/src/libANGLE/renderer/d3d/d3d11/Program11.cpp", + "../../checkout/src/libANGLE/renderer/d3d/d3d11/ProgramPipeline11.cpp", + "../../checkout/src/libANGLE/renderer/d3d/d3d11/Query11.cpp", + "../../checkout/src/libANGLE/renderer/d3d/d3d11/Renderer11.cpp", + "../../checkout/src/libANGLE/renderer/d3d/d3d11/renderer11_utils.cpp", + "../../checkout/src/libANGLE/renderer/d3d/d3d11/RenderStateCache.cpp", + "../../checkout/src/libANGLE/renderer/d3d/d3d11/RenderTarget11.cpp", + "../../checkout/src/libANGLE/renderer/d3d/d3d11/ResourceManager11.cpp", + "../../checkout/src/libANGLE/renderer/d3d/d3d11/ShaderExecutable11.cpp", + "../../checkout/src/libANGLE/renderer/d3d/d3d11/StateManager11.cpp", + "../../checkout/src/libANGLE/renderer/d3d/d3d11/StreamProducerD3DTexture.cpp", + "../../checkout/src/libANGLE/renderer/d3d/d3d11/SwapChain11.cpp", + "../../checkout/src/libANGLE/renderer/d3d/d3d11/texture_format_table.cpp", + "../../checkout/src/libANGLE/renderer/d3d/d3d11/texture_format_table_autogen.cpp", + "../../checkout/src/libANGLE/renderer/d3d/d3d11/TextureStorage11.cpp", + "../../checkout/src/libANGLE/renderer/d3d/d3d11/TransformFeedback11.cpp", + "../../checkout/src/libANGLE/renderer/d3d/d3d11/Trim11.cpp", + "../../checkout/src/libANGLE/renderer/d3d/d3d11/VertexArray11.cpp", + "../../checkout/src/libANGLE/renderer/d3d/d3d11/VertexBuffer11.cpp", + "../../checkout/src/libANGLE/renderer/d3d/d3d11/win32/NativeWindow11Win32.cpp", + "../../checkout/src/libANGLE/renderer/d3d/d3d9/Blit9.cpp", + "../../checkout/src/libANGLE/renderer/d3d/d3d9/Buffer9.cpp", + "../../checkout/src/libANGLE/renderer/d3d/d3d9/Context9.cpp", + "../../checkout/src/libANGLE/renderer/d3d/d3d9/DebugAnnotator9.cpp", + 
"../../checkout/src/libANGLE/renderer/d3d/d3d9/Fence9.cpp", + "../../checkout/src/libANGLE/renderer/d3d/d3d9/formatutils9.cpp", + "../../checkout/src/libANGLE/renderer/d3d/d3d9/Framebuffer9.cpp", + "../../checkout/src/libANGLE/renderer/d3d/d3d9/Image9.cpp", + "../../checkout/src/libANGLE/renderer/d3d/d3d9/IndexBuffer9.cpp", + "../../checkout/src/libANGLE/renderer/d3d/d3d9/NativeWindow9.cpp", + "../../checkout/src/libANGLE/renderer/d3d/d3d9/Query9.cpp", + "../../checkout/src/libANGLE/renderer/d3d/d3d9/Renderer9.cpp", + "../../checkout/src/libANGLE/renderer/d3d/d3d9/renderer9_utils.cpp", + "../../checkout/src/libANGLE/renderer/d3d/d3d9/RenderTarget9.cpp", + "../../checkout/src/libANGLE/renderer/d3d/d3d9/ShaderExecutable9.cpp", + "../../checkout/src/libANGLE/renderer/d3d/d3d9/StateManager9.cpp", + "../../checkout/src/libANGLE/renderer/d3d/d3d9/SwapChain9.cpp", + "../../checkout/src/libANGLE/renderer/d3d/d3d9/TextureStorage9.cpp", + "../../checkout/src/libANGLE/renderer/d3d/d3d9/VertexBuffer9.cpp", + "../../checkout/src/libANGLE/renderer/d3d/d3d9/VertexDeclarationCache.cpp", + "../../checkout/src/libANGLE/renderer/d3d/DeviceD3D.cpp", + "../../checkout/src/libANGLE/renderer/d3d/DisplayD3D.cpp", + "../../checkout/src/libANGLE/renderer/d3d/DynamicHLSL.cpp", + "../../checkout/src/libANGLE/renderer/d3d/DynamicImage2DHLSL.cpp", + "../../checkout/src/libANGLE/renderer/d3d/EGLImageD3D.cpp", + "../../checkout/src/libANGLE/renderer/d3d/FramebufferD3D.cpp", + "../../checkout/src/libANGLE/renderer/d3d/HLSLCompiler.cpp", + "../../checkout/src/libANGLE/renderer/d3d/ImageD3D.cpp", + "../../checkout/src/libANGLE/renderer/d3d/IndexBuffer.cpp", + "../../checkout/src/libANGLE/renderer/d3d/IndexDataManager.cpp", + "../../checkout/src/libANGLE/renderer/d3d/NativeWindowD3D.cpp", + "../../checkout/src/libANGLE/renderer/d3d/ProgramD3D.cpp", + "../../checkout/src/libANGLE/renderer/d3d/RenderbufferD3D.cpp", + "../../checkout/src/libANGLE/renderer/d3d/RendererD3D.cpp", + "../../checkout/src/libANGLE/renderer/d3d/RenderTargetD3D.cpp", + "../../checkout/src/libANGLE/renderer/d3d/ShaderD3D.cpp", + "../../checkout/src/libANGLE/renderer/d3d/ShaderExecutableD3D.cpp", + "../../checkout/src/libANGLE/renderer/d3d/SurfaceD3D.cpp", + "../../checkout/src/libANGLE/renderer/d3d/SwapChainD3D.cpp", + "../../checkout/src/libANGLE/renderer/d3d/TextureD3D.cpp", + "../../checkout/src/libANGLE/renderer/d3d/VertexBuffer.cpp", + "../../checkout/src/libANGLE/renderer/d3d/VertexDataManager.cpp", + "../../checkout/src/libANGLE/renderer/DeviceImpl.cpp", + "../../checkout/src/libANGLE/renderer/DisplayImpl.cpp", + "../../checkout/src/libANGLE/renderer/driver_utils.cpp", + "../../checkout/src/libANGLE/renderer/Format_table_autogen.cpp", + "../../checkout/src/libANGLE/renderer/load_functions_table_autogen.cpp", + "../../checkout/src/libANGLE/renderer/QueryImpl.cpp", + "../../checkout/src/libANGLE/renderer/renderer_utils.cpp", + "../../checkout/src/libANGLE/renderer/ShaderImpl.cpp", + "../../checkout/src/libANGLE/renderer/SurfaceImpl.cpp", + "../../checkout/src/libANGLE/renderer/TextureImpl.cpp", + "../../checkout/src/libANGLE/ResourceManager.cpp", + "../../checkout/src/libANGLE/Sampler.cpp", + "../../checkout/src/libANGLE/Semaphore.cpp", + "../../checkout/src/libANGLE/Shader.cpp", + "../../checkout/src/libANGLE/State.cpp", + "../../checkout/src/libANGLE/Stream.cpp", + "../../checkout/src/libANGLE/Surface.cpp", + "../../checkout/src/libANGLE/Texture.cpp", + "../../checkout/src/libANGLE/Thread.cpp", + 
"../../checkout/src/libANGLE/TransformFeedback.cpp", + "../../checkout/src/libANGLE/Uniform.cpp", + "../../checkout/src/libANGLE/validationEGL.cpp", + "../../checkout/src/libANGLE/validationES.cpp", + "../../checkout/src/libANGLE/validationES1.cpp", + "../../checkout/src/libANGLE/validationES2.cpp", + "../../checkout/src/libANGLE/validationES3.cpp", + "../../checkout/src/libANGLE/validationES31.cpp", + "../../checkout/src/libANGLE/validationGL1.cpp", + "../../checkout/src/libANGLE/validationGL11.cpp", + "../../checkout/src/libANGLE/validationGL12.cpp", + "../../checkout/src/libANGLE/validationGL13.cpp", + "../../checkout/src/libANGLE/validationGL14.cpp", + "../../checkout/src/libANGLE/validationGL15.cpp", + "../../checkout/src/libANGLE/validationGL2.cpp", + "../../checkout/src/libANGLE/validationGL21.cpp", + "../../checkout/src/libANGLE/validationGL3.cpp", + "../../checkout/src/libANGLE/validationGL31.cpp", + "../../checkout/src/libANGLE/validationGL32.cpp", + "../../checkout/src/libANGLE/validationGL33.cpp", + "../../checkout/src/libANGLE/validationGL4.cpp", + "../../checkout/src/libANGLE/validationGL41.cpp", + "../../checkout/src/libANGLE/validationGL42.cpp", + "../../checkout/src/libANGLE/validationGL43.cpp", + "../../checkout/src/libANGLE/validationGL44.cpp", + "../../checkout/src/libANGLE/validationGL45.cpp", + "../../checkout/src/libANGLE/validationGL46.cpp", + "../../checkout/src/libANGLE/VaryingPacking.cpp", + "../../checkout/src/libANGLE/VertexArray.cpp", + "../../checkout/src/libANGLE/VertexAttribute.cpp", + "../../checkout/src/libANGLE/WorkerThread.cpp", + "../../checkout/src/libGLESv2/entry_points_egl.cpp", + "../../checkout/src/libGLESv2/entry_points_egl_ext.cpp", + "../../checkout/src/libGLESv2/entry_points_gles_1_0_autogen.cpp", + "../../checkout/src/libGLESv2/entry_points_gles_2_0_autogen.cpp", + "../../checkout/src/libGLESv2/entry_points_gles_3_0_autogen.cpp", + "../../checkout/src/libGLESv2/entry_points_gles_3_1_autogen.cpp", + "../../checkout/src/libGLESv2/entry_points_gles_ext_autogen.cpp", + "../../checkout/src/libGLESv2/global_state.cpp", + "../../checkout/src/libGLESv2/libGLESv2_autogen.cpp", + "../../checkout/src/libGLESv2/proc_table_egl_autogen.cpp", + "../../checkout/src/third_party/systeminfo/SystemInfo.cpp", ] USE_LIBS += [ - 'angle_common', - 'angle_gpu_info_util', - 'angle_image_util', - 'translator', + "angle_common", + "angle_gpu_info_util", + "angle_image_util", + "translator", ] DIRS += [ - '../angle_common', - '../angle_gpu_info_util', - '../angle_image_util', - '../translator', + "../angle_common", + "../angle_gpu_info_util", + "../angle_image_util", + "../translator", ] OS_LIBS += [ - 'advapi32', - 'comdlg32', - 'd3d9', - 'dbghelp', - 'delayimp', - 'dnsapi', - 'dxgi', - 'dxguid', - 'gdi32', - 'kernel32', - 'msimg32', - 'odbc32', - 'odbccp32', - 'ole32', - 'oleaut32', - 'setupapi', - 'shell32', - 'shlwapi', - 'user32', - 'usp10', - 'uuid', - 'version', - 'wininet', - 'winmm', - 'winspool', - 'ws2_32', + "advapi32", + "comdlg32", + "d3d9", + "dbghelp", + "delayimp", + "dnsapi", + "dxgi", + "dxguid", + "gdi32", + "kernel32", + "msimg32", + "odbc32", + "odbccp32", + "ole32", + "oleaut32", + "setupapi", + "shell32", + "shlwapi", + "user32", + "usp10", + "uuid", + "version", + "wininet", + "winmm", + "winspool", + "ws2_32", ] -#LDFLAGS += [ +# LDFLAGS += [ # '--color-diagnostics', # '/DEBUG:GHASH', # '/DELAYLOAD:d3d9.dll', @@ -409,8 +409,8 @@ OS_LIBS += [ # '/SUBSYSTEM:CONSOLE,5.02', # '/TIMESTAMP:1575176400', # '/WX', -#] +# ] -DEFFILE = 
'../../checkout/src/libGLESv2/libGLESv2_autogen.def' -RCFILE = '../../checkout/src/libGLESv2/libGLESv2.rc' -GeckoSharedLibrary('libGLESv2', linkage=None) \ No newline at end of file +DEFFILE = "../../checkout/src/libGLESv2/libGLESv2_autogen.def" +RCFILE = "../../checkout/src/libGLESv2/libGLESv2.rc" +GeckoSharedLibrary("libGLESv2", linkage=None) diff --git a/gfx/angle/targets/preprocessor/moz.build b/gfx/angle/targets/preprocessor/moz.build index 63feb895fa60cd..d34e3cc9aeef10 100644 --- a/gfx/angle/targets/preprocessor/moz.build +++ b/gfx/angle/targets/preprocessor/moz.build @@ -1,43 +1,43 @@ # Generated by update-angle.py -include('../../moz.build.common') +include("../../moz.build.common") -#DEFINES['ANGLE_IS_64_BIT_CPU'] = True -#DEFINES['CERT_CHAIN_PARA_HAS_EXTRA_FIELDS'] = True -DEFINES['CR_CLANG_REVISION'] = '"n332890-c2443155-1"' -DEFINES['DYNAMIC_ANNOTATIONS_ENABLED'] = '0' -#DEFINES['NDEBUG'] = True -DEFINES['NOMINMAX'] = True -DEFINES['NTDDI_VERSION'] = 'NTDDI_WIN10_RS2' -#DEFINES['NVALGRIND'] = True -#DEFINES['PSAPI_VERSION'] = '2' -DEFINES['UNICODE'] = True -#DEFINES['USE_AURA'] = '1' -#DEFINES['WIN32'] = True -#DEFINES['WIN32_LEAN_AND_MEAN'] = True -#DEFINES['WINAPI_FAMILY'] = 'WINAPI_FAMILY_DESKTOP_APP' -DEFINES['WINVER'] = '0x0A00' -DEFINES['_ATL_NO_OPENGL'] = True -DEFINES['_CRT_RAND_S'] = True -DEFINES['_CRT_SECURE_NO_DEPRECATE'] = True -DEFINES['_HAS_EXCEPTIONS'] = '0' -#DEFINES['_HAS_NODISCARD'] = True -DEFINES['_SCL_SECURE_NO_DEPRECATE'] = True -DEFINES['_SECURE_ATL'] = True -DEFINES['_UNICODE'] = True -#DEFINES['_USING_V110_SDK71_'] = True -#DEFINES['_WIN32_WINNT'] = '0x0A00' -#DEFINES['_WINDOWS'] = True -#DEFINES['__STD_C'] = True +# DEFINES['ANGLE_IS_64_BIT_CPU'] = True +# DEFINES['CERT_CHAIN_PARA_HAS_EXTRA_FIELDS'] = True +DEFINES["CR_CLANG_REVISION"] = '"n332890-c2443155-1"' +DEFINES["DYNAMIC_ANNOTATIONS_ENABLED"] = "0" +# DEFINES['NDEBUG'] = True +DEFINES["NOMINMAX"] = True +DEFINES["NTDDI_VERSION"] = "NTDDI_WIN10_RS2" +# DEFINES['NVALGRIND'] = True +# DEFINES['PSAPI_VERSION'] = '2' +DEFINES["UNICODE"] = True +# DEFINES['USE_AURA'] = '1' +# DEFINES['WIN32'] = True +# DEFINES['WIN32_LEAN_AND_MEAN'] = True +# DEFINES['WINAPI_FAMILY'] = 'WINAPI_FAMILY_DESKTOP_APP' +DEFINES["WINVER"] = "0x0A00" +DEFINES["_ATL_NO_OPENGL"] = True +DEFINES["_CRT_RAND_S"] = True +DEFINES["_CRT_SECURE_NO_DEPRECATE"] = True +DEFINES["_HAS_EXCEPTIONS"] = "0" +# DEFINES['_HAS_NODISCARD'] = True +DEFINES["_SCL_SECURE_NO_DEPRECATE"] = True +DEFINES["_SECURE_ATL"] = True +DEFINES["_UNICODE"] = True +# DEFINES['_USING_V110_SDK71_'] = True +# DEFINES['_WIN32_WINNT'] = '0x0A00' +# DEFINES['_WINDOWS'] = True +# DEFINES['__STD_C'] = True LOCAL_INCLUDES += [ - '../../checkout/include/', - '../../checkout/out/gen/angle/', - '../../checkout/src/', - '../../checkout/src/common/third_party/base/', + "../../checkout/include/", + "../../checkout/out/gen/angle/", + "../../checkout/src/", + "../../checkout/src/common/third_party/base/", ] -#CXXFLAGS += [ +# CXXFLAGS += [ # '-D__DATE__=', # '-D__TIME__=', # '-D__TIMESTAMP__=', @@ -144,31 +144,31 @@ LOCAL_INCLUDES += [ # '/Zc:inline', # '/Zc:sizedDealloc-', # '/Zc:twoPhase', -#] +# ] SOURCES += [ - '../../checkout/src/compiler/preprocessor/DiagnosticsBase.cpp', - '../../checkout/src/compiler/preprocessor/DirectiveHandlerBase.cpp', - '../../checkout/src/compiler/preprocessor/DirectiveParser.cpp', - '../../checkout/src/compiler/preprocessor/ExpressionParser.cpp', - '../../checkout/src/compiler/preprocessor/Input.cpp', - 
'../../checkout/src/compiler/preprocessor/Lexer.cpp', - '../../checkout/src/compiler/preprocessor/Macro.cpp', - '../../checkout/src/compiler/preprocessor/MacroExpander.cpp', - '../../checkout/src/compiler/preprocessor/Preprocessor.cpp', - '../../checkout/src/compiler/preprocessor/Token.cpp', - '../../checkout/src/compiler/preprocessor/Tokenizer.cpp', + "../../checkout/src/compiler/preprocessor/DiagnosticsBase.cpp", + "../../checkout/src/compiler/preprocessor/DirectiveHandlerBase.cpp", + "../../checkout/src/compiler/preprocessor/DirectiveParser.cpp", + "../../checkout/src/compiler/preprocessor/ExpressionParser.cpp", + "../../checkout/src/compiler/preprocessor/Input.cpp", + "../../checkout/src/compiler/preprocessor/Lexer.cpp", + "../../checkout/src/compiler/preprocessor/Macro.cpp", + "../../checkout/src/compiler/preprocessor/MacroExpander.cpp", + "../../checkout/src/compiler/preprocessor/Preprocessor.cpp", + "../../checkout/src/compiler/preprocessor/Token.cpp", + "../../checkout/src/compiler/preprocessor/Tokenizer.cpp", ] USE_LIBS += [ - 'angle_common', + "angle_common", ] DIRS += [ - '../angle_common', + "../angle_common", ] -#LDFLAGS += [ +# LDFLAGS += [ # '--color-diagnostics', # '/DEBUG:GHASH', # '/FIXED:NO', @@ -182,6 +182,6 @@ DIRS += [ # '/PROFILE', # '/TIMESTAMP:1575176400', # '/WX', -#] +# ] -Library('preprocessor') \ No newline at end of file +Library("preprocessor") diff --git a/gfx/angle/targets/translator/moz.build b/gfx/angle/targets/translator/moz.build index cf26e1eda176b6..4550fd745d85c6 100644 --- a/gfx/angle/targets/translator/moz.build +++ b/gfx/angle/targets/translator/moz.build @@ -1,46 +1,46 @@ # Generated by update-angle.py -include('../../moz.build.common') +include("../../moz.build.common") -DEFINES['ANGLE_ENABLE_ESSL'] = True -DEFINES['ANGLE_ENABLE_GLSL'] = True -DEFINES['ANGLE_ENABLE_HLSL'] = True -#DEFINES['ANGLE_IS_64_BIT_CPU'] = True -#DEFINES['CERT_CHAIN_PARA_HAS_EXTRA_FIELDS'] = True -DEFINES['CR_CLANG_REVISION'] = '"n332890-c2443155-1"' -DEFINES['DYNAMIC_ANNOTATIONS_ENABLED'] = '0' -#DEFINES['NDEBUG'] = True -DEFINES['NOMINMAX'] = True -DEFINES['NTDDI_VERSION'] = 'NTDDI_WIN10_RS2' -#DEFINES['NVALGRIND'] = True -#DEFINES['PSAPI_VERSION'] = '2' -DEFINES['UNICODE'] = True -#DEFINES['USE_AURA'] = '1' -#DEFINES['WIN32'] = True -#DEFINES['WIN32_LEAN_AND_MEAN'] = True -#DEFINES['WINAPI_FAMILY'] = 'WINAPI_FAMILY_DESKTOP_APP' -DEFINES['WINVER'] = '0x0A00' -DEFINES['_ATL_NO_OPENGL'] = True -DEFINES['_CRT_RAND_S'] = True -DEFINES['_CRT_SECURE_NO_DEPRECATE'] = True -DEFINES['_HAS_EXCEPTIONS'] = '0' -#DEFINES['_HAS_NODISCARD'] = True -DEFINES['_SCL_SECURE_NO_DEPRECATE'] = True -DEFINES['_SECURE_ATL'] = True -DEFINES['_UNICODE'] = True -#DEFINES['_USING_V110_SDK71_'] = True -#DEFINES['_WIN32_WINNT'] = '0x0A00' -#DEFINES['_WINDOWS'] = True -#DEFINES['__STD_C'] = True +DEFINES["ANGLE_ENABLE_ESSL"] = True +DEFINES["ANGLE_ENABLE_GLSL"] = True +DEFINES["ANGLE_ENABLE_HLSL"] = True +# DEFINES['ANGLE_IS_64_BIT_CPU'] = True +# DEFINES['CERT_CHAIN_PARA_HAS_EXTRA_FIELDS'] = True +DEFINES["CR_CLANG_REVISION"] = '"n332890-c2443155-1"' +DEFINES["DYNAMIC_ANNOTATIONS_ENABLED"] = "0" +# DEFINES['NDEBUG'] = True +DEFINES["NOMINMAX"] = True +DEFINES["NTDDI_VERSION"] = "NTDDI_WIN10_RS2" +# DEFINES['NVALGRIND'] = True +# DEFINES['PSAPI_VERSION'] = '2' +DEFINES["UNICODE"] = True +# DEFINES['USE_AURA'] = '1' +# DEFINES['WIN32'] = True +# DEFINES['WIN32_LEAN_AND_MEAN'] = True +# DEFINES['WINAPI_FAMILY'] = 'WINAPI_FAMILY_DESKTOP_APP' +DEFINES["WINVER"] = "0x0A00" +DEFINES["_ATL_NO_OPENGL"] = True 
+DEFINES["_CRT_RAND_S"] = True +DEFINES["_CRT_SECURE_NO_DEPRECATE"] = True +DEFINES["_HAS_EXCEPTIONS"] = "0" +# DEFINES['_HAS_NODISCARD'] = True +DEFINES["_SCL_SECURE_NO_DEPRECATE"] = True +DEFINES["_SECURE_ATL"] = True +DEFINES["_UNICODE"] = True +# DEFINES['_USING_V110_SDK71_'] = True +# DEFINES['_WIN32_WINNT'] = '0x0A00' +# DEFINES['_WINDOWS'] = True +# DEFINES['__STD_C'] = True LOCAL_INCLUDES += [ - '../../checkout/include/', - '../../checkout/out/gen/angle/', - '../../checkout/src/', - '../../checkout/src/common/third_party/base/', + "../../checkout/include/", + "../../checkout/out/gen/angle/", + "../../checkout/src/", + "../../checkout/src/common/third_party/base/", ] -#CXXFLAGS += [ +# CXXFLAGS += [ # '-D__DATE__=', # '-D__TIME__=', # '-D__TIMESTAMP__=', @@ -148,146 +148,146 @@ LOCAL_INCLUDES += [ # '/Zc:inline', # '/Zc:sizedDealloc-', # '/Zc:twoPhase', -#] +# ] SOURCES += [ - '../../checkout/src/compiler/translator/ASTMetadataHLSL.cpp', - '../../checkout/src/compiler/translator/AtomicCounterFunctionHLSL.cpp', - '../../checkout/src/compiler/translator/blocklayout.cpp', - '../../checkout/src/compiler/translator/blocklayoutHLSL.cpp', - '../../checkout/src/compiler/translator/BuiltInFunctionEmulator.cpp', - '../../checkout/src/compiler/translator/BuiltInFunctionEmulatorGLSL.cpp', - '../../checkout/src/compiler/translator/BuiltInFunctionEmulatorHLSL.cpp', - '../../checkout/src/compiler/translator/CallDAG.cpp', - '../../checkout/src/compiler/translator/CodeGen.cpp', - '../../checkout/src/compiler/translator/CollectVariables.cpp', - '../../checkout/src/compiler/translator/Compiler.cpp', - '../../checkout/src/compiler/translator/ConstantUnion.cpp', - '../../checkout/src/compiler/translator/Declarator.cpp', - '../../checkout/src/compiler/translator/Diagnostics.cpp', - '../../checkout/src/compiler/translator/DirectiveHandler.cpp', - '../../checkout/src/compiler/translator/emulated_builtin_functions_hlsl_autogen.cpp', - '../../checkout/src/compiler/translator/ExtensionBehavior.cpp', - '../../checkout/src/compiler/translator/ExtensionGLSL.cpp', - '../../checkout/src/compiler/translator/FlagStd140Structs.cpp', - '../../checkout/src/compiler/translator/FunctionLookup.cpp', - '../../checkout/src/compiler/translator/glslang_lex.cpp', - '../../checkout/src/compiler/translator/glslang_tab.cpp', - '../../checkout/src/compiler/translator/HashNames.cpp', - '../../checkout/src/compiler/translator/ImageFunctionHLSL.cpp', - '../../checkout/src/compiler/translator/ImmutableString.cpp', - '../../checkout/src/compiler/translator/ImmutableStringBuilder.cpp', - '../../checkout/src/compiler/translator/InfoSink.cpp', - '../../checkout/src/compiler/translator/Initialize.cpp', - '../../checkout/src/compiler/translator/InitializeDll.cpp', - '../../checkout/src/compiler/translator/IntermNode.cpp', - '../../checkout/src/compiler/translator/IsASTDepthBelowLimit.cpp', - '../../checkout/src/compiler/translator/Operator.cpp', - '../../checkout/src/compiler/translator/OutputESSL.cpp', - '../../checkout/src/compiler/translator/OutputGLSL.cpp', - '../../checkout/src/compiler/translator/OutputGLSLBase.cpp', - '../../checkout/src/compiler/translator/OutputHLSL.cpp', - '../../checkout/src/compiler/translator/OutputTree.cpp', - '../../checkout/src/compiler/translator/ParseContext.cpp', - '../../checkout/src/compiler/translator/PoolAlloc.cpp', - '../../checkout/src/compiler/translator/QualifierTypes.cpp', - '../../checkout/src/compiler/translator/ResourcesHLSL.cpp', - '../../checkout/src/compiler/translator/ShaderLang.cpp', - 
'../../checkout/src/compiler/translator/ShaderStorageBlockFunctionHLSL.cpp', - '../../checkout/src/compiler/translator/ShaderStorageBlockOutputHLSL.cpp', - '../../checkout/src/compiler/translator/ShaderVars.cpp', - '../../checkout/src/compiler/translator/StructureHLSL.cpp', - '../../checkout/src/compiler/translator/Symbol.cpp', - '../../checkout/src/compiler/translator/SymbolTable.cpp', - '../../checkout/src/compiler/translator/SymbolTable_autogen.cpp', - '../../checkout/src/compiler/translator/SymbolUniqueId.cpp', - '../../checkout/src/compiler/translator/TextureFunctionHLSL.cpp', - '../../checkout/src/compiler/translator/TranslatorESSL.cpp', - '../../checkout/src/compiler/translator/TranslatorGLSL.cpp', - '../../checkout/src/compiler/translator/TranslatorHLSL.cpp', - '../../checkout/src/compiler/translator/tree_ops/AddAndTrueToLoopCondition.cpp', - '../../checkout/src/compiler/translator/tree_ops/AddDefaultReturnStatements.cpp', - '../../checkout/src/compiler/translator/tree_ops/ArrayReturnValueToOutParameter.cpp', - '../../checkout/src/compiler/translator/tree_ops/BreakVariableAliasingInInnerLoops.cpp', - '../../checkout/src/compiler/translator/tree_ops/ClampFragDepth.cpp', - '../../checkout/src/compiler/translator/tree_ops/ClampPointSize.cpp', - '../../checkout/src/compiler/translator/tree_ops/DeclareAndInitBuiltinsForInstancedMultiview.cpp', - '../../checkout/src/compiler/translator/tree_ops/DeferGlobalInitializers.cpp', - '../../checkout/src/compiler/translator/tree_ops/EmulateGLFragColorBroadcast.cpp', - '../../checkout/src/compiler/translator/tree_ops/EmulateMultiDrawShaderBuiltins.cpp', - '../../checkout/src/compiler/translator/tree_ops/EmulatePrecision.cpp', - '../../checkout/src/compiler/translator/tree_ops/ExpandIntegerPowExpressions.cpp', - '../../checkout/src/compiler/translator/tree_ops/FoldExpressions.cpp', - '../../checkout/src/compiler/translator/tree_ops/InitializeVariables.cpp', - '../../checkout/src/compiler/translator/tree_ops/NameEmbeddedUniformStructs.cpp', - '../../checkout/src/compiler/translator/tree_ops/PruneEmptyCases.cpp', - '../../checkout/src/compiler/translator/tree_ops/PruneNoOps.cpp', - '../../checkout/src/compiler/translator/tree_ops/RecordConstantPrecision.cpp', - '../../checkout/src/compiler/translator/tree_ops/RegenerateStructNames.cpp', - '../../checkout/src/compiler/translator/tree_ops/RemoveArrayLengthMethod.cpp', - '../../checkout/src/compiler/translator/tree_ops/RemoveDynamicIndexing.cpp', - '../../checkout/src/compiler/translator/tree_ops/RemoveInvariantDeclaration.cpp', - '../../checkout/src/compiler/translator/tree_ops/RemovePow.cpp', - '../../checkout/src/compiler/translator/tree_ops/RemoveSwitchFallThrough.cpp', - '../../checkout/src/compiler/translator/tree_ops/RemoveUnreferencedVariables.cpp', - '../../checkout/src/compiler/translator/tree_ops/RewriteAtomicCounters.cpp', - '../../checkout/src/compiler/translator/tree_ops/RewriteAtomicFunctionExpressions.cpp', - '../../checkout/src/compiler/translator/tree_ops/RewriteDfdy.cpp', - '../../checkout/src/compiler/translator/tree_ops/RewriteDoWhile.cpp', - '../../checkout/src/compiler/translator/tree_ops/RewriteElseBlocks.cpp', - '../../checkout/src/compiler/translator/tree_ops/RewriteExpressionsWithShaderStorageBlock.cpp', - '../../checkout/src/compiler/translator/tree_ops/RewriteRepeatedAssignToSwizzled.cpp', - '../../checkout/src/compiler/translator/tree_ops/RewriteStructSamplers.cpp', - '../../checkout/src/compiler/translator/tree_ops/RewriteTexelFetchOffset.cpp', - 
'../../checkout/src/compiler/translator/tree_ops/RewriteUnaryMinusOperatorFloat.cpp', - '../../checkout/src/compiler/translator/tree_ops/RewriteUnaryMinusOperatorInt.cpp', - '../../checkout/src/compiler/translator/tree_ops/ScalarizeVecAndMatConstructorArgs.cpp', - '../../checkout/src/compiler/translator/tree_ops/SeparateArrayConstructorStatements.cpp', - '../../checkout/src/compiler/translator/tree_ops/SeparateArrayInitialization.cpp', - '../../checkout/src/compiler/translator/tree_ops/SeparateDeclarations.cpp', - '../../checkout/src/compiler/translator/tree_ops/SeparateExpressionsReturningArrays.cpp', - '../../checkout/src/compiler/translator/tree_ops/SimplifyLoopConditions.cpp', - '../../checkout/src/compiler/translator/tree_ops/SplitSequenceOperator.cpp', - '../../checkout/src/compiler/translator/tree_ops/UnfoldShortCircuitAST.cpp', - '../../checkout/src/compiler/translator/tree_ops/UnfoldShortCircuitToIf.cpp', - '../../checkout/src/compiler/translator/tree_ops/UseInterfaceBlockFields.cpp', - '../../checkout/src/compiler/translator/tree_ops/VectorizeVectorScalarArithmetic.cpp', - '../../checkout/src/compiler/translator/tree_ops/WrapSwitchStatementsInBlocks.cpp', - '../../checkout/src/compiler/translator/tree_util/FindFunction.cpp', - '../../checkout/src/compiler/translator/tree_util/FindMain.cpp', - '../../checkout/src/compiler/translator/tree_util/FindSymbolNode.cpp', - '../../checkout/src/compiler/translator/tree_util/IntermNode_util.cpp', - '../../checkout/src/compiler/translator/tree_util/IntermNodePatternMatcher.cpp', - '../../checkout/src/compiler/translator/tree_util/IntermTraverse.cpp', - '../../checkout/src/compiler/translator/tree_util/ReplaceShadowingVariables.cpp', - '../../checkout/src/compiler/translator/tree_util/ReplaceVariable.cpp', - '../../checkout/src/compiler/translator/tree_util/RunAtTheEndOfShader.cpp', - '../../checkout/src/compiler/translator/Types.cpp', - '../../checkout/src/compiler/translator/util.cpp', - '../../checkout/src/compiler/translator/UtilsHLSL.cpp', - '../../checkout/src/compiler/translator/ValidateAST.cpp', - '../../checkout/src/compiler/translator/ValidateGlobalInitializer.cpp', - '../../checkout/src/compiler/translator/ValidateLimitations.cpp', - '../../checkout/src/compiler/translator/ValidateMaxParameters.cpp', - '../../checkout/src/compiler/translator/ValidateOutputs.cpp', - '../../checkout/src/compiler/translator/ValidateSwitch.cpp', - '../../checkout/src/compiler/translator/ValidateVaryingLocations.cpp', - '../../checkout/src/compiler/translator/VariablePacker.cpp', - '../../checkout/src/compiler/translator/VersionGLSL.cpp', - '../../checkout/src/third_party/compiler/ArrayBoundsClamper.cpp', + "../../checkout/src/compiler/translator/ASTMetadataHLSL.cpp", + "../../checkout/src/compiler/translator/AtomicCounterFunctionHLSL.cpp", + "../../checkout/src/compiler/translator/blocklayout.cpp", + "../../checkout/src/compiler/translator/blocklayoutHLSL.cpp", + "../../checkout/src/compiler/translator/BuiltInFunctionEmulator.cpp", + "../../checkout/src/compiler/translator/BuiltInFunctionEmulatorGLSL.cpp", + "../../checkout/src/compiler/translator/BuiltInFunctionEmulatorHLSL.cpp", + "../../checkout/src/compiler/translator/CallDAG.cpp", + "../../checkout/src/compiler/translator/CodeGen.cpp", + "../../checkout/src/compiler/translator/CollectVariables.cpp", + "../../checkout/src/compiler/translator/Compiler.cpp", + "../../checkout/src/compiler/translator/ConstantUnion.cpp", + "../../checkout/src/compiler/translator/Declarator.cpp", + 
"../../checkout/src/compiler/translator/Diagnostics.cpp", + "../../checkout/src/compiler/translator/DirectiveHandler.cpp", + "../../checkout/src/compiler/translator/emulated_builtin_functions_hlsl_autogen.cpp", + "../../checkout/src/compiler/translator/ExtensionBehavior.cpp", + "../../checkout/src/compiler/translator/ExtensionGLSL.cpp", + "../../checkout/src/compiler/translator/FlagStd140Structs.cpp", + "../../checkout/src/compiler/translator/FunctionLookup.cpp", + "../../checkout/src/compiler/translator/glslang_lex.cpp", + "../../checkout/src/compiler/translator/glslang_tab.cpp", + "../../checkout/src/compiler/translator/HashNames.cpp", + "../../checkout/src/compiler/translator/ImageFunctionHLSL.cpp", + "../../checkout/src/compiler/translator/ImmutableString.cpp", + "../../checkout/src/compiler/translator/ImmutableStringBuilder.cpp", + "../../checkout/src/compiler/translator/InfoSink.cpp", + "../../checkout/src/compiler/translator/Initialize.cpp", + "../../checkout/src/compiler/translator/InitializeDll.cpp", + "../../checkout/src/compiler/translator/IntermNode.cpp", + "../../checkout/src/compiler/translator/IsASTDepthBelowLimit.cpp", + "../../checkout/src/compiler/translator/Operator.cpp", + "../../checkout/src/compiler/translator/OutputESSL.cpp", + "../../checkout/src/compiler/translator/OutputGLSL.cpp", + "../../checkout/src/compiler/translator/OutputGLSLBase.cpp", + "../../checkout/src/compiler/translator/OutputHLSL.cpp", + "../../checkout/src/compiler/translator/OutputTree.cpp", + "../../checkout/src/compiler/translator/ParseContext.cpp", + "../../checkout/src/compiler/translator/PoolAlloc.cpp", + "../../checkout/src/compiler/translator/QualifierTypes.cpp", + "../../checkout/src/compiler/translator/ResourcesHLSL.cpp", + "../../checkout/src/compiler/translator/ShaderLang.cpp", + "../../checkout/src/compiler/translator/ShaderStorageBlockFunctionHLSL.cpp", + "../../checkout/src/compiler/translator/ShaderStorageBlockOutputHLSL.cpp", + "../../checkout/src/compiler/translator/ShaderVars.cpp", + "../../checkout/src/compiler/translator/StructureHLSL.cpp", + "../../checkout/src/compiler/translator/Symbol.cpp", + "../../checkout/src/compiler/translator/SymbolTable.cpp", + "../../checkout/src/compiler/translator/SymbolTable_autogen.cpp", + "../../checkout/src/compiler/translator/SymbolUniqueId.cpp", + "../../checkout/src/compiler/translator/TextureFunctionHLSL.cpp", + "../../checkout/src/compiler/translator/TranslatorESSL.cpp", + "../../checkout/src/compiler/translator/TranslatorGLSL.cpp", + "../../checkout/src/compiler/translator/TranslatorHLSL.cpp", + "../../checkout/src/compiler/translator/tree_ops/AddAndTrueToLoopCondition.cpp", + "../../checkout/src/compiler/translator/tree_ops/AddDefaultReturnStatements.cpp", + "../../checkout/src/compiler/translator/tree_ops/ArrayReturnValueToOutParameter.cpp", + "../../checkout/src/compiler/translator/tree_ops/BreakVariableAliasingInInnerLoops.cpp", + "../../checkout/src/compiler/translator/tree_ops/ClampFragDepth.cpp", + "../../checkout/src/compiler/translator/tree_ops/ClampPointSize.cpp", + "../../checkout/src/compiler/translator/tree_ops/DeclareAndInitBuiltinsForInstancedMultiview.cpp", + "../../checkout/src/compiler/translator/tree_ops/DeferGlobalInitializers.cpp", + "../../checkout/src/compiler/translator/tree_ops/EmulateGLFragColorBroadcast.cpp", + "../../checkout/src/compiler/translator/tree_ops/EmulateMultiDrawShaderBuiltins.cpp", + "../../checkout/src/compiler/translator/tree_ops/EmulatePrecision.cpp", + 
"../../checkout/src/compiler/translator/tree_ops/ExpandIntegerPowExpressions.cpp", + "../../checkout/src/compiler/translator/tree_ops/FoldExpressions.cpp", + "../../checkout/src/compiler/translator/tree_ops/InitializeVariables.cpp", + "../../checkout/src/compiler/translator/tree_ops/NameEmbeddedUniformStructs.cpp", + "../../checkout/src/compiler/translator/tree_ops/PruneEmptyCases.cpp", + "../../checkout/src/compiler/translator/tree_ops/PruneNoOps.cpp", + "../../checkout/src/compiler/translator/tree_ops/RecordConstantPrecision.cpp", + "../../checkout/src/compiler/translator/tree_ops/RegenerateStructNames.cpp", + "../../checkout/src/compiler/translator/tree_ops/RemoveArrayLengthMethod.cpp", + "../../checkout/src/compiler/translator/tree_ops/RemoveDynamicIndexing.cpp", + "../../checkout/src/compiler/translator/tree_ops/RemoveInvariantDeclaration.cpp", + "../../checkout/src/compiler/translator/tree_ops/RemovePow.cpp", + "../../checkout/src/compiler/translator/tree_ops/RemoveSwitchFallThrough.cpp", + "../../checkout/src/compiler/translator/tree_ops/RemoveUnreferencedVariables.cpp", + "../../checkout/src/compiler/translator/tree_ops/RewriteAtomicCounters.cpp", + "../../checkout/src/compiler/translator/tree_ops/RewriteAtomicFunctionExpressions.cpp", + "../../checkout/src/compiler/translator/tree_ops/RewriteDfdy.cpp", + "../../checkout/src/compiler/translator/tree_ops/RewriteDoWhile.cpp", + "../../checkout/src/compiler/translator/tree_ops/RewriteElseBlocks.cpp", + "../../checkout/src/compiler/translator/tree_ops/RewriteExpressionsWithShaderStorageBlock.cpp", + "../../checkout/src/compiler/translator/tree_ops/RewriteRepeatedAssignToSwizzled.cpp", + "../../checkout/src/compiler/translator/tree_ops/RewriteStructSamplers.cpp", + "../../checkout/src/compiler/translator/tree_ops/RewriteTexelFetchOffset.cpp", + "../../checkout/src/compiler/translator/tree_ops/RewriteUnaryMinusOperatorFloat.cpp", + "../../checkout/src/compiler/translator/tree_ops/RewriteUnaryMinusOperatorInt.cpp", + "../../checkout/src/compiler/translator/tree_ops/ScalarizeVecAndMatConstructorArgs.cpp", + "../../checkout/src/compiler/translator/tree_ops/SeparateArrayConstructorStatements.cpp", + "../../checkout/src/compiler/translator/tree_ops/SeparateArrayInitialization.cpp", + "../../checkout/src/compiler/translator/tree_ops/SeparateDeclarations.cpp", + "../../checkout/src/compiler/translator/tree_ops/SeparateExpressionsReturningArrays.cpp", + "../../checkout/src/compiler/translator/tree_ops/SimplifyLoopConditions.cpp", + "../../checkout/src/compiler/translator/tree_ops/SplitSequenceOperator.cpp", + "../../checkout/src/compiler/translator/tree_ops/UnfoldShortCircuitAST.cpp", + "../../checkout/src/compiler/translator/tree_ops/UnfoldShortCircuitToIf.cpp", + "../../checkout/src/compiler/translator/tree_ops/UseInterfaceBlockFields.cpp", + "../../checkout/src/compiler/translator/tree_ops/VectorizeVectorScalarArithmetic.cpp", + "../../checkout/src/compiler/translator/tree_ops/WrapSwitchStatementsInBlocks.cpp", + "../../checkout/src/compiler/translator/tree_util/FindFunction.cpp", + "../../checkout/src/compiler/translator/tree_util/FindMain.cpp", + "../../checkout/src/compiler/translator/tree_util/FindSymbolNode.cpp", + "../../checkout/src/compiler/translator/tree_util/IntermNode_util.cpp", + "../../checkout/src/compiler/translator/tree_util/IntermNodePatternMatcher.cpp", + "../../checkout/src/compiler/translator/tree_util/IntermTraverse.cpp", + "../../checkout/src/compiler/translator/tree_util/ReplaceShadowingVariables.cpp", + 
"../../checkout/src/compiler/translator/tree_util/ReplaceVariable.cpp", + "../../checkout/src/compiler/translator/tree_util/RunAtTheEndOfShader.cpp", + "../../checkout/src/compiler/translator/Types.cpp", + "../../checkout/src/compiler/translator/util.cpp", + "../../checkout/src/compiler/translator/UtilsHLSL.cpp", + "../../checkout/src/compiler/translator/ValidateAST.cpp", + "../../checkout/src/compiler/translator/ValidateGlobalInitializer.cpp", + "../../checkout/src/compiler/translator/ValidateLimitations.cpp", + "../../checkout/src/compiler/translator/ValidateMaxParameters.cpp", + "../../checkout/src/compiler/translator/ValidateOutputs.cpp", + "../../checkout/src/compiler/translator/ValidateSwitch.cpp", + "../../checkout/src/compiler/translator/ValidateVaryingLocations.cpp", + "../../checkout/src/compiler/translator/VariablePacker.cpp", + "../../checkout/src/compiler/translator/VersionGLSL.cpp", + "../../checkout/src/third_party/compiler/ArrayBoundsClamper.cpp", ] USE_LIBS += [ - 'angle_common', - 'preprocessor', + "angle_common", + "preprocessor", ] DIRS += [ - '../angle_common', - '../preprocessor', + "../angle_common", + "../preprocessor", ] -#LDFLAGS += [ +# LDFLAGS += [ # '--color-diagnostics', # '/DEBUG:GHASH', # '/FIXED:NO', @@ -301,6 +301,6 @@ DIRS += [ # '/PROFILE', # '/TIMESTAMP:1575176400', # '/WX', -#] +# ] -Library('translator') \ No newline at end of file +Library("translator") diff --git a/gfx/angle/update-angle.py b/gfx/angle/update-angle.py index 21221517e53106..d6c2158985b827 100755 --- a/gfx/angle/update-angle.py +++ b/gfx/angle/update-angle.py @@ -3,9 +3,9 @@ # License, v. 2.0. If a copy of the MPL was not distributed with this # file, You can obtain one at http://mozilla.org/MPL/2.0/. -assert __name__ == '__main__' +assert __name__ == "__main__" -''' +""" To update ANGLE in Gecko, use Windows with git-bash, and setup depot_tools, python2, and python3. Because depot_tools expects `python` to be `python2` (shame!), python2 must come before python3 in your path. @@ -46,7 +46,7 @@ git push moz # Push the firefox-XX branch to github.com/mozilla/angle ~~~~ -''' +""" import json import os @@ -55,40 +55,41 @@ import shutil import subprocess import sys -from typing import * # mypy annotations +from typing import * # mypy annotations REPO_DIR = pathlib.Path.cwd() GECKO_ANGLE_DIR = pathlib.Path(__file__).parent -OUT_DIR = pathlib.Path('out') +OUT_DIR = pathlib.Path("out") COMMON_HEADER = [ - '# Generated by update-angle.py', - '', + "# Generated by update-angle.py", + "", "include('../../moz.build.common')", ] -ROOTS = ['//:translator', '//:libEGL', '//:libGLESv2'] +ROOTS = ["//:translator", "//:libEGL", "//:libGLESv2"] CHECK_ONLY = False args = sys.argv[1:] while True: arg = args.pop(0) - if arg == '--check': + if arg == "--check": CHECK_ONLY = True continue args.insert(0, arg) break GN_ENV = dict(os.environ) -GN_ENV['DEPOT_TOOLS_WIN_TOOLCHAIN'] = '0' +GN_ENV["DEPOT_TOOLS_WIN_TOOLCHAIN"] = "0" -(GIT_REMOTE, ) = args # Not always 'origin'! +(GIT_REMOTE,) = args # Not always 'origin'! # ------------------------------------------------------------------------------ + def run_checked(*args, **kwargs): - print(' ', args) + print(" ", args) sys.stdout.flush() return subprocess.run(args, check=True, **kwargs) @@ -99,17 +100,17 @@ def sorted_items(x): def collapse_dotdots(path): - split = path.split('/') + split = path.split("/") ret = [] for x in split: - if x == '..' and ret: + if x == ".." 
and ret: ret.pop() continue ret.append(x) continue - return '/'.join(ret) + return "/".join(ret) def dag_traverse(root_keys: Sequence[str], pre_recurse_func: Callable[[str], list]): @@ -138,14 +139,15 @@ def recurse(key): recurse(x) return + # ------------------------------------------------------------------------------ -print('Importing graph') +print("Importing graph") -#shutil.rmtree(str(OUT_DIR), True) +# shutil.rmtree(str(OUT_DIR), True) OUT_DIR.mkdir(exist_ok=True) -GN_ARGS = b''' +GN_ARGS = b""" # Build arguments go here. # See "gn args --list" for available build arguments. is_clang = true @@ -154,66 +156,78 @@ def recurse(key): angle_enable_gl_null = false angle_enable_null = false angle_enable_vulkan = false -'''[1:] -args_gn_path = OUT_DIR / 'args.gn' +"""[ + 1: +] +args_gn_path = OUT_DIR / "args.gn" args_gn_path.write_bytes(GN_ARGS) try: - run_checked('gn', 'gen', str(OUT_DIR), shell=True, env=GN_ENV) + run_checked("gn", "gen", str(OUT_DIR), shell=True, env=GN_ENV) except subprocess.CalledProcessError: - sys.stderr.buffer.write(b'`gn` failed. Is depot_tools in your PATH?\n') + sys.stderr.buffer.write(b"`gn` failed. Is depot_tools in your PATH?\n") exit(1) -p = run_checked('gn', 'desc', '--format=json', str(OUT_DIR), '*', stdout=subprocess.PIPE, - shell=True, env=GN_ENV) +p = run_checked( + "gn", + "desc", + "--format=json", + str(OUT_DIR), + "*", + stdout=subprocess.PIPE, + shell=True, + env=GN_ENV, +) # - -print('\nProcessing graph') +print("\nProcessing graph") descs = json.loads(p.stdout.decode()) # - # HACKHACKHACK: Inject linux/mac sources instead of trying to merge graphs of different # platforms. -descs['//:angle_common']['sources'] += [ - '//src/common/system_utils_linux.cpp', - '//src/common/system_utils_mac.cpp', - '//src/common/system_utils_posix.cpp', +descs["//:angle_common"]["sources"] += [ + "//src/common/system_utils_linux.cpp", + "//src/common/system_utils_mac.cpp", + "//src/common/system_utils_posix.cpp", ] # Ready to traverse # ------------------------------------------------------------------------------ -LIBRARY_TYPES = ('shared_library', 'static_library') +LIBRARY_TYPES = ("shared_library", "static_library") -def flattened_target(target_name: str, descs: dict, stop_at_lib: bool =True) -> dict: + +def flattened_target(target_name: str, descs: dict, stop_at_lib: bool = True) -> dict: flattened = dict(descs[target_name]) - EXPECTED_TYPES = LIBRARY_TYPES + ('source_set', 'group', 'action') + EXPECTED_TYPES = LIBRARY_TYPES + ("source_set", "group", "action") def pre(k): dep = descs[k] - dep_type = dep['type'] - deps = dep['deps'] + dep_type = dep["type"] + deps = dep["deps"] if stop_at_lib and dep_type in LIBRARY_TYPES: return ((),) - if dep_type == 'copy': - assert not deps, (target_name, dep['deps']) + if dep_type == "copy": + assert not deps, (target_name, dep["deps"]) else: assert dep_type in EXPECTED_TYPES, (k, dep_type) - for (k,v) in dep.items(): + for (k, v) in dep.items(): if type(v) in (list, tuple): flattened[k] = flattened.get(k, []) + v else: - #flattened.setdefault(k, v) + # flattened.setdefault(k, v) pass return (deps,) - dag_traverse(descs[target_name]['deps'], pre) + dag_traverse(descs[target_name]["deps"], pre) return flattened + # ------------------------------------------------------------------------------ # Check that includes are valid. (gn's version of this check doesn't seem to work!) 
@@ -222,101 +236,110 @@ def pre(k): assert INCLUDE_REGEX.match(b'\n#include "foo"') IGNORED_INCLUDES = { - b'compiler/translator/TranslatorVulkan.h', - b'libANGLE/renderer/d3d/d3d11/winrt/NativeWindow11WinRT.h', - b'libANGLE/renderer/gl/glx/DisplayGLX.h', - b'libANGLE/renderer/gl/cgl/DisplayCGL.h', - b'libANGLE/renderer/gl/egl/ozone/DisplayOzone.h', - b'libANGLE/renderer/gl/egl/android/DisplayAndroid.h', - b'libANGLE/renderer/gl/wgl/DisplayWGL.h', - b'libANGLE/renderer/null/DisplayNULL.h', - b'libANGLE/renderer/vulkan/android/DisplayVkAndroid.h', - b'libANGLE/renderer/vulkan/fuchsia/DisplayVkFuchsia.h', - b'libANGLE/renderer/vulkan/win32/DisplayVkWin32.h', - b'libANGLE/renderer/vulkan/xcb/DisplayVkXcb.h', - b'kernel/image.h', + b"compiler/translator/TranslatorVulkan.h", + b"libANGLE/renderer/d3d/d3d11/winrt/NativeWindow11WinRT.h", + b"libANGLE/renderer/gl/glx/DisplayGLX.h", + b"libANGLE/renderer/gl/cgl/DisplayCGL.h", + b"libANGLE/renderer/gl/egl/ozone/DisplayOzone.h", + b"libANGLE/renderer/gl/egl/android/DisplayAndroid.h", + b"libANGLE/renderer/gl/wgl/DisplayWGL.h", + b"libANGLE/renderer/null/DisplayNULL.h", + b"libANGLE/renderer/vulkan/android/DisplayVkAndroid.h", + b"libANGLE/renderer/vulkan/fuchsia/DisplayVkFuchsia.h", + b"libANGLE/renderer/vulkan/win32/DisplayVkWin32.h", + b"libANGLE/renderer/vulkan/xcb/DisplayVkXcb.h", + b"kernel/image.h", } IGNORED_INCLUDE_PREFIXES = { - b'android', - b'Carbon', - b'CoreFoundation', - b'CoreServices', - b'IOSurface', - b'mach', - b'mach-o', - b'OpenGL', - b'pci', - b'sys', - b'wrl', - b'X11', + b"android", + b"Carbon", + b"CoreFoundation", + b"CoreServices", + b"IOSurface", + b"mach", + b"mach-o", + b"OpenGL", + b"pci", + b"sys", + b"wrl", + b"X11", } + def has_all_includes(target_name: str, descs: dict) -> bool: flat = flattened_target(target_name, descs, stop_at_lib=False) - acceptable_sources = flat.get('sources', []) + flat.get('outputs', []) - acceptable_sources = (x.rsplit('/', 1)[-1].encode() for x in acceptable_sources) + acceptable_sources = flat.get("sources", []) + flat.get("outputs", []) + acceptable_sources = (x.rsplit("/", 1)[-1].encode() for x in acceptable_sources) acceptable_sources = set(acceptable_sources) ret = True desc = descs[target_name] - for cur_file in desc.get('sources', []): - assert cur_file.startswith('/'), cur_file - if not cur_file.startswith('//'): + for cur_file in desc.get("sources", []): + assert cur_file.startswith("/"), cur_file + if not cur_file.startswith("//"): continue cur_file = pathlib.Path(cur_file[2:]) text = cur_file.read_bytes() for m in INCLUDE_REGEX.finditer(text): - if m.group(1) == b'<': + if m.group(1) == b"<": continue include = m.group(2) if include in IGNORED_INCLUDES: continue try: - (prefix, _) = include.split(b'/', 1) + (prefix, _) = include.split(b"/", 1) if prefix in IGNORED_INCLUDE_PREFIXES: continue except ValueError: pass - include_file = include.rsplit(b'/', 1)[-1] + include_file = include.rsplit(b"/", 1)[-1] if include_file not in acceptable_sources: - #print(' acceptable_sources:') - #for x in sorted(acceptable_sources): + # print(' acceptable_sources:') + # for x in sorted(acceptable_sources): # print(' ', x) - print('Warning in {}: {}: Invalid include: {}'.format(target_name, cur_file, include)) + print( + "Warning in {}: {}: Invalid include: {}".format( + target_name, cur_file, include + ) + ) ret = False - #print('Looks valid:', m.group()) + # print('Looks valid:', m.group()) continue return ret + # - # Gather real targets: + def gather_libraries(roots: Sequence[str], descs: 
dict) -> Set[str]: libraries = set() + def fn(target_name): cur = descs[target_name] - print(' ' + cur['type'], target_name) + print(" " + cur["type"], target_name) assert has_all_includes(target_name, descs), target_name - if cur['type'] in ('shared_library', 'static_library'): + if cur["type"] in ("shared_library", "static_library"): libraries.add(target_name) - return (cur['deps'], ) + return (cur["deps"],) dag_traverse(roots, fn) return libraries + # - libraries = gather_libraries(ROOTS, descs) -print(f'\n{len(libraries)} libraries:') +print(f"\n{len(libraries)} libraries:") for k in libraries: - print(' ', k) + print(" ", k) if CHECK_ONLY: - print('\n--check complete.') + print("\n--check complete.") exit(0) # ------------------------------------------------------------------------------ @@ -324,76 +347,76 @@ def fn(target_name): import vendor_from_git -print('') +print("") vendor_from_git.record_cherry_picks(GECKO_ANGLE_DIR, GIT_REMOTE) # -- + def sortedi(x): return sorted(x, key=str.lower) + def append_arr(dest, name, vals, indent=0): if not vals: return - dest.append('{}{} += ['.format(' '*4*indent, name)) + dest.append("{}{} += [".format(" " * 4 * indent, name)) for x in sortedi(vals): - dest.append("{}'{}',".format(' '*4*(indent+1), x)) - dest.append('{}]'.format(' '*4*indent)) - dest.append('') + dest.append("{}'{}',".format(" " * 4 * (indent + 1), x)) + dest.append("{}]".format(" " * 4 * indent)) + dest.append("") return -REGISTERED_DEFINES = { - 'ANGLE_CAPTURE_ENABLED': True, - 'ANGLE_EGL_LIBRARY_NAME': False, - 'ANGLE_ENABLE_D3D11': True, - 'ANGLE_ENABLE_D3D9': True, - 'ANGLE_ENABLE_DEBUG_ANNOTATIONS': True, - 'ANGLE_ENABLE_NULL': False, - 'ANGLE_ENABLE_OPENGL': False, - 'ANGLE_ENABLE_OPENGL_NULL': False, - 'ANGLE_ENABLE_ESSL': True, - 'ANGLE_ENABLE_GLSL': True, - 'ANGLE_ENABLE_HLSL': True, - 'ANGLE_GENERATE_SHADER_DEBUG_INFO': True, - 'ANGLE_GLESV2_LIBRARY_NAME': True, - 'ANGLE_IS_64_BIT_CPU': False, - 'ANGLE_PRELOADED_D3DCOMPILER_MODULE_NAMES': False, - 'ANGLE_USE_EGL_LOADER': True, - 'CERT_CHAIN_PARA_HAS_EXTRA_FIELDS': False, - 'CHROMIUM_BUILD': False, - 'COMPONENT_BUILD': False, - 'DYNAMIC_ANNOTATIONS_ENABLED': True, - 'EGL_EGL_PROTOTYPES': True, - 'EGL_EGLEXT_PROTOTYPES': True, - 'EGLAPI': True, - 'FIELDTRIAL_TESTING_ENABLED': False, - 'FULL_SAFE_BROWSING': False, - 'GL_API': True, - 'GL_APICALL': True, - 'GL_GLES_PROTOTYPES': True, - 'GL_GLEXT_PROTOTYPES': True, - 'GPU_INFO_USE_SETUPAPI': True, - 'LIBANGLE_IMPLEMENTATION': True, - 'LIBEGL_IMPLEMENTATION': True, - 'LIBGLESV2_IMPLEMENTATION': True, - 'NOMINMAX': True, - 'NO_TCMALLOC': False, +REGISTERED_DEFINES = { + "ANGLE_CAPTURE_ENABLED": True, + "ANGLE_EGL_LIBRARY_NAME": False, + "ANGLE_ENABLE_D3D11": True, + "ANGLE_ENABLE_D3D9": True, + "ANGLE_ENABLE_DEBUG_ANNOTATIONS": True, + "ANGLE_ENABLE_NULL": False, + "ANGLE_ENABLE_OPENGL": False, + "ANGLE_ENABLE_OPENGL_NULL": False, + "ANGLE_ENABLE_ESSL": True, + "ANGLE_ENABLE_GLSL": True, + "ANGLE_ENABLE_HLSL": True, + "ANGLE_GENERATE_SHADER_DEBUG_INFO": True, + "ANGLE_GLESV2_LIBRARY_NAME": True, + "ANGLE_IS_64_BIT_CPU": False, + "ANGLE_PRELOADED_D3DCOMPILER_MODULE_NAMES": False, + "ANGLE_USE_EGL_LOADER": True, + "CERT_CHAIN_PARA_HAS_EXTRA_FIELDS": False, + "CHROMIUM_BUILD": False, + "COMPONENT_BUILD": False, + "DYNAMIC_ANNOTATIONS_ENABLED": True, + "EGL_EGL_PROTOTYPES": True, + "EGL_EGLEXT_PROTOTYPES": True, + "EGLAPI": True, + "FIELDTRIAL_TESTING_ENABLED": False, + "FULL_SAFE_BROWSING": False, + "GL_API": True, + "GL_APICALL": True, + "GL_GLES_PROTOTYPES": True, + 
"GL_GLEXT_PROTOTYPES": True, + "GPU_INFO_USE_SETUPAPI": True, + "LIBANGLE_IMPLEMENTATION": True, + "LIBEGL_IMPLEMENTATION": True, + "LIBGLESV2_IMPLEMENTATION": True, + "NOMINMAX": True, + "NO_TCMALLOC": False, # Else: gfx/angle/checkout/src/libANGLE/renderer/d3d/d3d11/win32/NativeWindow11Win32.cpp(89): error C2787: 'IDCompositionDevice': no GUID has been associated with this object - 'NTDDI_VERSION': True, - - 'PSAPI_VERSION': False, - 'SAFE_BROWSING_CSD': False, - 'SAFE_BROWSING_DB_LOCAL': False, - 'UNICODE': True, - 'USE_AURA': False, - 'V8_DEPRECATION_WARNINGS': False, - 'WIN32': False, - 'WIN32_LEAN_AND_MEAN': False, - 'WINAPI_FAMILY': False, - - 'WINVER': True, + "NTDDI_VERSION": True, + "PSAPI_VERSION": False, + "SAFE_BROWSING_CSD": False, + "SAFE_BROWSING_DB_LOCAL": False, + "UNICODE": True, + "USE_AURA": False, + "V8_DEPRECATION_WARNINGS": False, + "WIN32": False, + "WIN32_LEAN_AND_MEAN": False, + "WINAPI_FAMILY": False, + "WINVER": True, # Otherwise: # gfx/angle/targets/libANGLE # In file included from c:/dev/mozilla/gecko4/gfx/angle/checkout/src/libANGLE/renderer/d3d/d3d11/converged/CompositorNativeWindow11.cpp:10: @@ -401,45 +424,43 @@ def append_arr(dest, name, vals, indent=0): # C:\Program Files (x86)\Windows Kits\10\include\10.0.17763.0\winrt\Windows.ui.composition.interop.h(103,20): error: unknown type name 'POINTER_INFO' # _In_ const POINTER_INFO& pointerInfo # ^ - - 'WTF_USE_DYNAMIC_ANNOTATIONS': False, - '_ATL_NO_OPENGL': True, - '_CRT_RAND_S': True, - '_CRT_SECURE_NO_DEPRECATE': True, - '_DEBUG': False, - '_HAS_EXCEPTIONS': True, - '_HAS_ITERATOR_DEBUGGING': False, - '_SCL_SECURE_NO_DEPRECATE': True, - '_SECURE_ATL': True, - '_UNICODE': True, - '_USING_V110_SDK71_': False, - '_WIN32_WINNT': False, - '_WINDOWS': False, - '__STD_C': False, - + "WTF_USE_DYNAMIC_ANNOTATIONS": False, + "_ATL_NO_OPENGL": True, + "_CRT_RAND_S": True, + "_CRT_SECURE_NO_DEPRECATE": True, + "_DEBUG": False, + "_HAS_EXCEPTIONS": True, + "_HAS_ITERATOR_DEBUGGING": False, + "_SCL_SECURE_NO_DEPRECATE": True, + "_SECURE_ATL": True, + "_UNICODE": True, + "_USING_V110_SDK71_": False, + "_WIN32_WINNT": False, + "_WINDOWS": False, + "__STD_C": False, # clang specific - 'CR_CLANG_REVISION': True, - 'NDEBUG': False, - 'NVALGRIND': False, - '_HAS_NODISCARD': False, + "CR_CLANG_REVISION": True, + "NDEBUG": False, + "NVALGRIND": False, + "_HAS_NODISCARD": False, } # - -print('\nRun actions') +print("\nRun actions") required_files: Set[str] = set() -run_checked('ninja', '-C', str(OUT_DIR), ':commit_id') -required_files |= set(descs['//:commit_id']['outputs']) +run_checked("ninja", "-C", str(OUT_DIR), ":commit_id") +required_files |= set(descs["//:commit_id"]["outputs"]) # - # Export our targets -print('\nExport targets') +print("\nExport targets") # Clear our dest directories -targets_dir = pathlib.Path(GECKO_ANGLE_DIR, 'targets') -checkout_dir = pathlib.Path(GECKO_ANGLE_DIR, 'checkout') +targets_dir = pathlib.Path(GECKO_ANGLE_DIR, "targets") +checkout_dir = pathlib.Path(GECKO_ANGLE_DIR, "checkout") shutil.rmtree(targets_dir, True) shutil.rmtree(checkout_dir, True) @@ -448,146 +469,150 @@ def append_arr(dest, name, vals, indent=0): # - + def export_target(target_name) -> Set[str]: - #print(' ', target_name) + # print(' ', target_name) desc = descs[target_name] flat = flattened_target(target_name, descs) - assert target_name.startswith('//:'), target_name + assert target_name.startswith("//:"), target_name name = target_name[3:] - required_files: Set[str] = set(flat['sources']) + required_files: Set[str] = 
set(flat["sources"]) # Create our manifest lines target_dir = targets_dir / name target_dir.mkdir(exist_ok=True) lines = list(COMMON_HEADER) - lines.append('') + lines.append("") - for x in sorted(set(desc['defines'])): + for x in sorted(set(desc["defines"])): try: - (k, v) = x.split('=', 1) + (k, v) = x.split("=", 1) v = f"'{v}'" except ValueError: - (k, v) = (x, 'True') + (k, v) = (x, "True") try: line = f"DEFINES['{k}'] = {v}" if REGISTERED_DEFINES[k] == False: - line = '#' + line + line = "#" + line lines.append(line) except KeyError: - print(f'[{name}] Unrecognized define: {k}') - lines.append('') + print(f"[{name}] Unrecognized define: {k}") + lines.append("") - cxxflags = set(desc['cflags'] + desc['cflags_cc']) + cxxflags = set(desc["cflags"] + desc["cflags_cc"]) def fixup_paths(listt): for x in set(listt): - assert x.startswith('//'), x - yield '../../checkout/' + x[2:] - - sources_by_config: Dict[str,List[str]] = {} - extras: Dict[str,str] = dict() - for x in fixup_paths(flat['sources']): - #print(' '*5, x) - (b, e) = x.rsplit('.', 1) - if e in ['h', 'y', 'l', 'inc', 'inl']: + assert x.startswith("//"), x + yield "../../checkout/" + x[2:] + + sources_by_config: Dict[str, List[str]] = {} + extras: Dict[str, str] = dict() + for x in fixup_paths(flat["sources"]): + # print(' '*5, x) + (b, e) = x.rsplit(".", 1) + if e in ["h", "y", "l", "inc", "inl"]: continue - elif e in ['cpp', 'cc', 'c']: - if b.endswith('_win'): + elif e in ["cpp", "cc", "c"]: + if b.endswith("_win"): config = "CONFIG['OS_ARCH'] == 'WINNT'" - elif b.endswith('_linux'): + elif b.endswith("_linux"): # Include these on BSDs too. config = "CONFIG['OS_ARCH'] not in ('Darwin', 'WINNT')" - elif b.endswith('_mac'): + elif b.endswith("_mac"): config = "CONFIG['OS_ARCH'] == 'Darwin'" - elif b.endswith('_posix'): + elif b.endswith("_posix"): config = "CONFIG['OS_ARCH'] != 'WINNT'" else: - config = '' # None can't compare against str. + config = "" # None can't compare against str. 
sources_by_config.setdefault(config, []).append(x) continue - elif e == 'rc': - assert 'RCFILE' not in extras, (target_name, extras['RCFILE'], x) - extras['RCFILE'] = f"'{x}'" + elif e == "rc": + assert "RCFILE" not in extras, (target_name, extras["RCFILE"], x) + extras["RCFILE"] = f"'{x}'" continue - elif e == 'def': - assert 'DEFFILE' not in extras, (target_name, extras['DEFFILE'], x) - extras['DEFFILE'] = f"'{x}'" + elif e == "def": + assert "DEFFILE" not in extras, (target_name, extras["DEFFILE"], x) + extras["DEFFILE"] = f"'{x}'" continue else: assert False, ("Unhandled ext:", x) - ldflags = set(desc['ldflags']) - DEF_PREFIX = '/DEF:' + ldflags = set(desc["ldflags"]) + DEF_PREFIX = "/DEF:" for x in set(ldflags): if x.startswith(DEF_PREFIX): - def_path = x[len(DEF_PREFIX):] + def_path = x[len(DEF_PREFIX) :] required_files.add(def_path) - assert 'DEFFILE' not in extras + assert "DEFFILE" not in extras ldflags.remove(x) - def_path = str(OUT_DIR) + '/' + def_path - def_path = '//' + collapse_dotdots(def_path) + def_path = str(OUT_DIR) + "/" + def_path + def_path = "//" + collapse_dotdots(def_path) def_rel_path = list(fixup_paths([def_path]))[0] - extras['DEFFILE'] = "'{}'".format(def_rel_path) + extras["DEFFILE"] = "'{}'".format(def_rel_path) - os_libs = list(map( lambda x: x[:-len('.lib')], set(desc.get('libs', [])) )) + os_libs = list(map(lambda x: x[: -len(".lib")], set(desc.get("libs", [])))) def append_arr_commented(dest, name, src): lines = [] append_arr(lines, name, src) + def comment(x): if x: - x = '#' + x + x = "#" + x return x + lines = map(comment, lines) dest += lines - append_arr(lines, 'LOCAL_INCLUDES', fixup_paths(desc['include_dirs'])) - append_arr_commented(lines, 'CXXFLAGS', cxxflags) + append_arr(lines, "LOCAL_INCLUDES", fixup_paths(desc["include_dirs"])) + append_arr_commented(lines, "CXXFLAGS", cxxflags) - for (config,v) in sorted_items(sources_by_config): + for (config, v) in sorted_items(sources_by_config): indent = 0 if config: lines.append("if {}:".format(config)) indent = 1 - append_arr(lines, 'SOURCES', v, indent=indent) + append_arr(lines, "SOURCES", v, indent=indent) dep_libs: Set[str] = set() - for dep_name in set(flat['deps']): + for dep_name in set(flat["deps"]): dep = descs[dep_name] - if dep['type'] in LIBRARY_TYPES: - assert dep_name.startswith('//:'), dep_name + if dep["type"] in LIBRARY_TYPES: + assert dep_name.startswith("//:"), dep_name dep_libs.add(dep_name[3:]) - append_arr(lines, 'USE_LIBS', dep_libs) - append_arr(lines, 'DIRS', ['../' + x for x in dep_libs]) - append_arr(lines, 'OS_LIBS', os_libs) - append_arr_commented(lines, 'LDFLAGS', ldflags) + append_arr(lines, "USE_LIBS", dep_libs) + append_arr(lines, "DIRS", ["../" + x for x in dep_libs]) + append_arr(lines, "OS_LIBS", os_libs) + append_arr_commented(lines, "LDFLAGS", ldflags) - for (k,v) in sorted(extras.items()): - lines.append('{} = {}'.format(k, v)) + for (k, v) in sorted(extras.items()): + lines.append("{} = {}".format(k, v)) - lib_type = desc['type'] - if lib_type == 'shared_library': + lib_type = desc["type"] + if lib_type == "shared_library": lines.append(f"GeckoSharedLibrary('{name}', linkage=None)") - elif lib_type == 'static_library': + elif lib_type == "static_library": lines.append(f"Library('{name}')") else: assert False, lib_type # Write it out - mozbuild = target_dir / 'moz.build' - print(' ', ' ', f'Writing {mozbuild}') - data = b'\n'.join((x.encode() for x in lines)) + mozbuild = target_dir / "moz.build" + print(" ", " ", f"Writing {mozbuild}") + data = 
b"\n".join((x.encode() for x in lines)) mozbuild.write_bytes(data) return required_files + # - for target_name in libraries: @@ -596,14 +621,14 @@ def comment(x): # Copy all the files -print('\nMigrate required files') +print("\nMigrate required files") i = 0 for x in required_files: i += 1 - sys.stdout.write(f'\r Copying {i}/{len(required_files)}') + sys.stdout.write(f"\r Copying {i}/{len(required_files)}") sys.stdout.flush() - assert x.startswith('//'), x + assert x.startswith("//"), x x = x[2:] src = REPO_DIR / x @@ -611,7 +636,7 @@ def comment(x): dest.parent.mkdir(parents=True, exist_ok=True) data = src.read_bytes() - data = data.replace(b'\r\n', b'\n') + data = data.replace(b"\r\n", b"\n") dest.write_bytes(data) -print('\n\nDone') +print("\n\nDone") diff --git a/gfx/angle/vendor_from_git.py b/gfx/angle/vendor_from_git.py index b92fd5f9654198..a227bbbe381841 100644 --- a/gfx/angle/vendor_from_git.py +++ b/gfx/angle/vendor_from_git.py @@ -3,15 +3,15 @@ # License, v. 2.0. If a copy of the MPL was not distributed with this # file, You can obtain one at http://mozilla.org/MPL/2.0/. -assert __name__ != '__main__' +assert __name__ != "__main__" -''' +""" Any time we vendor[1] from an external git repo, we want to keep a record of the csets we're pulling from. This script leaves a record of the merge-base reference tip and cherry-picks that we pull into Gecko. (such as gfx/angle/cherry_picks.txt) -''' +""" from pathlib import * import subprocess @@ -19,38 +19,49 @@ # -- + def print_now(*args): print(*args) sys.stdout.flush() def run_checked(*args, **kwargs): - print(' ', args) + print(" ", args) sys.stdout.flush() return subprocess.run(args, check=True, **kwargs) + # -- + def record_cherry_picks(dir_in_gecko, merge_base_origin): # merge_base_origin is not always 'origin'! - merge_base_from = Path(dir_in_gecko, 'MERGE_BASE').read_text().split('\n')[0] - merge_base_from = merge_base_origin + '/' + merge_base_from + merge_base_from = Path(dir_in_gecko, "MERGE_BASE").read_text().split("\n")[0] + merge_base_from = merge_base_origin + "/" + merge_base_from - assert '/' in merge_base_from, 'Please specify a reference tip from a remote.' - log_path = Path(dir_in_gecko, 'cherry_picks.txt') - print_now('Logging cherry picks to {}.'.format(log_path)) + assert "/" in merge_base_from, "Please specify a reference tip from a remote." 
+ log_path = Path(dir_in_gecko, "cherry_picks.txt") + print_now("Logging cherry picks to {}.".format(log_path)) - merge_base = run_checked('git', 'merge-base', 'HEAD', merge_base_from, - stdout=subprocess.PIPE).stdout.decode().strip() + merge_base = ( + run_checked( + "git", "merge-base", "HEAD", merge_base_from, stdout=subprocess.PIPE + ) + .stdout.decode() + .strip() + ) - mb_info = run_checked('git', 'log', '{}~1..{}'.format(merge_base, merge_base), - stdout=subprocess.PIPE).stdout - cherries = run_checked('git', 'log', merge_base + '..', stdout=subprocess.PIPE).stdout + mb_info = run_checked( + "git", "log", "{}~1..{}".format(merge_base, merge_base), stdout=subprocess.PIPE + ).stdout + cherries = run_checked( + "git", "log", merge_base + "..", stdout=subprocess.PIPE + ).stdout - with open(log_path, 'wb') as f: + with open(log_path, "wb") as f: f.write(cherries) - f.write(b'\nCherries picked') - f.write(b'\n' + (b'=' * 80)) - f.write(b'\nMerge base from: ' + merge_base_from.encode()) - f.write(b'\n\n') + f.write(b"\nCherries picked") + f.write(b"\n" + (b"=" * 80)) + f.write(b"\nMerge base from: " + merge_base_from.encode()) + f.write(b"\n\n") f.write(mb_info) diff --git a/gfx/config/moz.build b/gfx/config/moz.build index 3428467fc8a011..153bda012757bd 100644 --- a/gfx/config/moz.build +++ b/gfx/config/moz.build @@ -5,25 +5,25 @@ # file, You can obtain one at http://mozilla.org/MPL/2.0/. EXPORTS += [ - 'gfxConfig.h', - 'gfxFallback.h', - 'gfxFeature.h', + "gfxConfig.h", + "gfxFallback.h", + "gfxFeature.h", ] EXPORTS.mozilla.gfx += [ - 'gfxConfigManager.h', - 'gfxVarReceiver.h', - 'gfxVars.h', + "gfxConfigManager.h", + "gfxVarReceiver.h", + "gfxVars.h", ] UNIFIED_SOURCES += [ - 'gfxConfig.cpp', - 'gfxConfigManager.cpp', - 'gfxFeature.cpp', - 'gfxVars.cpp', - 'WebRenderRollout.cpp', + "gfxConfig.cpp", + "gfxConfigManager.cpp", + "gfxFeature.cpp", + "gfxVars.cpp", + "WebRenderRollout.cpp", ] -include('/ipc/chromium/chromium-config.mozbuild') +include("/ipc/chromium/chromium-config.mozbuild") -FINAL_LIBRARY = 'xul' +FINAL_LIBRARY = "xul" diff --git a/gfx/gl/GLConsts.py b/gfx/gl/GLConsts.py index b279ba200ac606..90aceb9eb030a7 100755 --- a/gfx/gl/GLConsts.py +++ b/gfx/gl/GLConsts.py @@ -1,6 +1,6 @@ #!/usr/bin/env python3 -''' +""" This script will regenerate and update GLConsts.h. Step 1: @@ -19,7 +19,7 @@ Step 4: Enjoy =) -''' +""" # includes from typing import List # mypy! @@ -35,7 +35,7 @@ # - -HEADER = b''' +HEADER = b""" /* This Source Code Form is subject to the terms of the Mozilla Public * License, v. 2.0. If a copy of the MPL was not distributed with this * file, You can obtain one at http://mozilla.org/MPL/2.0/. */ @@ -52,13 +52,17 @@ * * To generate this file, see tutorial in \'GLParseRegistryXML.py\'. 
*/ -'''[1:] +"""[ + 1: +] -FOOTER = b''' +FOOTER = b""" #endif // GLCONSTS_H_ // clang-format on -'''[1:] +"""[ + 1: +] # - @@ -68,12 +72,12 @@ def format_lib_constant(lib, name, value): # name is the name of the const (example: MAX_TEXTURE_SIZE) # value is the value of the const (example: 0xABCD) - define = '#define LOCAL_' + lib + '_' + name + define = "#define LOCAL_" + lib + "_" + name whitespace = 60 - len(define) if whitespace < 0: whitespace = whitespace % 8 - return define + ' ' * whitespace + ' ' + value + return define + " " * whitespace + " " + value class GLConst: @@ -85,12 +89,12 @@ def __init__(self, lib, name, value, type): class GLDatabase: - LIBS = ['GL', 'EGL', 'GLX', 'WGL'] + LIBS = ["GL", "EGL", "GLX", "WGL"] def __init__(self): self.consts = {} self.libs = set(GLDatabase.LIBS) - self.vendors = set(['EXT', 'ATI']) + self.vendors = set(["EXT", "ATI"]) # there is no vendor="EXT" and vendor="ATI" in gl.xml, # so we manualy declare them @@ -98,48 +102,48 @@ def load_xml(self, xml_path): tree = xml.etree.ElementTree.parse(xml_path) root = tree.getroot() - for enums in root.iter('enums'): - vendor = enums.get('vendor') + for enums in root.iter("enums"): + vendor = enums.get("vendor") if not vendor: # there some standart enums that do have the vendor attribute, # so we fake them as ARB's enums - vendor = 'ARB' + vendor = "ARB" if vendor not in self.vendors: # we map this new vendor in the vendors set. self.vendors.add(vendor) - namespaceType = enums.get('type') + namespaceType = enums.get("type") for enum in enums: - if enum.tag != 'enum': + if enum.tag != "enum": # this is not an enum => we skip it continue - lib = enum.get('name').split('_')[0] + lib = enum.get("name").split("_")[0] if lib not in self.libs: # unknown library => we skip it continue - name = enum.get('name')[len(lib) + 1:] - value = enum.get('value') - type = enum.get('type') + name = enum.get("name")[len(lib) + 1 :] + value = enum.get("value") + type = enum.get("type") if not type: # if no type specified, we get the namespace's default type type = namespaceType - self.consts[lib + '_' + name] = GLConst(lib, name, value, type) + self.consts[lib + "_" + name] = GLConst(lib, name, value, type) # - db = GLDatabase() -db.load_xml(XML_DIR / 'gl.xml') -db.load_xml(XML_DIR / 'glx.xml') -db.load_xml(XML_DIR / 'wgl.xml') -db.load_xml(XML_DIR / 'egl.xml') +db.load_xml(XML_DIR / "gl.xml") +db.load_xml(XML_DIR / "glx.xml") +db.load_xml(XML_DIR / "wgl.xml") +db.load_xml(XML_DIR / "egl.xml") # - @@ -148,7 +152,7 @@ def load_xml(self, xml_path): keys = sorted(db.consts.keys()) for lib in db.LIBS: - lines.append('// ' + lib) + lines.append("// " + lib) for k in keys: const = db.consts[k] @@ -159,14 +163,14 @@ def load_xml(self, xml_path): const_str = format_lib_constant(lib, const.name, const.value) lines.append(const_str) - lines.append('') + lines.append("") # - b_lines: List[bytes] = [HEADER] + [x.encode() for x in lines] + [FOOTER] -b_data: bytes = b'\n'.join(b_lines) +b_data: bytes = b"\n".join(b_lines) -dest = pathlib.Path('GLConsts.h') +dest = pathlib.Path("GLConsts.h") dest.write_bytes(b_data) -print(f'Wrote {len(b_data)} bytes.') # Some indication that we're successful. +print(f"Wrote {len(b_data)} bytes.") # Some indication that we're successful. diff --git a/gfx/gl/moz.build b/gfx/gl/moz.build index b13427c5e08e34..72e576a408bf7f 100644 --- a/gfx/gl/moz.build +++ b/gfx/gl/moz.build @@ -4,170 +4,163 @@ # License, v. 2.0. 
If a copy of the MPL was not distributed with this # file, You can obtain one at http://mozilla.org/MPL/2.0/. -gl_provider = 'Null' - -if CONFIG['MOZ_WIDGET_TOOLKIT'] == 'windows': - gl_provider = 'WGL' -elif CONFIG['MOZ_WIDGET_TOOLKIT'] == 'cocoa': - gl_provider = 'CGL' -elif CONFIG['MOZ_WIDGET_TOOLKIT'] == 'uikit': - gl_provider = 'EAGL' -elif CONFIG['MOZ_WIDGET_TOOLKIT'] == 'gtk': - if CONFIG['MOZ_EGL_XRENDER_COMPOSITE']: - gl_provider = 'EGL' +gl_provider = "Null" + +if CONFIG["MOZ_WIDGET_TOOLKIT"] == "windows": + gl_provider = "WGL" +elif CONFIG["MOZ_WIDGET_TOOLKIT"] == "cocoa": + gl_provider = "CGL" +elif CONFIG["MOZ_WIDGET_TOOLKIT"] == "uikit": + gl_provider = "EAGL" +elif CONFIG["MOZ_WIDGET_TOOLKIT"] == "gtk": + if CONFIG["MOZ_EGL_XRENDER_COMPOSITE"]: + gl_provider = "EGL" else: - gl_provider = 'GLX' -elif CONFIG['MOZ_WIDGET_TOOLKIT'] == 'android': - gl_provider = 'EGL' + gl_provider = "GLX" +elif CONFIG["MOZ_WIDGET_TOOLKIT"] == "android": + gl_provider = "EGL" -if CONFIG['MOZ_GL_PROVIDER']: - gl_provider = CONFIG['MOZ_GL_PROVIDER'] +if CONFIG["MOZ_GL_PROVIDER"]: + gl_provider = CONFIG["MOZ_GL_PROVIDER"] EXPORTS += [ - 'AndroidSurfaceTexture.h', - 'DecomposeIntoNoRepeatTriangles.h', - 'ForceDiscreteGPUHelperCGL.h', - 'GfxTexturesReporter.h', - 'GLBlitHelper.h', - 'GLConsts.h', - 'GLContext.h', - 'GLContextEGL.h', - 'GLContextProvider.h', - 'GLContextProviderImpl.h', - 'GLContextSymbols.h', - 'GLContextTypes.h', - 'GLDefs.h', - 'GLLibraryEGL.h', - 'GLLibraryLoader.h', - 'GLReadTexImageHelper.h', - 'GLScreenBuffer.h', - 'GLTextureImage.h', - 'GLTypes.h', - 'GLUploadHelpers.h', - 'HeapCopyOfStackArray.h', - 'MozFramebuffer.h', - 'ScopedGLHelpers.h', - 'SharedSurface.h', - 'SharedSurfaceEGL.h', - 'SharedSurfaceGL.h', - 'SurfaceTypes.h', + "AndroidSurfaceTexture.h", + "DecomposeIntoNoRepeatTriangles.h", + "ForceDiscreteGPUHelperCGL.h", + "GfxTexturesReporter.h", + "GLBlitHelper.h", + "GLConsts.h", + "GLContext.h", + "GLContextEGL.h", + "GLContextProvider.h", + "GLContextProviderImpl.h", + "GLContextSymbols.h", + "GLContextTypes.h", + "GLDefs.h", + "GLLibraryEGL.h", + "GLLibraryLoader.h", + "GLReadTexImageHelper.h", + "GLScreenBuffer.h", + "GLTextureImage.h", + "GLTypes.h", + "GLUploadHelpers.h", + "HeapCopyOfStackArray.h", + "MozFramebuffer.h", + "ScopedGLHelpers.h", + "SharedSurface.h", + "SharedSurfaceEGL.h", + "SharedSurfaceGL.h", + "SurfaceTypes.h", ] # Win32 is a special snowflake, for ANGLE -if CONFIG['MOZ_WIDGET_TOOLKIT'] == 'windows': +if CONFIG["MOZ_WIDGET_TOOLKIT"] == "windows": EXPORTS += [ - 'GLContextWGL.h', - 'SharedSurfaceANGLE.h', # Needs for `HANDLE`. - 'SharedSurfaceD3D11Interop.h', - 'WGLLibrary.h', + "GLContextWGL.h", + "SharedSurfaceANGLE.h", # Needs for `HANDLE`. 
+ "SharedSurfaceD3D11Interop.h", + "WGLLibrary.h", ] UNIFIED_SOURCES += [ - 'GLBlitHelperD3D.cpp', - 'GLContextProviderWGL.cpp', - 'SharedSurfaceANGLE.cpp', - 'SharedSurfaceD3D11Interop.cpp', + "GLBlitHelperD3D.cpp", + "GLContextProviderWGL.cpp", + "SharedSurfaceANGLE.cpp", + "SharedSurfaceD3D11Interop.cpp", ] -if CONFIG['MOZ_WIDGET_TOOLKIT'] == 'android': +if CONFIG["MOZ_WIDGET_TOOLKIT"] == "android": EXPORTS += [ - 'AndroidNativeWindow.h', - 'SharedSurfaceAndroidHardwareBuffer.h', + "AndroidNativeWindow.h", + "SharedSurfaceAndroidHardwareBuffer.h", ] UNIFIED_SOURCES += [ - 'SharedSurfaceAndroidHardwareBuffer.cpp', + "SharedSurfaceAndroidHardwareBuffer.cpp", ] -if gl_provider == 'CGL': +if gl_provider == "CGL": # These files include Mac headers that are unfriendly to unified builds SOURCES += [ "GLContextProviderCGL.mm", ] EXPORTS += [ - 'GLContextCGL.h', - 'SharedSurfaceIO.h', + "GLContextCGL.h", + "SharedSurfaceIO.h", ] # SharedSurfaceIO.cpp includes MacIOSurface.h which include Mac headers # which define Size and Point types in root namespace with often conflict with # our own types. While I haven't actually hit this issue in the present case, # it's been an issue in gfx/layers so let's not risk it. SOURCES += [ - 'SharedSurfaceIO.cpp', + "SharedSurfaceIO.cpp", ] OS_LIBS += [ - '-framework IOSurface', + "-framework IOSurface", ] -elif gl_provider == 'EAGL': +elif gl_provider == "EAGL": # These files include ObjC headers that are unfriendly to unified builds SOURCES += [ - 'GLContextProviderEAGL.mm', + "GLContextProviderEAGL.mm", ] EXPORTS += [ - 'GLContextEAGL.h', + "GLContextEAGL.h", ] -elif gl_provider == 'GLX': +elif gl_provider == "GLX": # GLContextProviderGLX.cpp needs to be kept out of UNIFIED_SOURCES # as it includes X11 headers which cause conflicts. 
SOURCES += [ - 'GLContextProviderGLX.cpp', - 'GLContextProviderX11.cpp', - 'SharedSurfaceGLX.cpp' - ] - EXPORTS += [ - 'GLContextGLX.h', - 'GLXLibrary.h', - 'SharedSurfaceGLX.h' + "GLContextProviderGLX.cpp", + "GLContextProviderX11.cpp", + "SharedSurfaceGLX.cpp", ] + EXPORTS += ["GLContextGLX.h", "GLXLibrary.h", "SharedSurfaceGLX.h"] -if CONFIG['MOZ_WAYLAND']: - SOURCES += [ - 'GLContextProviderWayland.cpp', - 'SharedSurfaceDMABUF.cpp' - ] +if CONFIG["MOZ_WAYLAND"]: + SOURCES += ["GLContextProviderWayland.cpp", "SharedSurfaceDMABUF.cpp"] UNIFIED_SOURCES += [ - 'AndroidSurfaceTexture.cpp', - 'DecomposeIntoNoRepeatTriangles.cpp', - 'GfxTexturesReporter.cpp', - 'GLBlitHelper.cpp', - 'GLContext.cpp', - 'GLContextFeatures.cpp', - 'GLContextProviderEGL.cpp', - 'GLDebugUtils.cpp', - 'GLLibraryEGL.cpp', - 'GLLibraryLoader.cpp', - 'GLReadTexImageHelper.cpp', - 'GLTextureImage.cpp', - 'GLUploadHelpers.cpp', - 'MozFramebuffer.cpp', - 'ScopedGLHelpers.cpp', - 'SharedSurface.cpp', - 'SharedSurfaceEGL.cpp', - 'SharedSurfaceGL.cpp', - 'TextureImageEGL.cpp', + "AndroidSurfaceTexture.cpp", + "DecomposeIntoNoRepeatTriangles.cpp", + "GfxTexturesReporter.cpp", + "GLBlitHelper.cpp", + "GLContext.cpp", + "GLContextFeatures.cpp", + "GLContextProviderEGL.cpp", + "GLDebugUtils.cpp", + "GLLibraryEGL.cpp", + "GLLibraryLoader.cpp", + "GLReadTexImageHelper.cpp", + "GLTextureImage.cpp", + "GLUploadHelpers.cpp", + "MozFramebuffer.cpp", + "ScopedGLHelpers.cpp", + "SharedSurface.cpp", + "SharedSurfaceEGL.cpp", + "SharedSurfaceGL.cpp", + "TextureImageEGL.cpp", ] SOURCES += [ - 'GLScreenBuffer.cpp', + "GLScreenBuffer.cpp", ] -include('/ipc/chromium/chromium-config.mozbuild') +include("/ipc/chromium/chromium-config.mozbuild") -FINAL_LIBRARY = 'xul' +FINAL_LIBRARY = "xul" -if CONFIG['MOZ_D3DCOMPILER_VISTA_DLL']: - DEFINES['MOZ_D3DCOMPILER_VISTA_DLL'] = CONFIG['MOZ_D3DCOMPILER_VISTA_DLL'] +if CONFIG["MOZ_D3DCOMPILER_VISTA_DLL"]: + DEFINES["MOZ_D3DCOMPILER_VISTA_DLL"] = CONFIG["MOZ_D3DCOMPILER_VISTA_DLL"] -CXXFLAGS += CONFIG['MOZ_CAIRO_CFLAGS'] -CXXFLAGS += CONFIG['TK_CFLAGS'] -CFLAGS += CONFIG['MOZ_CAIRO_CFLAGS'] -CFLAGS += CONFIG['TK_CFLAGS'] +CXXFLAGS += CONFIG["MOZ_CAIRO_CFLAGS"] +CXXFLAGS += CONFIG["TK_CFLAGS"] +CFLAGS += CONFIG["MOZ_CAIRO_CFLAGS"] +CFLAGS += CONFIG["TK_CFLAGS"] -if CONFIG['MOZ_WAYLAND']: - CXXFLAGS += CONFIG['MOZ_WAYLAND_CFLAGS'] - CFLAGS += CONFIG['MOZ_WAYLAND_CFLAGS'] +if CONFIG["MOZ_WAYLAND"]: + CXXFLAGS += CONFIG["MOZ_WAYLAND_CFLAGS"] + CFLAGS += CONFIG["MOZ_WAYLAND_CFLAGS"] -LOCAL_INCLUDES += CONFIG['SKIA_INCLUDES'] +LOCAL_INCLUDES += CONFIG["SKIA_INCLUDES"] -if CONFIG['CC_TYPE'] in ('clang', 'gcc'): - CXXFLAGS += ['-Wno-error=shadow'] +if CONFIG["CC_TYPE"] in ("clang", "gcc"): + CXXFLAGS += ["-Wno-error=shadow"] diff --git a/gfx/ipc/moz.build b/gfx/ipc/moz.build index 681ceb0865e37e..85744c81beba2b 100644 --- a/gfx/ipc/moz.build +++ b/gfx/ipc/moz.build @@ -4,90 +4,87 @@ # License, v. 2.0. If a copy of the MPL was not distributed with this # file, You can obtain one at http://mozilla.org/MPL/2.0/. 
-with Files('**'): - BUG_COMPONENT = ('Core', 'Graphics: Layers') +with Files("**"): + BUG_COMPONENT = ("Core", "Graphics: Layers") -EXPORTS.mozilla += [ - 'D3DMessageUtils.h', - 'GfxMessageUtils.h' -] +EXPORTS.mozilla += ["D3DMessageUtils.h", "GfxMessageUtils.h"] EXPORTS.mozilla.gfx += [ - 'CrossProcessPaint.h', - 'GPUChild.h', - 'GPUParent.h', - 'GPUProcessHost.h', - 'GPUProcessImpl.h', - 'GPUProcessListener.h', - 'GPUProcessManager.h', - 'SharedDIB.h', - 'VsyncBridgeChild.h', - 'VsyncBridgeParent.h', - 'VsyncIOThreadHolder.h', + "CrossProcessPaint.h", + "GPUChild.h", + "GPUParent.h", + "GPUProcessHost.h", + "GPUProcessImpl.h", + "GPUProcessListener.h", + "GPUProcessManager.h", + "SharedDIB.h", + "VsyncBridgeChild.h", + "VsyncBridgeParent.h", + "VsyncIOThreadHolder.h", ] EXPORTS.mozilla.layers += [ - 'CompositorOptions.h', - 'CompositorSession.h', - 'InProcessCompositorSession.h', - 'RemoteCompositorSession.h', + "CompositorOptions.h", + "CompositorSession.h", + "InProcessCompositorSession.h", + "RemoteCompositorSession.h", ] EXPORTS.mozilla.widget += [ - 'CompositorWidgetVsyncObserver.h', + "CompositorWidgetVsyncObserver.h", ] -if CONFIG['MOZ_WIDGET_TOOLKIT'] == 'windows': +if CONFIG["MOZ_WIDGET_TOOLKIT"] == "windows": EXPORTS.mozilla.gfx += [ - 'SharedDIBSurface.h', - 'SharedDIBWin.h', + "SharedDIBSurface.h", + "SharedDIBWin.h", ] UNIFIED_SOURCES += [ - 'SharedDIBSurface.cpp', - 'SharedDIBWin.cpp', + "SharedDIBSurface.cpp", + "SharedDIBWin.cpp", ] UNIFIED_SOURCES += [ - 'CompositorSession.cpp', - 'CompositorWidgetVsyncObserver.cpp', - 'CrossProcessPaint.cpp', - 'D3DMessageUtils.cpp', - 'GPUChild.cpp', - 'GPUProcessHost.cpp', - 'GPUProcessImpl.cpp', - 'GPUProcessManager.cpp', - 'InProcessCompositorSession.cpp', - 'RemoteCompositorSession.cpp', - 'SharedDIB.cpp', - 'VsyncBridgeChild.cpp', - 'VsyncBridgeParent.cpp', - 'VsyncIOThreadHolder.cpp', + "CompositorSession.cpp", + "CompositorWidgetVsyncObserver.cpp", + "CrossProcessPaint.cpp", + "D3DMessageUtils.cpp", + "GPUChild.cpp", + "GPUProcessHost.cpp", + "GPUProcessImpl.cpp", + "GPUProcessManager.cpp", + "InProcessCompositorSession.cpp", + "RemoteCompositorSession.cpp", + "SharedDIB.cpp", + "VsyncBridgeChild.cpp", + "VsyncBridgeParent.cpp", + "VsyncIOThreadHolder.cpp", ] SOURCES += [ - 'GPUParent.cpp', + "GPUParent.cpp", ] IPDL_SOURCES = [ - 'GraphicsMessages.ipdlh', - 'PVsyncBridge.ipdl', + "GraphicsMessages.ipdlh", + "PVsyncBridge.ipdl", ] PREPROCESSED_IPDL_SOURCES += [ - 'PGPU.ipdl', + "PGPU.ipdl", ] LOCAL_INCLUDES += [ - '/dom/ipc', - '/toolkit/crashreporter', - '/xpcom/threads', + "/dom/ipc", + "/toolkit/crashreporter", + "/xpcom/threads", ] -include('/ipc/chromium/chromium-config.mozbuild') +include("/ipc/chromium/chromium-config.mozbuild") -FINAL_LIBRARY = 'xul' +FINAL_LIBRARY = "xul" -CXXFLAGS += CONFIG['MOZ_CAIRO_CFLAGS'] -CXXFLAGS += CONFIG['TK_CFLAGS'] +CXXFLAGS += CONFIG["MOZ_CAIRO_CFLAGS"] +CXXFLAGS += CONFIG["TK_CFLAGS"] -LOCAL_INCLUDES += CONFIG['SKIA_INCLUDES'] +LOCAL_INCLUDES += CONFIG["SKIA_INCLUDES"] diff --git a/gfx/layers/apz/test/gtest/moz.build b/gfx/layers/apz/test/gtest/moz.build index 7f9a16bea20d00..24e968957fd58d 100644 --- a/gfx/layers/apz/test/gtest/moz.build +++ b/gfx/layers/apz/test/gtest/moz.build @@ -5,30 +5,30 @@ # file, You can obtain one at http://mozilla.org/MPL/2.0/. 
UNIFIED_SOURCES += [ - 'TestBasic.cpp', - 'TestEventRegions.cpp', - 'TestGestureDetector.cpp', - 'TestHitTesting.cpp', - 'TestInputQueue.cpp', - 'TestPanning.cpp', - 'TestPinching.cpp', - 'TestScrollHandoff.cpp', - 'TestSnapping.cpp', - 'TestSnappingOnMomentum.cpp', - 'TestTreeManager.cpp', + "TestBasic.cpp", + "TestEventRegions.cpp", + "TestGestureDetector.cpp", + "TestHitTesting.cpp", + "TestInputQueue.cpp", + "TestPanning.cpp", + "TestPinching.cpp", + "TestScrollHandoff.cpp", + "TestSnapping.cpp", + "TestSnappingOnMomentum.cpp", + "TestTreeManager.cpp", ] -include('/ipc/chromium/chromium-config.mozbuild') +include("/ipc/chromium/chromium-config.mozbuild") LOCAL_INCLUDES += [ - '/gfx/2d', - '/gfx/layers', - '/gfx/tests/gtest' # for TestLayers.h, which is shared with the gfx gtests + "/gfx/2d", + "/gfx/layers", + "/gfx/tests/gtest", # for TestLayers.h, which is shared with the gfx gtests ] -FINAL_LIBRARY = 'xul-gtest' +FINAL_LIBRARY = "xul-gtest" -CXXFLAGS += CONFIG['MOZ_CAIRO_CFLAGS'] +CXXFLAGS += CONFIG["MOZ_CAIRO_CFLAGS"] -if CONFIG['CC_TYPE'] in ('clang', 'gcc'): - CXXFLAGS += ['-Wno-error=shadow'] +if CONFIG["CC_TYPE"] in ("clang", "gcc"): + CXXFLAGS += ["-Wno-error=shadow"] diff --git a/gfx/layers/apz/test/gtest/mvm/moz.build b/gfx/layers/apz/test/gtest/mvm/moz.build index 1e90d12f2a1cf6..0fa985307ba162 100644 --- a/gfx/layers/apz/test/gtest/mvm/moz.build +++ b/gfx/layers/apz/test/gtest/mvm/moz.build @@ -5,9 +5,9 @@ # file, You can obtain one at http://mozilla.org/MPL/2.0/. UNIFIED_SOURCES += [ - 'TestMobileViewportManager.cpp', + "TestMobileViewportManager.cpp", ] -include('/ipc/chromium/chromium-config.mozbuild') +include("/ipc/chromium/chromium-config.mozbuild") -FINAL_LIBRARY = 'xul-gtest' +FINAL_LIBRARY = "xul-gtest" diff --git a/gfx/layers/d3d11/genshaders.py b/gfx/layers/d3d11/genshaders.py index 96a9449fb204d3..2108bfe6930321 100644 --- a/gfx/layers/d3d11/genshaders.py +++ b/gfx/layers/d3d11/genshaders.py @@ -16,13 +16,11 @@ def shell_main(): parser = argparse.ArgumentParser() - parser.add_argument('-o', '--output', type=str, required=True, - help='Output file') - parser.add_argument('manifest', type=str, - help='Manifest source file') + parser.add_argument("-o", "--output", type=str, required=True, help="Output file") + parser.add_argument("manifest", type=str, help="Manifest source file") args = parser.parse_args() - with open(args.output, 'w') as out_file: + with open(args.output, "w") as out_file: process_manifest(out_file, args.manifest) @@ -42,7 +40,7 @@ def main(output_fp, input_filename): def process_manifest(output_fp, manifest_filename): - with codecs.open(manifest_filename, 'r', 'UTF-8') as in_fp: + with codecs.open(manifest_filename, "r", "UTF-8") as in_fp: manifest = yaml.safe_load(in_fp) shader_folder, _ = os.path.split(manifest_filename) @@ -50,67 +48,64 @@ def process_manifest(output_fp, manifest_filename): deps = set() for block in manifest: - if 'type' not in block: + if "type" not in block: raise Exception("Expected 'type' key with shader mode") - if 'file' not in block: + if "file" not in block: raise Exception("Expected 'file' key with shader file") - if 'shaders' not in block: + if "shaders" not in block: raise Exception("Expected 'shaders' key with shader name list") - shader_file = os.path.join(shader_folder, block['file']) + shader_file = os.path.join(shader_folder, block["file"]) deps.add(shader_file) - shader_model = block['type'] - for shader_name in block['shaders']: + shader_model = block["type"] + for shader_name in block["shaders"]: new_deps = 
run_fxc( shader_model=shader_model, shader_file=shader_file, shader_name=shader_name, - output_fp=output_fp) + output_fp=output_fp, + ) deps |= new_deps output_fp.write(FOOTER) return deps -def run_fxc(shader_model, - shader_file, - shader_name, - output_fp): - fxc_location = buildconfig.substs['FXC'] +def run_fxc(shader_model, shader_file, shader_name, output_fp): + fxc_location = buildconfig.substs["FXC"] argv = [ fxc_location, - '-nologo', - '-T{0}'.format(shader_model), + "-nologo", + "-T{0}".format(shader_model), os.path.relpath(shader_file), - '-E{0}'.format(shader_name), - '-Vn{0}'.format(shader_name), - '-Vi', + "-E{0}".format(shader_name), + "-Vn{0}".format(shader_name), + "-Vi", ] - if 'WINNT' not in buildconfig.substs['HOST_OS_ARCH']: - argv.insert(0, buildconfig.substs['WINE']) - if shader_model.startswith('vs_'): - argv += ['-DVERTEX_SHADER'] - elif shader_model.startswith('ps_'): - argv += ['-DPIXEL_SHADER'] + if "WINNT" not in buildconfig.substs["HOST_OS_ARCH"]: + argv.insert(0, buildconfig.substs["WINE"]) + if shader_model.startswith("vs_"): + argv += ["-DVERTEX_SHADER"] + elif shader_model.startswith("ps_"): + argv += ["-DPIXEL_SHADER"] deps = None with ScopedTempFilename() as temp_filename: - argv += ['-Fh{0}'.format(os.path.relpath(temp_filename))] + argv += ["-Fh{0}".format(os.path.relpath(temp_filename))] - sys.stdout.write('{0}\n'.format(' '.join(argv))) + sys.stdout.write("{0}\n".format(" ".join(argv))) sys.stdout.flush() proc_stdout = subprocess.check_output(argv) proc_stdout = decode_console_text(sys.stdout, proc_stdout) deps = find_dependencies(proc_stdout) - assert 'fxc2' in fxc_location or len(deps) > 0 + assert "fxc2" in fxc_location or len(deps) > 0 - with open(temp_filename, 'r') as temp_fp: + with open(temp_filename, "r") as temp_fp: output_fp.write(temp_fp.read()) - output_fp.write("ShaderBytes s{0} = {{ {0}, sizeof({0}) }};\n".format( - shader_name)) + output_fp.write("ShaderBytes s{0} = {{ {0}, sizeof({0}) }};\n".format(shader_name)) return deps @@ -122,7 +117,7 @@ def find_dependencies(fxc_output): # instead of pattern matching on that string, we take everything in between # brackets. We filter out potentially bogus strings later. deps = set() - for line in fxc_output.split('\n'): + for line in fxc_output.split("\n"): m = re.search(r"\[([^\]]+)\]", line) if m is None: continue @@ -130,12 +125,15 @@ def find_dependencies(fxc_output): dep_path = os.path.normpath(dep_path) # When run via Wine, FXC's output contains Windows paths on the Z drive. # We want to normalize them back to unix paths for the build system. - if 'WINNT' not in buildconfig.substs['HOST_OS_ARCH'] and dep_path.lower().startswith('z:'): - dep_path = dep_path[2:].replace('\\', '/') + if "WINNT" not in buildconfig.substs[ + "HOST_OS_ARCH" + ] and dep_path.lower().startswith("z:"): + dep_path = dep_path[2:].replace("\\", "/") if os.path.isfile(dep_path): deps.add(dep_path) return deps + # Python reads the raw bytes from stdout, so we need to try our best to # capture that as a valid Python string. 
@@ -143,13 +141,14 @@ def find_dependencies(fxc_output): def decode_console_text(pipe, text): try: if pipe.encoding: - return text.decode(pipe.encoding, 'replace') + return text.decode(pipe.encoding, "replace") except Exception: pass try: - return text.decode(locale.getpreferredencoding(), 'replace') + return text.decode(locale.getpreferredencoding(), "replace") except Exception: - return text.decode('utf8', 'replace') + return text.decode("utf8", "replace") + # Allocate a temporary file name and delete it when done. We need an extra # wrapper for this since TemporaryNamedFile holds the file open. @@ -173,5 +172,5 @@ def __exit__(self, type, value, traceback): pass -if __name__ == '__main__': +if __name__ == "__main__": shell_main() diff --git a/gfx/layers/ipc/fuzztest/moz.build b/gfx/layers/ipc/fuzztest/moz.build index 76fabeeb019ce3..a60293a5208276 100644 --- a/gfx/layers/ipc/fuzztest/moz.build +++ b/gfx/layers/ipc/fuzztest/moz.build @@ -4,15 +4,13 @@ # License, v. 2.0. If a copy of the MPL was not distributed with this # file, You can obtain one at http://mozilla.org/MPL/2.0/. -Library('FuzzingCompositorManagerParentIPC') +Library("FuzzingCompositorManagerParentIPC") -SOURCES += [ - 'compositor_manager_parent_ipc_libfuzz.cpp' -] +SOURCES += ["compositor_manager_parent_ipc_libfuzz.cpp"] -include('/ipc/chromium/chromium-config.mozbuild') +include("/ipc/chromium/chromium-config.mozbuild") -FINAL_LIBRARY = 'xul-gtest' +FINAL_LIBRARY = "xul-gtest" # Add libFuzzer configuration directives -include('/tools/fuzzing/libfuzzer-config.mozbuild') +include("/tools/fuzzing/libfuzzer-config.mozbuild") diff --git a/gfx/layers/moz.build b/gfx/layers/moz.build index 0211e451b998cb..f7f68befdc14fd 100755 --- a/gfx/layers/moz.build +++ b/gfx/layers/moz.build @@ -4,656 +4,658 @@ # License, v. 2.0. If a copy of the MPL was not distributed with this # file, You can obtain one at http://mozilla.org/MPL/2.0/. 
-with Files('**'): - BUG_COMPONENT = ('Core', 'Graphics: Layers') +with Files("**"): + BUG_COMPONENT = ("Core", "Graphics: Layers") -with Files('apz/**'): - BUG_COMPONENT = ('Core', 'Panning and Zooming') +with Files("apz/**"): + BUG_COMPONENT = ("Core", "Panning and Zooming") EXPORTS += [ - 'basic/BasicCanvasLayer.h', - 'basic/BasicImplData.h', - 'basic/BasicLayers.h', - 'basic/BasicLayersImpl.h', - 'basic/BasicPaintedLayer.h', - 'client/ClientCanvasLayer.h', - 'client/ClientContainerLayer.h', - 'client/ClientLayerManager.h', - 'client/ClientPaintedLayer.h', - 'client/ClientTiledPaintedLayer.h', - 'composite/CompositableHost.h', - 'composite/ImageHost.h', - 'CompositorTypes.h', - 'D3D9SurfaceImage.h', - 'FrameMetrics.h', - 'GLImages.h', - 'GPUVideoImage.h', - 'ImageContainer.h', - 'ImageLayers.h', - 'ImageTypes.h', - 'IMFYCbCrImage.h', - 'ipc/ThreadSafeRefcountingWithMainThreadDestruction.h', - 'Layers.h', - 'LayerScope.h', - 'LayerSorter.h', - 'LayersTypes.h', - 'LayerTreeInvalidation.h', - 'LayerUserData.h', - 'opengl/OGLShaderConfig.h', - 'opengl/OGLShaderProgram.h', - 'protobuf/LayerScopePacket.pb.h', - 'ReadbackLayer.h', - 'TiledLayerBuffer.h', + "basic/BasicCanvasLayer.h", + "basic/BasicImplData.h", + "basic/BasicLayers.h", + "basic/BasicLayersImpl.h", + "basic/BasicPaintedLayer.h", + "client/ClientCanvasLayer.h", + "client/ClientContainerLayer.h", + "client/ClientLayerManager.h", + "client/ClientPaintedLayer.h", + "client/ClientTiledPaintedLayer.h", + "composite/CompositableHost.h", + "composite/ImageHost.h", + "CompositorTypes.h", + "D3D9SurfaceImage.h", + "FrameMetrics.h", + "GLImages.h", + "GPUVideoImage.h", + "ImageContainer.h", + "ImageLayers.h", + "ImageTypes.h", + "IMFYCbCrImage.h", + "ipc/ThreadSafeRefcountingWithMainThreadDestruction.h", + "Layers.h", + "LayerScope.h", + "LayerSorter.h", + "LayersTypes.h", + "LayerTreeInvalidation.h", + "LayerUserData.h", + "opengl/OGLShaderConfig.h", + "opengl/OGLShaderProgram.h", + "protobuf/LayerScopePacket.pb.h", + "ReadbackLayer.h", + "TiledLayerBuffer.h", ] -if CONFIG['MOZ_WIDGET_TOOLKIT'] == 'windows': +if CONFIG["MOZ_WIDGET_TOOLKIT"] == "windows": SOURCES += [ - 'D3D11ShareHandleImage.cpp', - 'D3D11YCbCrImage.cpp', + "D3D11ShareHandleImage.cpp", + "D3D11YCbCrImage.cpp", ] UNIFIED_SOURCES += [ - 'D3D9SurfaceImage.cpp', - 'IMFYCbCrImage.cpp', - 'TextureDIB.cpp', + "D3D9SurfaceImage.cpp", + "IMFYCbCrImage.cpp", + "TextureDIB.cpp", ] EXPORTS.mozilla.layers += [ - 'TextureDIB.h', + "TextureDIB.h", ] - if CONFIG['MOZ_ENABLE_D3D10_LAYER']: + if CONFIG["MOZ_ENABLE_D3D10_LAYER"]: EXPORTS.mozilla.layers += [ - 'd3d11/CompositorD3D11.h', - 'd3d11/DeviceAttachmentsD3D11.h', - 'd3d11/DiagnosticsD3D11.h', - 'd3d11/HelpersD3D11.h', - 'd3d11/MLGDeviceD3D11.h', - 'd3d11/ReadbackManagerD3D11.h', - 'd3d11/ShaderDefinitionsD3D11.h', - 'd3d11/TextureD3D11.h', + "d3d11/CompositorD3D11.h", + "d3d11/DeviceAttachmentsD3D11.h", + "d3d11/DiagnosticsD3D11.h", + "d3d11/HelpersD3D11.h", + "d3d11/MLGDeviceD3D11.h", + "d3d11/ReadbackManagerD3D11.h", + "d3d11/ShaderDefinitionsD3D11.h", + "d3d11/TextureD3D11.h", ] UNIFIED_SOURCES += [ - 'd3d11/DiagnosticsD3D11.cpp', - 'd3d11/MLGDeviceD3D11.cpp', - 'd3d11/TextureD3D11.cpp', + "d3d11/DiagnosticsD3D11.cpp", + "d3d11/MLGDeviceD3D11.cpp", + "d3d11/TextureD3D11.cpp", ] SOURCES += [ - 'd3d11/CompositorD3D11.cpp', - 'd3d11/DeviceAttachmentsD3D11.cpp', - 'd3d11/ReadbackManagerD3D11.cpp', + "d3d11/CompositorD3D11.cpp", + "d3d11/DeviceAttachmentsD3D11.cpp", + "d3d11/ReadbackManagerD3D11.cpp", ] EXPORTS.gfxipc += [ - 
'ipc/ShadowLayerUtils.h', + "ipc/ShadowLayerUtils.h", ] EXPORTS.mozilla.dom += [ - 'apz/util/CheckerboardReportService.h', + "apz/util/CheckerboardReportService.h", ] EXPORTS.mozilla.gfx += [ - 'BuildConstants.h', + "BuildConstants.h", ] EXPORTS.mozilla.layers += [ - 'AnimationHelper.h', - 'AnimationInfo.h', - 'AnimationStorageData.h', - 'apz/public/APZInputBridge.h', - 'apz/public/APZPublicUtils.h', - 'apz/public/APZSampler.h', - 'apz/public/APZUpdater.h', - 'apz/public/CompositorController.h', - 'apz/public/GeckoContentController.h', - 'apz/public/GeckoContentControllerTypes.h', - 'apz/public/IAPZCTreeManager.h', - 'apz/public/MatrixMessage.h', - 'apz/public/MetricsSharingController.h', + "AnimationHelper.h", + "AnimationInfo.h", + "AnimationStorageData.h", + "apz/public/APZInputBridge.h", + "apz/public/APZPublicUtils.h", + "apz/public/APZSampler.h", + "apz/public/APZUpdater.h", + "apz/public/CompositorController.h", + "apz/public/GeckoContentController.h", + "apz/public/GeckoContentControllerTypes.h", + "apz/public/IAPZCTreeManager.h", + "apz/public/MatrixMessage.h", + "apz/public/MetricsSharingController.h", # exporting things from apz/src is temporary until we extract a # proper interface for the code there - 'apz/src/APZUtils.h', - 'apz/src/AsyncDragMetrics.h', - 'apz/src/FocusTarget.h', - 'apz/src/KeyboardMap.h', - 'apz/src/KeyboardScrollAction.h', - 'apz/testutil/APZTestData.h', - 'apz/util/ActiveElementManager.h', - 'apz/util/APZCCallbackHelper.h', - 'apz/util/APZEventState.h', - 'apz/util/APZThreadUtils.h', - 'apz/util/ChromeProcessController.h', - 'apz/util/ContentProcessController.h', - 'apz/util/DoubleTapToZoom.h', - 'apz/util/InputAPZContext.h', - 'apz/util/ScrollInputMethods.h', - 'apz/util/ScrollLinkedEffectDetector.h', - 'apz/util/TouchActionHelper.h', - 'apz/util/TouchCounter.h', - 'AtomicRefCountedWithFinalize.h', - 'AxisPhysicsModel.h', - 'AxisPhysicsMSDModel.h', - 'basic/BasicCompositor.h', - 'basic/MacIOSurfaceTextureHostBasic.h', - 'basic/TextureHostBasic.h', - 'BSPTree.h', - 'BufferTexture.h', - 'BuildConstants.h', - 'CanvasDrawEventRecorder.h', - 'CanvasRenderer.h', - 'client/CanvasClient.h', - 'client/CompositableClient.h', - 'client/ContentClient.h', - 'client/GPUVideoTextureClient.h', - 'client/ImageClient.h', - 'client/MultiTiledContentClient.h', - 'client/SingleTiledContentClient.h', - 'client/TextureClient.h', - 'client/TextureClientPool.h', - 'client/TextureClientRecycleAllocator.h', - 'client/TextureClientSharedSurface.h', - 'client/TextureRecorded.h', - 'client/TiledContentClient.h', - 'composite/AsyncCompositionManager.h', - 'composite/CanvasLayerComposite.h', - 'composite/ColorLayerComposite.h', - 'composite/ContainerLayerComposite.h', - 'composite/ContentHost.h', - 'composite/Diagnostics.h', - 'composite/FPSCounter.h', - 'composite/FrameUniformityData.h', - 'composite/GPUVideoTextureHost.h', - 'composite/ImageComposite.h', - 'composite/ImageHost.h', - 'composite/ImageLayerComposite.h', - 'composite/LayerManagerComposite.h', - 'composite/PaintedLayerComposite.h', - 'composite/TextRenderer.h', - 'composite/TextureHost.h', - 'composite/TiledContentHost.h', - 'CompositionRecorder.h', - 'Compositor.h', - 'CompositorAnimationStorage.h', - 'CompositorTypes.h', - 'D3D11ShareHandleImage.h', - 'D3D11YCbCrImage.h', - 'D3D9SurfaceImage.h', - 'DirectionUtils.h', - 'Effects.h', - 'ImageDataSerializer.h', - 'ipc/APZChild.h', - 'ipc/APZCTreeManagerChild.h', - 'ipc/APZCTreeManagerParent.h', - 'ipc/APZInputBridgeChild.h', - 'ipc/APZInputBridgeParent.h', - 
'ipc/CanvasChild.h', - 'ipc/CanvasThread.h', - 'ipc/CanvasTranslator.h', - 'ipc/CompositableForwarder.h', - 'ipc/CompositableTransactionParent.h', - 'ipc/CompositorBridgeChild.h', - 'ipc/CompositorBridgeParent.h', - 'ipc/CompositorManagerChild.h', - 'ipc/CompositorManagerParent.h', - 'ipc/CompositorThread.h', - 'ipc/CompositorVsyncScheduler.h', - 'ipc/CompositorVsyncSchedulerOwner.h', - 'ipc/ContentCompositorBridgeParent.h', - 'ipc/ImageBridgeChild.h', - 'ipc/ImageBridgeParent.h', - 'ipc/ISurfaceAllocator.h', - 'ipc/KnowsCompositor.h', - 'ipc/LayerAnimationUtils.h', - 'ipc/LayersMessageUtils.h', - 'ipc/LayerTransactionChild.h', - 'ipc/LayerTransactionParent.h', - 'ipc/LayerTreeOwnerTracker.h', - 'ipc/RefCountedShmem.h', - 'ipc/RemoteContentController.h', - 'ipc/ShadowLayers.h', - 'ipc/SharedPlanarYCbCrImage.h', - 'ipc/SharedRGBImage.h', - 'ipc/SharedSurfacesChild.h', - 'ipc/SharedSurfacesMemoryReport.h', - 'ipc/SharedSurfacesParent.h', - 'ipc/SynchronousTask.h', - 'ipc/TextureForwarder.h', - 'ipc/UiCompositorControllerChild.h', - 'ipc/UiCompositorControllerMessageTypes.h', - 'ipc/UiCompositorControllerParent.h', - 'ipc/VideoBridgeChild.h', - 'ipc/VideoBridgeParent.h', - 'ipc/VideoBridgeUtils.h', - 'LayerAttributes.h', - 'LayerMetricsWrapper.h', - 'LayersHelpers.h', - 'LayersTypes.h', - 'MemoryPressureObserver.h', - 'mlgpu/LayerManagerMLGPU.h', - 'mlgpu/LayerMLGPU.h', - 'mlgpu/MemoryReportingMLGPU.h', - 'mlgpu/MLGDevice.h', - 'mlgpu/MLGDeviceTypes.h', - 'mlgpu/MLGPUScreenshotGrabber.h', - 'mlgpu/ShaderDefinitionsMLGPU.h', - 'mlgpu/UtilityMLGPU.h', - 'NativeLayer.h', - 'OOPCanvasRenderer.h', - 'opengl/CompositingRenderTargetOGL.h', - 'opengl/CompositorOGL.h', - 'opengl/MacIOSurfaceTextureClientOGL.h', - 'opengl/MacIOSurfaceTextureHostOGL.h', - 'opengl/TextureClientOGL.h', - 'opengl/TextureHostOGL.h', - 'PaintThread.h', - 'PersistentBufferProvider.h', - 'ProfilerScreenshots.h', - 'RenderTrace.h', - 'RepaintRequest.h', - 'RotatedBuffer.h', - 'SampleTime.h', - 'ScreenshotGrabber.h', - 'ScrollableLayerGuid.h', - 'ShareableCanvasRenderer.h', - 'SourceSurfaceSharedData.h', - 'SourceSurfaceVolatileData.h', - 'SurfacePool.h', - 'SyncObject.h', - 'TextureSourceProvider.h', - 'TextureWrapperImage.h', - 'TransactionIdAllocator.h', - 'TreeTraversal.h', - 'UpdateImageHelper.h', - 'wr/AsyncImagePipelineManager.h', - 'wr/ClipManager.h', - 'wr/DisplayItemCache.h', - 'wr/IpcResourceUpdateQueue.h', - 'wr/OMTAController.h', - 'wr/OMTASampler.h', - 'wr/RenderRootStateManager.h', - 'wr/RenderRootTypes.h', - 'wr/StackingContextHelper.h', - 'wr/WebRenderBridgeChild.h', - 'wr/WebRenderBridgeParent.h', - 'wr/WebRenderCanvasRenderer.h', - 'wr/WebRenderCommandBuilder.h', - 'wr/WebRenderDrawEventRecorder.h', - 'wr/WebRenderImageHost.h', - 'wr/WebRenderLayerManager.h', - 'wr/WebRenderMessageUtils.h', - 'wr/WebRenderScrollData.h', - 'wr/WebRenderScrollDataWrapper.h', - 'wr/WebRenderTextureHost.h', - 'wr/WebRenderUserData.h', - 'ZoomConstraints.h', + "apz/src/APZUtils.h", + "apz/src/AsyncDragMetrics.h", + "apz/src/FocusTarget.h", + "apz/src/KeyboardMap.h", + "apz/src/KeyboardScrollAction.h", + "apz/testutil/APZTestData.h", + "apz/util/ActiveElementManager.h", + "apz/util/APZCCallbackHelper.h", + "apz/util/APZEventState.h", + "apz/util/APZThreadUtils.h", + "apz/util/ChromeProcessController.h", + "apz/util/ContentProcessController.h", + "apz/util/DoubleTapToZoom.h", + "apz/util/InputAPZContext.h", + "apz/util/ScrollInputMethods.h", + "apz/util/ScrollLinkedEffectDetector.h", + "apz/util/TouchActionHelper.h", + 
"apz/util/TouchCounter.h", + "AtomicRefCountedWithFinalize.h", + "AxisPhysicsModel.h", + "AxisPhysicsMSDModel.h", + "basic/BasicCompositor.h", + "basic/MacIOSurfaceTextureHostBasic.h", + "basic/TextureHostBasic.h", + "BSPTree.h", + "BufferTexture.h", + "BuildConstants.h", + "CanvasDrawEventRecorder.h", + "CanvasRenderer.h", + "client/CanvasClient.h", + "client/CompositableClient.h", + "client/ContentClient.h", + "client/GPUVideoTextureClient.h", + "client/ImageClient.h", + "client/MultiTiledContentClient.h", + "client/SingleTiledContentClient.h", + "client/TextureClient.h", + "client/TextureClientPool.h", + "client/TextureClientRecycleAllocator.h", + "client/TextureClientSharedSurface.h", + "client/TextureRecorded.h", + "client/TiledContentClient.h", + "composite/AsyncCompositionManager.h", + "composite/CanvasLayerComposite.h", + "composite/ColorLayerComposite.h", + "composite/ContainerLayerComposite.h", + "composite/ContentHost.h", + "composite/Diagnostics.h", + "composite/FPSCounter.h", + "composite/FrameUniformityData.h", + "composite/GPUVideoTextureHost.h", + "composite/ImageComposite.h", + "composite/ImageHost.h", + "composite/ImageLayerComposite.h", + "composite/LayerManagerComposite.h", + "composite/PaintedLayerComposite.h", + "composite/TextRenderer.h", + "composite/TextureHost.h", + "composite/TiledContentHost.h", + "CompositionRecorder.h", + "Compositor.h", + "CompositorAnimationStorage.h", + "CompositorTypes.h", + "D3D11ShareHandleImage.h", + "D3D11YCbCrImage.h", + "D3D9SurfaceImage.h", + "DirectionUtils.h", + "Effects.h", + "ImageDataSerializer.h", + "ipc/APZChild.h", + "ipc/APZCTreeManagerChild.h", + "ipc/APZCTreeManagerParent.h", + "ipc/APZInputBridgeChild.h", + "ipc/APZInputBridgeParent.h", + "ipc/CanvasChild.h", + "ipc/CanvasThread.h", + "ipc/CanvasTranslator.h", + "ipc/CompositableForwarder.h", + "ipc/CompositableTransactionParent.h", + "ipc/CompositorBridgeChild.h", + "ipc/CompositorBridgeParent.h", + "ipc/CompositorManagerChild.h", + "ipc/CompositorManagerParent.h", + "ipc/CompositorThread.h", + "ipc/CompositorVsyncScheduler.h", + "ipc/CompositorVsyncSchedulerOwner.h", + "ipc/ContentCompositorBridgeParent.h", + "ipc/ImageBridgeChild.h", + "ipc/ImageBridgeParent.h", + "ipc/ISurfaceAllocator.h", + "ipc/KnowsCompositor.h", + "ipc/LayerAnimationUtils.h", + "ipc/LayersMessageUtils.h", + "ipc/LayerTransactionChild.h", + "ipc/LayerTransactionParent.h", + "ipc/LayerTreeOwnerTracker.h", + "ipc/RefCountedShmem.h", + "ipc/RemoteContentController.h", + "ipc/ShadowLayers.h", + "ipc/SharedPlanarYCbCrImage.h", + "ipc/SharedRGBImage.h", + "ipc/SharedSurfacesChild.h", + "ipc/SharedSurfacesMemoryReport.h", + "ipc/SharedSurfacesParent.h", + "ipc/SynchronousTask.h", + "ipc/TextureForwarder.h", + "ipc/UiCompositorControllerChild.h", + "ipc/UiCompositorControllerMessageTypes.h", + "ipc/UiCompositorControllerParent.h", + "ipc/VideoBridgeChild.h", + "ipc/VideoBridgeParent.h", + "ipc/VideoBridgeUtils.h", + "LayerAttributes.h", + "LayerMetricsWrapper.h", + "LayersHelpers.h", + "LayersTypes.h", + "MemoryPressureObserver.h", + "mlgpu/LayerManagerMLGPU.h", + "mlgpu/LayerMLGPU.h", + "mlgpu/MemoryReportingMLGPU.h", + "mlgpu/MLGDevice.h", + "mlgpu/MLGDeviceTypes.h", + "mlgpu/MLGPUScreenshotGrabber.h", + "mlgpu/ShaderDefinitionsMLGPU.h", + "mlgpu/UtilityMLGPU.h", + "NativeLayer.h", + "OOPCanvasRenderer.h", + "opengl/CompositingRenderTargetOGL.h", + "opengl/CompositorOGL.h", + "opengl/MacIOSurfaceTextureClientOGL.h", + "opengl/MacIOSurfaceTextureHostOGL.h", + "opengl/TextureClientOGL.h", + 
"opengl/TextureHostOGL.h", + "PaintThread.h", + "PersistentBufferProvider.h", + "ProfilerScreenshots.h", + "RenderTrace.h", + "RepaintRequest.h", + "RotatedBuffer.h", + "SampleTime.h", + "ScreenshotGrabber.h", + "ScrollableLayerGuid.h", + "ShareableCanvasRenderer.h", + "SourceSurfaceSharedData.h", + "SourceSurfaceVolatileData.h", + "SurfacePool.h", + "SyncObject.h", + "TextureSourceProvider.h", + "TextureWrapperImage.h", + "TransactionIdAllocator.h", + "TreeTraversal.h", + "UpdateImageHelper.h", + "wr/AsyncImagePipelineManager.h", + "wr/ClipManager.h", + "wr/DisplayItemCache.h", + "wr/IpcResourceUpdateQueue.h", + "wr/OMTAController.h", + "wr/OMTASampler.h", + "wr/RenderRootStateManager.h", + "wr/RenderRootTypes.h", + "wr/StackingContextHelper.h", + "wr/WebRenderBridgeChild.h", + "wr/WebRenderBridgeParent.h", + "wr/WebRenderCanvasRenderer.h", + "wr/WebRenderCommandBuilder.h", + "wr/WebRenderDrawEventRecorder.h", + "wr/WebRenderImageHost.h", + "wr/WebRenderLayerManager.h", + "wr/WebRenderMessageUtils.h", + "wr/WebRenderScrollData.h", + "wr/WebRenderScrollDataWrapper.h", + "wr/WebRenderTextureHost.h", + "wr/WebRenderUserData.h", + "ZoomConstraints.h", ] -if CONFIG['MOZ_X11']: +if CONFIG["MOZ_X11"]: EXPORTS.mozilla.layers += [ - 'basic/TextureClientX11.h', - 'basic/X11TextureSourceBasic.h', - 'composite/X11TextureHost.h', - 'ipc/ShadowLayerUtilsX11.h', - 'opengl/X11TextureSourceOGL.h', + "basic/TextureClientX11.h", + "basic/X11TextureSourceBasic.h", + "composite/X11TextureHost.h", + "ipc/ShadowLayerUtilsX11.h", + "opengl/X11TextureSourceOGL.h", ] SOURCES += [ - 'basic/TextureClientX11.cpp', - 'basic/X11BasicCompositor.cpp', - 'basic/X11TextureSourceBasic.cpp', - 'composite/X11TextureHost.cpp', - 'ipc/ShadowLayerUtilsX11.cpp', - 'opengl/X11TextureSourceOGL.cpp', + "basic/TextureClientX11.cpp", + "basic/X11BasicCompositor.cpp", + "basic/X11TextureSourceBasic.cpp", + "composite/X11TextureHost.cpp", + "ipc/ShadowLayerUtilsX11.cpp", + "opengl/X11TextureSourceOGL.cpp", ] -if CONFIG['MOZ_WAYLAND']: +if CONFIG["MOZ_WAYLAND"]: EXPORTS.mozilla.layers += [ - 'DMABUFSurfaceImage.h', - 'opengl/DMABUFTextureClientOGL.h', - 'opengl/DMABUFTextureHostOGL.h', + "DMABUFSurfaceImage.h", + "opengl/DMABUFTextureClientOGL.h", + "opengl/DMABUFTextureHostOGL.h", ] SOURCES += [ - 'DMABUFSurfaceImage.cpp', - 'opengl/DMABUFTextureClientOGL.cpp', - 'opengl/DMABUFTextureHostOGL.cpp', + "DMABUFSurfaceImage.cpp", + "opengl/DMABUFTextureClientOGL.cpp", + "opengl/DMABUFTextureHostOGL.cpp", ] -if CONFIG['MOZ_WIDGET_TOOLKIT'] == 'cocoa': +if CONFIG["MOZ_WIDGET_TOOLKIT"] == "cocoa": EXPORTS.mozilla.layers += [ - 'NativeLayerCA.h', - 'SurfacePoolCA.h', - 'TextureSync.h', + "NativeLayerCA.h", + "SurfacePoolCA.h", + "TextureSync.h", ] EXPORTS += [ - 'MacIOSurfaceHelpers.h', - 'MacIOSurfaceImage.h', + "MacIOSurfaceHelpers.h", + "MacIOSurfaceImage.h", ] UNIFIED_SOURCES += [ - 'NativeLayerCA.mm', - 'SurfacePoolCA.mm', - 'TextureSync.cpp', + "NativeLayerCA.mm", + "SurfacePoolCA.mm", + "TextureSync.cpp", ] SOURCES += [ - 'ipc/ShadowLayerUtilsMac.cpp', - 'MacIOSurfaceHelpers.cpp', - 'MacIOSurfaceImage.cpp', + "ipc/ShadowLayerUtilsMac.cpp", + "MacIOSurfaceHelpers.cpp", + "MacIOSurfaceImage.cpp", ] OS_LIBS += [ - '-framework IOSurface', + "-framework IOSurface", ] -if CONFIG['MOZ_WIDGET_TOOLKIT'] == 'android': +if CONFIG["MOZ_WIDGET_TOOLKIT"] == "android": EXPORTS.mozilla.layers += [ - 'AndroidHardwareBuffer.h', + "AndroidHardwareBuffer.h", ] UNIFIED_SOURCES += [ - 'AndroidHardwareBuffer.cpp', - 'apz/src/AndroidAPZ.cpp', - 
'apz/src/AndroidFlingPhysics.cpp', - 'apz/src/AndroidVelocityTracker.cpp', + "AndroidHardwareBuffer.cpp", + "apz/src/AndroidAPZ.cpp", + "apz/src/AndroidFlingPhysics.cpp", + "apz/src/AndroidVelocityTracker.cpp", ] UNIFIED_SOURCES += [ - 'AnimationHelper.cpp', - 'AnimationInfo.cpp', - 'apz/src/APZCTreeManager.cpp', - 'apz/src/APZInputBridge.cpp', - 'apz/src/APZPublicUtils.cpp', - 'apz/src/APZSampler.cpp', - 'apz/src/APZUpdater.cpp', - 'apz/src/APZUtils.cpp', - 'apz/src/AsyncPanZoomController.cpp', - 'apz/src/AutoscrollAnimation.cpp', - 'apz/src/Axis.cpp', - 'apz/src/CheckerboardEvent.cpp', - 'apz/src/DragTracker.cpp', - 'apz/src/ExpectedGeckoMetrics.cpp', - 'apz/src/FlingAccelerator.cpp', - 'apz/src/FocusState.cpp', - 'apz/src/FocusTarget.cpp', - 'apz/src/GenericScrollAnimation.cpp', - 'apz/src/GestureEventListener.cpp', - 'apz/src/HitTestingTreeNode.cpp', - 'apz/src/InputBlockState.cpp', - 'apz/src/InputQueue.cpp', - 'apz/src/KeyboardMap.cpp', - 'apz/src/KeyboardScrollAction.cpp', - 'apz/src/KeyboardScrollAnimation.cpp', - 'apz/src/OverscrollHandoffState.cpp', - 'apz/src/OvershootDetector.cpp', - 'apz/src/PotentialCheckerboardDurationTracker.cpp', - 'apz/src/QueuedInput.cpp', - 'apz/src/SampledAPZCState.cpp', - 'apz/src/SimpleVelocityTracker.cpp', - 'apz/src/SmoothMsdScrollAnimation.cpp', - 'apz/src/SmoothScrollAnimation.cpp', - 'apz/src/WheelScrollAnimation.cpp', - 'apz/testutil/APZTestData.cpp', - 'apz/util/ActiveElementManager.cpp', - 'apz/util/APZCCallbackHelper.cpp', - 'apz/util/APZEventState.cpp', - 'apz/util/APZThreadUtils.cpp', - 'apz/util/CheckerboardReportService.cpp', - 'apz/util/ChromeProcessController.cpp', - 'apz/util/ContentProcessController.cpp', - 'apz/util/DoubleTapToZoom.cpp', - 'apz/util/InputAPZContext.cpp', - 'apz/util/ScrollLinkedEffectDetector.cpp', - 'apz/util/TouchActionHelper.cpp', - 'apz/util/TouchCounter.cpp', - 'AxisPhysicsModel.cpp', - 'AxisPhysicsMSDModel.cpp', - 'basic/BasicCanvasLayer.cpp', - 'basic/BasicColorLayer.cpp', - 'basic/BasicContainerLayer.cpp', - 'basic/BasicImages.cpp', - 'basic/BasicLayerManager.cpp', - 'basic/BasicLayersImpl.cpp', - 'basic/BasicPaintedLayer.cpp', - 'basic/TextureHostBasic.cpp', - 'BSPTree.cpp', - 'BufferTexture.cpp', - 'CanvasDrawEventRecorder.cpp', - 'CanvasRenderer.cpp', - 'client/CanvasClient.cpp', - 'client/ClientCanvasLayer.cpp', - 'client/ClientCanvasRenderer.cpp', - 'client/ClientColorLayer.cpp', - 'client/ClientContainerLayer.cpp', - 'client/ClientImageLayer.cpp', - 'client/ClientLayerManager.cpp', - 'client/ClientPaintedLayer.cpp', - 'client/ClientTiledPaintedLayer.cpp', - 'client/CompositableClient.cpp', - 'client/ContentClient.cpp', - 'client/GPUVideoTextureClient.cpp', - 'client/ImageClient.cpp', - 'client/MultiTiledContentClient.cpp', - 'client/SingleTiledContentClient.cpp', - 'client/TextureClientPool.cpp', - 'client/TextureClientRecycleAllocator.cpp', - 'client/TextureClientSharedSurface.cpp', - 'client/TextureRecorded.cpp', - 'client/TiledContentClient.cpp', - 'composite/AsyncCompositionManager.cpp', - 'composite/CanvasLayerComposite.cpp', - 'composite/ColorLayerComposite.cpp', - 'composite/CompositableHost.cpp', - 'composite/ContainerLayerComposite.cpp', - 'composite/ContentHost.cpp', - 'composite/Diagnostics.cpp', - 'composite/FPSCounter.cpp', - 'composite/FrameUniformityData.cpp', - 'composite/GPUVideoTextureHost.cpp', - 'composite/ImageComposite.cpp', - 'composite/ImageHost.cpp', - 'composite/ImageLayerComposite.cpp', - 'composite/LayerManagerComposite.cpp', - 'composite/PaintedLayerComposite.cpp', - 
'composite/TextRenderer.cpp', - 'composite/TextureHost.cpp', - 'composite/TiledContentHost.cpp', - 'CompositionRecorder.cpp', - 'Compositor.cpp', - 'CompositorAnimationStorage.cpp', - 'CompositorTypes.cpp', - 'Effects.cpp', - 'FrameMetrics.cpp', - 'GLImages.cpp', - 'ImageDataSerializer.cpp', - 'ImageLayers.cpp', - 'ipc/APZChild.cpp', - 'ipc/APZCTreeManagerChild.cpp', - 'ipc/APZCTreeManagerParent.cpp', - 'ipc/APZInputBridgeChild.cpp', - 'ipc/APZInputBridgeParent.cpp', - 'ipc/CanvasChild.cpp', - 'ipc/CanvasThread.cpp', - 'ipc/CanvasTranslator.cpp', - 'ipc/CompositableTransactionParent.cpp', - 'ipc/CompositorBench.cpp', - 'ipc/CompositorBridgeChild.cpp', - 'ipc/CompositorBridgeParent.cpp', - 'ipc/CompositorManagerChild.cpp', - 'ipc/CompositorManagerParent.cpp', - 'ipc/CompositorThread.cpp', - 'ipc/CompositorVsyncScheduler.cpp', - 'ipc/ContentCompositorBridgeParent.cpp', - 'ipc/ImageBridgeChild.cpp', - 'ipc/ImageBridgeParent.cpp', - 'ipc/ISurfaceAllocator.cpp', - 'ipc/LayerAnimationUtils.cpp', - 'ipc/LayerTransactionChild.cpp', - 'ipc/LayerTransactionParent.cpp', - 'ipc/LayerTreeOwnerTracker.cpp', - 'ipc/RefCountedShmem.cpp', - 'ipc/RemoteContentController.cpp', - 'ipc/ShadowLayers.cpp', - 'ipc/SharedPlanarYCbCrImage.cpp', - 'ipc/SharedRGBImage.cpp', - 'ipc/SharedSurfacesChild.cpp', - 'ipc/SharedSurfacesParent.cpp', - 'ipc/UiCompositorControllerChild.cpp', - 'ipc/UiCompositorControllerParent.cpp', - 'ipc/VideoBridgeChild.cpp', - 'ipc/VideoBridgeParent.cpp', - 'Layers.cpp', - 'LayerScope.cpp', - 'LayersHelpers.cpp', - 'LayerSorter.cpp', - 'LayersTypes.cpp', - 'LayerTreeInvalidation.cpp', - 'MemoryPressureObserver.cpp', - 'mlgpu/BufferCache.cpp', - 'mlgpu/CanvasLayerMLGPU.cpp', - 'mlgpu/ContainerLayerMLGPU.cpp', - 'mlgpu/FrameBuilder.cpp', - 'mlgpu/ImageLayerMLGPU.cpp', - 'mlgpu/LayerManagerMLGPU.cpp', - 'mlgpu/LayerMLGPU.cpp', - 'mlgpu/MaskOperation.cpp', - 'mlgpu/MemoryReportingMLGPU.cpp', - 'mlgpu/MLGDevice.cpp', - 'mlgpu/MLGPUScreenshotGrabber.cpp', - 'mlgpu/PaintedLayerMLGPU.cpp', - 'mlgpu/RenderPassMLGPU.cpp', - 'mlgpu/RenderViewMLGPU.cpp', - 'mlgpu/SharedBufferMLGPU.cpp', - 'mlgpu/StagingBuffer.cpp', - 'mlgpu/TexturedLayerMLGPU.cpp', - 'mlgpu/TextureSourceProviderMLGPU.cpp', - 'opengl/CompositingRenderTargetOGL.cpp', - 'opengl/CompositorOGL.cpp', - 'opengl/GLBlitTextureImageHelper.cpp', - 'opengl/OGLShaderProgram.cpp', - 'opengl/TextureClientOGL.cpp', - 'opengl/TextureHostOGL.cpp', - 'PaintThread.cpp', - 'ProfilerScreenshots.cpp', - 'ReadbackProcessor.cpp', - 'RenderTrace.cpp', - 'RepaintRequest.cpp', - 'RotatedBuffer.cpp', - 'SampleTime.cpp', - 'ScreenshotGrabber.cpp', - 'ScrollableLayerGuid.cpp', - 'ShareableCanvasRenderer.cpp', - 'SourceSurfaceSharedData.cpp', - 'SourceSurfaceVolatileData.cpp', - 'SyncObject.cpp', - 'TextureSourceProvider.cpp', - 'TextureWrapperImage.cpp', - 'wr/AsyncImagePipelineManager.cpp', - 'wr/ClipManager.cpp', - 'wr/DisplayItemCache.cpp', - 'wr/IpcResourceUpdateQueue.cpp', - 'wr/OMTAController.cpp', - 'wr/OMTASampler.cpp', - 'wr/RenderRootStateManager.cpp', - 'wr/RenderRootTypes.cpp', - 'wr/StackingContextHelper.cpp', - 'wr/WebRenderBridgeChild.cpp', - 'wr/WebRenderBridgeParent.cpp', - 'wr/WebRenderCanvasRenderer.cpp', - 'wr/WebRenderCommandBuilder.cpp', - 'wr/WebRenderDrawEventRecorder.cpp', - 'wr/WebRenderImageHost.cpp', - 'wr/WebRenderLayerManager.cpp', - 'wr/WebRenderScrollData.cpp', - 'wr/WebRenderUserData.cpp', - 'ZoomConstraints.cpp', + "AnimationHelper.cpp", + "AnimationInfo.cpp", + "apz/src/APZCTreeManager.cpp", + "apz/src/APZInputBridge.cpp", + 
"apz/src/APZPublicUtils.cpp", + "apz/src/APZSampler.cpp", + "apz/src/APZUpdater.cpp", + "apz/src/APZUtils.cpp", + "apz/src/AsyncPanZoomController.cpp", + "apz/src/AutoscrollAnimation.cpp", + "apz/src/Axis.cpp", + "apz/src/CheckerboardEvent.cpp", + "apz/src/DragTracker.cpp", + "apz/src/ExpectedGeckoMetrics.cpp", + "apz/src/FlingAccelerator.cpp", + "apz/src/FocusState.cpp", + "apz/src/FocusTarget.cpp", + "apz/src/GenericScrollAnimation.cpp", + "apz/src/GestureEventListener.cpp", + "apz/src/HitTestingTreeNode.cpp", + "apz/src/InputBlockState.cpp", + "apz/src/InputQueue.cpp", + "apz/src/KeyboardMap.cpp", + "apz/src/KeyboardScrollAction.cpp", + "apz/src/KeyboardScrollAnimation.cpp", + "apz/src/OverscrollHandoffState.cpp", + "apz/src/OvershootDetector.cpp", + "apz/src/PotentialCheckerboardDurationTracker.cpp", + "apz/src/QueuedInput.cpp", + "apz/src/SampledAPZCState.cpp", + "apz/src/SimpleVelocityTracker.cpp", + "apz/src/SmoothMsdScrollAnimation.cpp", + "apz/src/SmoothScrollAnimation.cpp", + "apz/src/WheelScrollAnimation.cpp", + "apz/testutil/APZTestData.cpp", + "apz/util/ActiveElementManager.cpp", + "apz/util/APZCCallbackHelper.cpp", + "apz/util/APZEventState.cpp", + "apz/util/APZThreadUtils.cpp", + "apz/util/CheckerboardReportService.cpp", + "apz/util/ChromeProcessController.cpp", + "apz/util/ContentProcessController.cpp", + "apz/util/DoubleTapToZoom.cpp", + "apz/util/InputAPZContext.cpp", + "apz/util/ScrollLinkedEffectDetector.cpp", + "apz/util/TouchActionHelper.cpp", + "apz/util/TouchCounter.cpp", + "AxisPhysicsModel.cpp", + "AxisPhysicsMSDModel.cpp", + "basic/BasicCanvasLayer.cpp", + "basic/BasicColorLayer.cpp", + "basic/BasicContainerLayer.cpp", + "basic/BasicImages.cpp", + "basic/BasicLayerManager.cpp", + "basic/BasicLayersImpl.cpp", + "basic/BasicPaintedLayer.cpp", + "basic/TextureHostBasic.cpp", + "BSPTree.cpp", + "BufferTexture.cpp", + "CanvasDrawEventRecorder.cpp", + "CanvasRenderer.cpp", + "client/CanvasClient.cpp", + "client/ClientCanvasLayer.cpp", + "client/ClientCanvasRenderer.cpp", + "client/ClientColorLayer.cpp", + "client/ClientContainerLayer.cpp", + "client/ClientImageLayer.cpp", + "client/ClientLayerManager.cpp", + "client/ClientPaintedLayer.cpp", + "client/ClientTiledPaintedLayer.cpp", + "client/CompositableClient.cpp", + "client/ContentClient.cpp", + "client/GPUVideoTextureClient.cpp", + "client/ImageClient.cpp", + "client/MultiTiledContentClient.cpp", + "client/SingleTiledContentClient.cpp", + "client/TextureClientPool.cpp", + "client/TextureClientRecycleAllocator.cpp", + "client/TextureClientSharedSurface.cpp", + "client/TextureRecorded.cpp", + "client/TiledContentClient.cpp", + "composite/AsyncCompositionManager.cpp", + "composite/CanvasLayerComposite.cpp", + "composite/ColorLayerComposite.cpp", + "composite/CompositableHost.cpp", + "composite/ContainerLayerComposite.cpp", + "composite/ContentHost.cpp", + "composite/Diagnostics.cpp", + "composite/FPSCounter.cpp", + "composite/FrameUniformityData.cpp", + "composite/GPUVideoTextureHost.cpp", + "composite/ImageComposite.cpp", + "composite/ImageHost.cpp", + "composite/ImageLayerComposite.cpp", + "composite/LayerManagerComposite.cpp", + "composite/PaintedLayerComposite.cpp", + "composite/TextRenderer.cpp", + "composite/TextureHost.cpp", + "composite/TiledContentHost.cpp", + "CompositionRecorder.cpp", + "Compositor.cpp", + "CompositorAnimationStorage.cpp", + "CompositorTypes.cpp", + "Effects.cpp", + "FrameMetrics.cpp", + "GLImages.cpp", + "ImageDataSerializer.cpp", + "ImageLayers.cpp", + "ipc/APZChild.cpp", + 
"ipc/APZCTreeManagerChild.cpp", + "ipc/APZCTreeManagerParent.cpp", + "ipc/APZInputBridgeChild.cpp", + "ipc/APZInputBridgeParent.cpp", + "ipc/CanvasChild.cpp", + "ipc/CanvasThread.cpp", + "ipc/CanvasTranslator.cpp", + "ipc/CompositableTransactionParent.cpp", + "ipc/CompositorBench.cpp", + "ipc/CompositorBridgeChild.cpp", + "ipc/CompositorBridgeParent.cpp", + "ipc/CompositorManagerChild.cpp", + "ipc/CompositorManagerParent.cpp", + "ipc/CompositorThread.cpp", + "ipc/CompositorVsyncScheduler.cpp", + "ipc/ContentCompositorBridgeParent.cpp", + "ipc/ImageBridgeChild.cpp", + "ipc/ImageBridgeParent.cpp", + "ipc/ISurfaceAllocator.cpp", + "ipc/LayerAnimationUtils.cpp", + "ipc/LayerTransactionChild.cpp", + "ipc/LayerTransactionParent.cpp", + "ipc/LayerTreeOwnerTracker.cpp", + "ipc/RefCountedShmem.cpp", + "ipc/RemoteContentController.cpp", + "ipc/ShadowLayers.cpp", + "ipc/SharedPlanarYCbCrImage.cpp", + "ipc/SharedRGBImage.cpp", + "ipc/SharedSurfacesChild.cpp", + "ipc/SharedSurfacesParent.cpp", + "ipc/UiCompositorControllerChild.cpp", + "ipc/UiCompositorControllerParent.cpp", + "ipc/VideoBridgeChild.cpp", + "ipc/VideoBridgeParent.cpp", + "Layers.cpp", + "LayerScope.cpp", + "LayersHelpers.cpp", + "LayerSorter.cpp", + "LayersTypes.cpp", + "LayerTreeInvalidation.cpp", + "MemoryPressureObserver.cpp", + "mlgpu/BufferCache.cpp", + "mlgpu/CanvasLayerMLGPU.cpp", + "mlgpu/ContainerLayerMLGPU.cpp", + "mlgpu/FrameBuilder.cpp", + "mlgpu/ImageLayerMLGPU.cpp", + "mlgpu/LayerManagerMLGPU.cpp", + "mlgpu/LayerMLGPU.cpp", + "mlgpu/MaskOperation.cpp", + "mlgpu/MemoryReportingMLGPU.cpp", + "mlgpu/MLGDevice.cpp", + "mlgpu/MLGPUScreenshotGrabber.cpp", + "mlgpu/PaintedLayerMLGPU.cpp", + "mlgpu/RenderPassMLGPU.cpp", + "mlgpu/RenderViewMLGPU.cpp", + "mlgpu/SharedBufferMLGPU.cpp", + "mlgpu/StagingBuffer.cpp", + "mlgpu/TexturedLayerMLGPU.cpp", + "mlgpu/TextureSourceProviderMLGPU.cpp", + "opengl/CompositingRenderTargetOGL.cpp", + "opengl/CompositorOGL.cpp", + "opengl/GLBlitTextureImageHelper.cpp", + "opengl/OGLShaderProgram.cpp", + "opengl/TextureClientOGL.cpp", + "opengl/TextureHostOGL.cpp", + "PaintThread.cpp", + "ProfilerScreenshots.cpp", + "ReadbackProcessor.cpp", + "RenderTrace.cpp", + "RepaintRequest.cpp", + "RotatedBuffer.cpp", + "SampleTime.cpp", + "ScreenshotGrabber.cpp", + "ScrollableLayerGuid.cpp", + "ShareableCanvasRenderer.cpp", + "SourceSurfaceSharedData.cpp", + "SourceSurfaceVolatileData.cpp", + "SyncObject.cpp", + "TextureSourceProvider.cpp", + "TextureWrapperImage.cpp", + "wr/AsyncImagePipelineManager.cpp", + "wr/ClipManager.cpp", + "wr/DisplayItemCache.cpp", + "wr/IpcResourceUpdateQueue.cpp", + "wr/OMTAController.cpp", + "wr/OMTASampler.cpp", + "wr/RenderRootStateManager.cpp", + "wr/RenderRootTypes.cpp", + "wr/StackingContextHelper.cpp", + "wr/WebRenderBridgeChild.cpp", + "wr/WebRenderBridgeParent.cpp", + "wr/WebRenderCanvasRenderer.cpp", + "wr/WebRenderCommandBuilder.cpp", + "wr/WebRenderDrawEventRecorder.cpp", + "wr/WebRenderImageHost.cpp", + "wr/WebRenderLayerManager.cpp", + "wr/WebRenderScrollData.cpp", + "wr/WebRenderUserData.cpp", + "ZoomConstraints.cpp", # XXX here are some unified build error. 
#'wr/WebRenderTextureHost.cpp' ] SOURCES += [ - 'basic/BasicCompositor.cpp', - 'basic/BasicImageLayer.cpp', - 'client/TextureClient.cpp', - 'ImageContainer.cpp', - 'PersistentBufferProvider.cpp', - 'protobuf/LayerScopePacket.pb.cc', - 'wr/WebRenderTextureHost.cpp', + "basic/BasicCompositor.cpp", + "basic/BasicImageLayer.cpp", + "client/TextureClient.cpp", + "ImageContainer.cpp", + "PersistentBufferProvider.cpp", + "protobuf/LayerScopePacket.pb.cc", + "wr/WebRenderTextureHost.cpp", ] -DEFINES['GOOGLE_PROTOBUF_NO_RTTI'] = True -DEFINES['GOOGLE_PROTOBUF_NO_STATIC_INITIALIZER'] = True -DEFINES['MOZ_APP_VERSION'] = CONFIG['MOZ_APP_VERSION'] +DEFINES["GOOGLE_PROTOBUF_NO_RTTI"] = True +DEFINES["GOOGLE_PROTOBUF_NO_STATIC_INITIALIZER"] = True +DEFINES["MOZ_APP_VERSION"] = CONFIG["MOZ_APP_VERSION"] -if CONFIG['MOZ_WIDGET_TOOLKIT'] == 'cocoa': +if CONFIG["MOZ_WIDGET_TOOLKIT"] == "cocoa": SOURCES += [ - 'basic/MacIOSurfaceTextureHostBasic.cpp', - 'opengl/MacIOSurfaceTextureClientOGL.cpp', - 'opengl/MacIOSurfaceTextureHostOGL.cpp', + "basic/MacIOSurfaceTextureHostBasic.cpp", + "opengl/MacIOSurfaceTextureClientOGL.cpp", + "opengl/MacIOSurfaceTextureHostOGL.cpp", ] IPDL_SOURCES += [ - 'ipc/LayersMessages.ipdlh', - 'ipc/LayersSurfaces.ipdlh', - 'ipc/PAPZ.ipdl', - 'ipc/PAPZCTreeManager.ipdl', - 'ipc/PAPZInputBridge.ipdl', - 'ipc/PCanvas.ipdl', - 'ipc/PCompositorBridge.ipdl', - 'ipc/PCompositorBridgeTypes.ipdlh', - 'ipc/PCompositorManager.ipdl', - 'ipc/PImageBridge.ipdl', - 'ipc/PLayerTransaction.ipdl', - 'ipc/PTexture.ipdl', - 'ipc/PUiCompositorController.ipdl', - 'ipc/PVideoBridge.ipdl', - 'ipc/PWebRenderBridge.ipdl', - 'ipc/WebRenderMessages.ipdlh', + "ipc/LayersMessages.ipdlh", + "ipc/LayersSurfaces.ipdlh", + "ipc/PAPZ.ipdl", + "ipc/PAPZCTreeManager.ipdl", + "ipc/PAPZInputBridge.ipdl", + "ipc/PCanvas.ipdl", + "ipc/PCompositorBridge.ipdl", + "ipc/PCompositorBridgeTypes.ipdlh", + "ipc/PCompositorManager.ipdl", + "ipc/PImageBridge.ipdl", + "ipc/PLayerTransaction.ipdl", + "ipc/PTexture.ipdl", + "ipc/PUiCompositorController.ipdl", + "ipc/PVideoBridge.ipdl", + "ipc/PWebRenderBridge.ipdl", + "ipc/WebRenderMessages.ipdlh", ] -include('/ipc/chromium/chromium-config.mozbuild') +include("/ipc/chromium/chromium-config.mozbuild") -if CONFIG['MOZ_WIDGET_TOOLKIT'] == 'windows': - GeneratedFile('CompositorD3D11Shaders.h', script='d3d11/genshaders.py', - inputs=['d3d11/shaders.manifest']) - GeneratedFile('MLGShaders.h', script='d3d11/genshaders.py', - inputs=['d3d11/mlgshaders/shaders.manifest']) +if CONFIG["MOZ_WIDGET_TOOLKIT"] == "windows": + GeneratedFile( + "CompositorD3D11Shaders.h", + script="d3d11/genshaders.py", + inputs=["d3d11/shaders.manifest"], + ) + GeneratedFile( + "MLGShaders.h", + script="d3d11/genshaders.py", + inputs=["d3d11/mlgshaders/shaders.manifest"], + ) LOCAL_INCLUDES += [ - '/docshell/base', # for nsDocShell.h - '/dom/canvas', # for intertwined WebGL headers - '/layout/base', # for TouchManager.h - '/layout/generic', # for nsTextFrame.h - '/media/libyuv/libyuv/include', # for libyuv.h + "/docshell/base", # for nsDocShell.h + "/dom/canvas", # for intertwined WebGL headers + "/layout/base", # for TouchManager.h + "/layout/generic", # for nsTextFrame.h + "/media/libyuv/libyuv/include", # for libyuv.h ] -FINAL_LIBRARY = 'xul' +FINAL_LIBRARY = "xul" -if CONFIG['MOZ_DEBUG']: - DEFINES['D3D_DEBUG_INFO'] = True +if CONFIG["MOZ_DEBUG"]: + DEFINES["D3D_DEBUG_INFO"] = True -if CONFIG['MOZ_ENABLE_D3D10_LAYER']: - DEFINES['MOZ_ENABLE_D3D10_LAYER'] = True +if CONFIG["MOZ_ENABLE_D3D10_LAYER"]: + 
DEFINES["MOZ_ENABLE_D3D10_LAYER"] = True -if CONFIG['ENABLE_TESTS']: - DIRS += ['apz/test/gtest'] - DIRS += ['apz/test/gtest/mvm'] +if CONFIG["ENABLE_TESTS"]: + DIRS += ["apz/test/gtest"] + DIRS += ["apz/test/gtest/mvm"] -MOCHITEST_MANIFESTS += ['apz/test/mochitest/mochitest.ini'] -BROWSER_CHROME_MANIFESTS += ['apz/test/mochitest/browser.ini'] +MOCHITEST_MANIFESTS += ["apz/test/mochitest/mochitest.ini"] +BROWSER_CHROME_MANIFESTS += ["apz/test/mochitest/browser.ini"] -CXXFLAGS += CONFIG['MOZ_CAIRO_CFLAGS'] -CXXFLAGS += CONFIG['TK_CFLAGS'] +CXXFLAGS += CONFIG["MOZ_CAIRO_CFLAGS"] +CXXFLAGS += CONFIG["TK_CFLAGS"] -LOCAL_INCLUDES += CONFIG['SKIA_INCLUDES'] +LOCAL_INCLUDES += CONFIG["SKIA_INCLUDES"] -if CONFIG['CC_TYPE'] in ('clang', 'gcc'): - CXXFLAGS += ['-Wno-error=shadow'] +if CONFIG["CC_TYPE"] in ("clang", "gcc"): + CXXFLAGS += ["-Wno-error=shadow"] # Suppress warnings in third-party code. - CXXFLAGS += [ - '-Wno-maybe-uninitialized' - ] - -if CONFIG['MOZ_ENABLE_SKIA']: - UNIFIED_SOURCES += [ - 'composite/PaintCounter.cpp', - ] + CXXFLAGS += ["-Wno-maybe-uninitialized"] -if CONFIG['FUZZING'] and CONFIG['FUZZING_INTERFACES']: - TEST_DIRS += [ - 'ipc/fuzztest' +if CONFIG["MOZ_ENABLE_SKIA"]: + UNIFIED_SOURCES += [ + "composite/PaintCounter.cpp", ] +if CONFIG["FUZZING"] and CONFIG["FUZZING_INTERFACES"]: + TEST_DIRS += ["ipc/fuzztest"] + # Add libFuzzer configuration directives -include('/tools/fuzzing/libfuzzer-config.mozbuild') +include("/tools/fuzzing/libfuzzer-config.mozbuild") diff --git a/gfx/moz.build b/gfx/moz.build index 61d348a6bfee9e..6d6274e636577e 100644 --- a/gfx/moz.build +++ b/gfx/moz.build @@ -4,52 +4,52 @@ # License, v. 2.0. If a copy of the MPL was not distributed with this # file, You can obtain one at http://mozilla.org/MPL/2.0/. -with Files('**'): - BUG_COMPONENT = ('Core', 'Graphics') - SCHEDULES.inclusive += ['android-hw-gfx'] -with Files('wr/**'): - BUG_COMPONENT = ('Core', 'Graphics: WebRender') +with Files("**"): + BUG_COMPONENT = ("Core", "Graphics") + SCHEDULES.inclusive += ["android-hw-gfx"] +with Files("wr/**"): + BUG_COMPONENT = ("Core", "Graphics: WebRender") -if CONFIG['MOZ_TREE_CAIRO']: - DIRS += ['cairo'] +if CONFIG["MOZ_TREE_CAIRO"]: + DIRS += ["cairo"] DIRS += [ - '2d', - 'ycbcr', - 'angle', - 'src', - 'qcms', - 'gl', - 'layers', - 'graphite2/src', - 'harfbuzz/src', - 'ots/src', - 'thebes', - 'ipc', - 'vr', - 'config', - 'webrender_bindings', - 'wgpu_bindings', + "2d", + "ycbcr", + "angle", + "src", + "qcms", + "gl", + "layers", + "graphite2/src", + "harfbuzz/src", + "ots/src", + "thebes", + "ipc", + "vr", + "config", + "webrender_bindings", + "wgpu_bindings", ] -if CONFIG['MOZ_ENABLE_SKIA']: - DIRS += ['skia'] +if CONFIG["MOZ_ENABLE_SKIA"]: + DIRS += ["skia"] -if CONFIG['MOZ_ENABLE_SKIA_PDF_SFNTLY']: - DIRS += ['sfntly/cpp/src'] +if CONFIG["MOZ_ENABLE_SKIA_PDF_SFNTLY"]: + DIRS += ["sfntly/cpp/src"] -if CONFIG['ENABLE_TESTS']: - DIRS += ['tests/gtest'] +if CONFIG["ENABLE_TESTS"]: + DIRS += ["tests/gtest"] -TEST_DIRS += ['tests'] +TEST_DIRS += ["tests"] -SPHINX_TREES['/gfx'] = 'docs' +SPHINX_TREES["/gfx"] = "docs" -with Files('docs/**'): - SCHEDULES.exclusive = ['docs'] +with Files("docs/**"): + SCHEDULES.exclusive = ["docs"] -with Files('wr/**'): - SCHEDULES.exclusive = ['webrender'] +with Files("wr/**"): + SCHEDULES.exclusive = ["webrender"] -with Files('webrender_bindings/**'): - SCHEDULES.exclusive = ['webrender'] +with Files("webrender_bindings/**"): + SCHEDULES.exclusive = ["webrender"] diff --git a/gfx/src/moz.build b/gfx/src/moz.build index 
168e04955f63f9..51058dd940a7da 100644 --- a/gfx/src/moz.build +++ b/gfx/src/moz.build @@ -5,93 +5,93 @@ # file, You can obtain one at http://mozilla.org/MPL/2.0/. XPIDL_SOURCES += [ - 'nsIFontEnumerator.idl', + "nsIFontEnumerator.idl", ] -XPIDL_MODULE = 'gfx' +XPIDL_MODULE = "gfx" -DEFINES['MOZ_APP_VERSION'] = '"%s"' % CONFIG['MOZ_APP_VERSION'] +DEFINES["MOZ_APP_VERSION"] = '"%s"' % CONFIG["MOZ_APP_VERSION"] EXPORTS += [ - 'DriverCrashGuard.h', - 'FilterDescription.h', - 'FilterSupport.h', - 'gfxCrashReporterUtils.h', - 'gfxTelemetry.h', - 'nsBoundingMetrics.h', - 'nsColor.h', - 'nsColorNameList.h', - 'nsColorNames.h', - 'nsCoord.h', - 'nsDeviceContext.h', - 'nsFont.h', - 'nsFontMetrics.h', - 'nsGfxCIID.h', - 'nsITheme.h', - 'nsMargin.h', - 'nsPoint.h', - 'nsRect.h', - 'nsRectAbsolute.h', - 'nsRegion.h', - 'nsRegionFwd.h', - 'nsSize.h', - 'nsTransform2D.h', - 'RegionBuilder.h', - 'X11UndefineNone.h' + "DriverCrashGuard.h", + "FilterDescription.h", + "FilterSupport.h", + "gfxCrashReporterUtils.h", + "gfxTelemetry.h", + "nsBoundingMetrics.h", + "nsColor.h", + "nsColorNameList.h", + "nsColorNames.h", + "nsCoord.h", + "nsDeviceContext.h", + "nsFont.h", + "nsFontMetrics.h", + "nsGfxCIID.h", + "nsITheme.h", + "nsMargin.h", + "nsPoint.h", + "nsRect.h", + "nsRectAbsolute.h", + "nsRegion.h", + "nsRegionFwd.h", + "nsSize.h", + "nsTransform2D.h", + "RegionBuilder.h", + "X11UndefineNone.h", ] EXPORTS.mozilla += [ - 'AppUnits.h', - 'ArrayView.h', - 'FontPropertyTypes.h', - 'RelativeLuminanceUtils.h', + "AppUnits.h", + "ArrayView.h", + "FontPropertyTypes.h", + "RelativeLuminanceUtils.h", ] EXPORTS.mozilla.gfx += [ - 'CompositorHitTestInfo.h', - 'TiledRegion.h', + "CompositorHitTestInfo.h", + "TiledRegion.h", ] -if CONFIG['MOZ_X11']: - EXPORTS.mozilla += ['X11Util.h'] +if CONFIG["MOZ_X11"]: + EXPORTS.mozilla += ["X11Util.h"] SOURCES += [ - 'X11Util.cpp', + "X11Util.cpp", ] UNIFIED_SOURCES += [ - 'DriverCrashGuard.cpp', - 'FilterSupport.cpp', - 'gfxCrashReporterUtils.cpp', - 'gfxTelemetry.cpp', - 'nsColor.cpp', - 'nsFont.cpp', - 'nsFontMetrics.cpp', - 'nsRect.cpp', - 'nsRegion.cpp', - 'nsThebesFontEnumerator.cpp', - 'nsTransform2D.cpp', - 'TiledRegion.cpp', + "DriverCrashGuard.cpp", + "FilterSupport.cpp", + "gfxCrashReporterUtils.cpp", + "gfxTelemetry.cpp", + "nsColor.cpp", + "nsFont.cpp", + "nsFontMetrics.cpp", + "nsRect.cpp", + "nsRegion.cpp", + "nsThebesFontEnumerator.cpp", + "nsTransform2D.cpp", + "TiledRegion.cpp", ] # nsDeviceContext.cpp cannot be built in unified mode because it pulls in OS X system headers. SOURCES += [ - 'nsDeviceContext.cpp', + "nsDeviceContext.cpp", ] XPCOM_MANIFESTS += [ - 'components.conf', + "components.conf", ] -include('/ipc/chromium/chromium-config.mozbuild') +include("/ipc/chromium/chromium-config.mozbuild") LOCAL_INCLUDES += [ - '/dom/ipc', # for ContentChild.h + "/dom/ipc", # for ContentChild.h ] -FINAL_LIBRARY = 'xul' +FINAL_LIBRARY = "xul" -CXXFLAGS += CONFIG['MOZ_CAIRO_CFLAGS'] -CXXFLAGS += CONFIG['TK_CFLAGS'] +CXXFLAGS += CONFIG["MOZ_CAIRO_CFLAGS"] +CXXFLAGS += CONFIG["TK_CFLAGS"] -if CONFIG['MOZ_WIDGET_TOOLKIT'] == 'gtk': - CXXFLAGS += CONFIG['MOZ_PANGO_CFLAGS'] +if CONFIG["MOZ_WIDGET_TOOLKIT"] == "gtk": + CXXFLAGS += CONFIG["MOZ_PANGO_CFLAGS"] diff --git a/gfx/tests/gtest/moz.build b/gfx/tests/gtest/moz.build index 1fae6b9d5b26c9..721fbafdc08314 100644 --- a/gfx/tests/gtest/moz.build +++ b/gfx/tests/gtest/moz.build @@ -5,81 +5,87 @@ # file, You can obtain one at http://mozilla.org/MPL/2.0/. 
UNIFIED_SOURCES += [ - 'gfxSurfaceRefCountTest.cpp', - 'MockWidget.cpp', - 'PolygonTestUtils.cpp', - 'TestArena.cpp', - 'TestArrayView.cpp', - 'TestBSPTree.cpp', - 'TestBufferRotation.cpp', - 'TestColorNames.cpp', - 'TestConfigManager.cpp', - 'TestGfxWidgets.cpp', - 'TestLayers.cpp', - 'TestMatrix.cpp', - 'TestMoz2D.cpp', - 'TestPolygon.cpp', - 'TestQcms.cpp', - 'TestRegion.cpp', - 'TestSkipChars.cpp', - 'TestSwizzle.cpp', - 'TestTextures.cpp', - 'TestTreeTraversal.cpp', + "gfxSurfaceRefCountTest.cpp", + "MockWidget.cpp", + "PolygonTestUtils.cpp", + "TestArena.cpp", + "TestArrayView.cpp", + "TestBSPTree.cpp", + "TestBufferRotation.cpp", + "TestColorNames.cpp", + "TestConfigManager.cpp", + "TestGfxWidgets.cpp", + "TestLayers.cpp", + "TestMatrix.cpp", + "TestMoz2D.cpp", + "TestPolygon.cpp", + "TestQcms.cpp", + "TestRegion.cpp", + "TestSkipChars.cpp", + "TestSwizzle.cpp", + "TestTextures.cpp", + "TestTreeTraversal.cpp", ] # skip the test on windows10-aarch64 due to perma-crash - bug 1544961 -if not(CONFIG['OS_TARGET'] == 'WINNT' and CONFIG['CPU_ARCH'] == 'aarch64'): +if not (CONFIG["OS_TARGET"] == "WINNT" and CONFIG["CPU_ARCH"] == "aarch64"): UNIFIED_SOURCES += [ - 'TestVsync.cpp', + "TestVsync.cpp", ] -if CONFIG['OS_TARGET'] != 'Android': +if CONFIG["OS_TARGET"] != "Android": UNIFIED_SOURCES += [ - 'TestCompositor.cpp', - 'TestRect.cpp', - 'TestTextureCompatibility.cpp', + "TestCompositor.cpp", + "TestRect.cpp", + "TestTextureCompatibility.cpp", ] -UNIFIED_SOURCES += [ '/gfx/2d/unittest/%s' % p for p in [ - 'TestBase.cpp', - 'TestBugs.cpp', - 'TestCairo.cpp', - 'TestPoint.cpp', - 'TestScaling.cpp', -]] +UNIFIED_SOURCES += [ + "/gfx/2d/unittest/%s" % p + for p in [ + "TestBase.cpp", + "TestBugs.cpp", + "TestCairo.cpp", + "TestPoint.cpp", + "TestScaling.cpp", + ] +] # not UNIFIED_SOURCES because layout_common_table_test.cc has classes # in an anonymous namespace which result in a GCC error when used in # tests (e g. "error: 'ScriptListTableTest_TestSuccess_Test' has a field # 'ScriptListTableTest_TestSuccess_Test::' whose type uses # the anonymous namespace"). -SOURCES += [ '/gfx/ots/tests/%s' % p for p in [ - 'cff_charstring_test.cc', - 'layout_common_table_test.cc', -]] +SOURCES += [ + "/gfx/ots/tests/%s" % p + for p in [ + "cff_charstring_test.cc", + "layout_common_table_test.cc", + ] +] # ICC profiles used for verifying QCMS transformations. The copyright # notice embedded in the profiles should be reviewed to ensure there are # no known restrictions on distribution. 
TEST_HARNESS_FILES.gtest += [ - 'icc_profiles/lcms_samsung_syncmaster.icc', - 'icc_profiles/lcms_thinkpad_w540.icc', + "icc_profiles/lcms_samsung_syncmaster.icc", + "icc_profiles/lcms_thinkpad_w540.icc", ] -include('/ipc/chromium/chromium-config.mozbuild') +include("/ipc/chromium/chromium-config.mozbuild") LOCAL_INCLUDES += [ - '/gfx/2d', - '/gfx/2d/unittest', - '/gfx/config', - '/gfx/layers', - '/gfx/ots/src', - '/gfx/qcms', + "/gfx/2d", + "/gfx/2d/unittest", + "/gfx/config", + "/gfx/layers", + "/gfx/ots/src", + "/gfx/qcms", ] -FINAL_LIBRARY = 'xul-gtest' +FINAL_LIBRARY = "xul-gtest" -CXXFLAGS += CONFIG['MOZ_CAIRO_CFLAGS'] +CXXFLAGS += CONFIG["MOZ_CAIRO_CFLAGS"] -if CONFIG['CC_TYPE'] in ('clang', 'gcc'): - CXXFLAGS += ['-Wno-error=shadow'] +if CONFIG["CC_TYPE"] in ("clang", "gcc"): + CXXFLAGS += ["-Wno-error=shadow"] diff --git a/gfx/tests/marionette/test_pref_rollout_workaround.py b/gfx/tests/marionette/test_pref_rollout_workaround.py index cac35b4c29ee90..cf2dc1bcfd8b17 100644 --- a/gfx/tests/marionette/test_pref_rollout_workaround.py +++ b/gfx/tests/marionette/test_pref_rollout_workaround.py @@ -11,18 +11,19 @@ from marionette_harness.marionette_test import MarionetteTestCase -gfx_rollout_override = 'gfx.webrender.all.qualified.gfxPref-default-override' -hw_qualified_override = 'gfx.webrender.all.qualified.hardware-override' -rollout_pref = 'gfx.webrender.all.qualified' +gfx_rollout_override = "gfx.webrender.all.qualified.gfxPref-default-override" +hw_qualified_override = "gfx.webrender.all.qualified.hardware-override" +rollout_pref = "gfx.webrender.all.qualified" + class WrPrefRolloutWorkAroundTestCase(MarionetteTestCase): - '''Test cases for WebRender gradual pref rollout work around. - Normandy sets default prefs when rolling out a pref change, but - gfx starts up before Normandy can set the pref's default value - so we save the default value on shutdown, and check it on startup. - This test verifies that we save and load the default value, - and that the right compositor is enabled due to the rollout. - ''' + """Test cases for WebRender gradual pref rollout work around. + Normandy sets default prefs when rolling out a pref change, but + gfx starts up before Normandy can set the pref's default value + so we save the default value on shutdown, and check it on startup. + This test verifies that we save and load the default value, + and that the right compositor is enabled due to the rollout. + """ def test_wr_rollout_workaround_on_non_qualifying_hw(self): # Override the StaticPrefs so that WR is not enabled, as it would be before a rollout. @@ -30,7 +31,7 @@ def test_wr_rollout_workaround_on_non_qualifying_hw(self): # Set HW override so we behave as if we on non-qualifying hardware. self.marionette.set_pref(pref=hw_qualified_override, value=False) # Ensure we don't fallback to the basic compositor for some spurious reason. - self.marionette.set_pref(pref='layers.acceleration.force-enabled', value=True) + self.marionette.set_pref(pref="layers.acceleration.force-enabled", value=True) # Restart browser. Gfx will observe hardware qualification override, and # gfx rollout override prefs. We should then be running in a browser which @@ -40,9 +41,16 @@ def test_wr_rollout_workaround_on_non_qualifying_hw(self): # Ensure we're not yet using WR; we're not rolled out yet! 
status, compositor = self.wr_status() - print('self.wr_status()={},{}'.format(status, compositor)) - self.assertEqual(status, 'disabled:FEATURE_FAILURE_NOT_QUALIFIED', 'Should start out as WR disabled, not qualified') - self.assertTrue(compositor != 'webrender', 'Before WR rollout on non-qualifying HW, should not be using WR.') + print("self.wr_status()={},{}".format(status, compositor)) + self.assertEqual( + status, + "disabled:FEATURE_FAILURE_NOT_QUALIFIED", + "Should start out as WR disabled, not qualified", + ) + self.assertTrue( + compositor != "webrender", + "Before WR rollout on non-qualifying HW, should not be using WR.", + ) # Set the rollout pref's default value, as Normandy would do, and restart. # Gfx's shutdown observer should save the default value of the pref. Upon @@ -51,25 +59,42 @@ def test_wr_rollout_workaround_on_non_qualifying_hw(self): self.marionette.set_pref(pref=rollout_pref, value=True, default_branch=True) self.marionette.restart(clean=False, in_app=True) status, compositor = self.wr_status() - print('self.wr_status()={},{}'.format(status, compositor)) - self.assertEqual(status, 'disabled:FEATURE_FAILURE_NOT_QUALIFIED', 'WR rolled out on non-qualifying hardware should not use WR.') - self.assertTrue(compositor != 'webrender', 'WR rolled out on non-qualifying HW should not be used.') + print("self.wr_status()={},{}".format(status, compositor)) + self.assertEqual( + status, + "disabled:FEATURE_FAILURE_NOT_QUALIFIED", + "WR rolled out on non-qualifying hardware should not use WR.", + ) + self.assertTrue( + compositor != "webrender", + "WR rolled out on non-qualifying HW should not be used.", + ) # Simulate a rollback of the rollout; set the pref to false at runtime. self.marionette.set_pref(pref=rollout_pref, value=False, default_branch=True) self.marionette.restart(clean=False, in_app=True) status, compositor = self.wr_status() - print('self.wr_status()={},{}'.format(status, compositor)) - self.assertEqual(status, 'disabled:FEATURE_FAILURE_NOT_QUALIFIED', 'WR rollback of rollout should revert to disabled on non-qualifying hardware.') - self.assertTrue(compositor != 'webrender', 'After roll back on non-qualifying HW, WR should not be used.') - - @skipIf(platform.machine() == "ARM64" and platform.system() == "Windows", "Bug 1536369 - Crashes on Windows 10 aarch64") + print("self.wr_status()={},{}".format(status, compositor)) + self.assertEqual( + status, + "disabled:FEATURE_FAILURE_NOT_QUALIFIED", + "WR rollback of rollout should revert to disabled on non-qualifying hardware.", + ) + self.assertTrue( + compositor != "webrender", + "After roll back on non-qualifying HW, WR should not be used.", + ) + + @skipIf( + platform.machine() == "ARM64" and platform.system() == "Windows", + "Bug 1536369 - Crashes on Windows 10 aarch64", + ) def test_wr_rollout_workaround_on_qualifying_hw(self): # Override the gfxPref so that WR is not enabled, as it would be before a rollout. self.marionette.set_pref(pref=gfx_rollout_override, value=False) # Set HW override so we behave as if we on qualifying hardware. self.marionette.set_pref(pref=hw_qualified_override, value=True) - self.marionette.set_pref(pref='layers.acceleration.force-enabled', value=True) + self.marionette.set_pref(pref="layers.acceleration.force-enabled", value=True) # Restart browser. Gfx will observe hardware qualification override, and # gfx rollout override prefs. 
We should then be running in a browser which @@ -79,9 +104,16 @@ def test_wr_rollout_workaround_on_qualifying_hw(self): # Ensure we're not yet using WR; we're not rolled out yet! status, compositor = self.wr_status() - print('self.wr_status()={},{}'.format(status, compositor)) - self.assertEqual(status, 'disabled:FEATURE_FAILURE_IN_EXPERIMENT', 'Should start out as WR disabled, in experiment') - self.assertTrue(compositor != 'webrender', 'Before WR rollout on qualifying HW, should not be using WR.') + print("self.wr_status()={},{}".format(status, compositor)) + self.assertEqual( + status, + "disabled:FEATURE_FAILURE_IN_EXPERIMENT", + "Should start out as WR disabled, in experiment", + ) + self.assertTrue( + compositor != "webrender", + "Before WR rollout on qualifying HW, should not be using WR.", + ) # Set the rollout pref's default value, as Normandy would do, and restart. # Gfx's shutdown observer should save the default value of the pref. Upon @@ -90,26 +122,46 @@ def test_wr_rollout_workaround_on_qualifying_hw(self): self.marionette.set_pref(pref=rollout_pref, value=True, default_branch=True) self.marionette.restart(clean=False, in_app=True) status, compositor = self.wr_status() - print('self.wr_status()={},{}'.format(status, compositor)) - self.assertEqual(status, 'available', 'WR rolled out on qualifying hardware should report be available #1.') - self.assertEqual(compositor, 'webrender', 'After rollout on qualifying HW, WR should be used.') + print("self.wr_status()={},{}".format(status, compositor)) + self.assertEqual( + status, + "available", + "WR rolled out on qualifying hardware should report be available #1.", + ) + self.assertEqual( + compositor, + "webrender", + "After rollout on qualifying HW, WR should be used.", + ) # Simulate a rollback of the rollout; set the pref to false at runtime. self.marionette.set_pref(pref=rollout_pref, value=False, default_branch=True) self.marionette.restart(clean=False, in_app=True) status, compositor = self.wr_status() - print('self.wr_status()={},{}'.format(status, compositor)) - self.assertEqual(status, 'disabled:FEATURE_FAILURE_IN_EXPERIMENT', 'WR rollback of rollout should revert to disabled on qualifying hardware.') - self.assertTrue(compositor != 'webrender', 'After roll back on qualifying HW, WR should not be used.') + print("self.wr_status()={},{}".format(status, compositor)) + self.assertEqual( + status, + "disabled:FEATURE_FAILURE_IN_EXPERIMENT", + "WR rollback of rollout should revert to disabled on qualifying hardware.", + ) + self.assertTrue( + compositor != "webrender", + "After roll back on qualifying HW, WR should not be used.", + ) def wr_status(self): self.marionette.set_context(self.marionette.CONTEXT_CHROME) - result = self.marionette.execute_script(''' + result = self.marionette.execute_script( + """ try { const gfxInfo = Components.classes['@mozilla.org/gfx/info;1'].getService(Ci.nsIGfxInfo); return {features: gfxInfo.getFeatures(), log: gfxInfo.getFeatureLog()}; } catch (e) { return {} } - ''') - return result['features']['webrender']['status'], result['features']['compositor'] + """ + ) + return ( + result["features"]["webrender"]["status"], + result["features"]["compositor"], + ) diff --git a/gfx/tests/moz.build b/gfx/tests/moz.build index 3e6c8c6de14f0e..7da49fa78948c8 100644 --- a/gfx/tests/moz.build +++ b/gfx/tests/moz.build @@ -4,7 +4,7 @@ # License, v. 2.0. If a copy of the MPL was not distributed with this # file, You can obtain one at http://mozilla.org/MPL/2.0/. 
-MOCHITEST_MANIFESTS += ['mochitest/mochitest.ini'] -BROWSER_CHROME_MANIFESTS += ['browser/browser.ini'] -MOCHITEST_CHROME_MANIFESTS += ['chrome/chrome.ini'] -MARIONETTE_GPU_MANIFESTS += ['marionette/manifest.ini'] +MOCHITEST_MANIFESTS += ["mochitest/mochitest.ini"] +BROWSER_CHROME_MANIFESTS += ["browser/browser.ini"] +MOCHITEST_CHROME_MANIFESTS += ["chrome/chrome.ini"] +MARIONETTE_GPU_MANIFESTS += ["marionette/manifest.ini"] diff --git a/gfx/thebes/gencjkcisvs.py b/gfx/thebes/gencjkcisvs.py index 4afab956447cf1..72a558843560fb 100644 --- a/gfx/thebes/gencjkcisvs.py +++ b/gfx/thebes/gencjkcisvs.py @@ -7,18 +7,20 @@ import re import sys -f = open(sys.argv[1] if len(sys.argv) > 1 else 'StandardizedVariants.txt') +f = open(sys.argv[1] if len(sys.argv) > 1 else "StandardizedVariants.txt") line = f.readline() -m = re.compile('^# (StandardizedVariants(-\d+(\.\d+)*)?\.txt)').search(line) +m = re.compile("^# (StandardizedVariants(-\d+(\.\d+)*)?\.txt)").search(line) fileversion = m.group(1) vsdict = {} -r = re.compile('^([0-9A-F]{4,6}) (FE0[0-9A-F]); CJK COMPATIBILITY IDEOGRAPH-([0-9A-F]{4,6});') +r = re.compile( + "^([0-9A-F]{4,6}) (FE0[0-9A-F]); CJK COMPATIBILITY IDEOGRAPH-([0-9A-F]{4,6});" +) while True: line = f.readline() if not line: break - if 'CJK COMPATIBILITY IDEOGRAPH-' not in line: + if "CJK COMPATIBILITY IDEOGRAPH-" not in line: continue m = r.search(line) @@ -38,8 +40,9 @@ offsets.append(length) length += 4 + 5 * len(mappings) -f = open(sys.argv[2] if len(sys.argv) > 2 else 'CJKCompatSVS.cpp', 'wb') -f.write("""// Generated by %s. Do not edit. +f = open(sys.argv[2] if len(sys.argv) > 2 else "CJKCompatSVS.cpp", "wb") +f.write( + """// Generated by %s. Do not edit. #include @@ -56,18 +59,24 @@ // field, CJK Compatibility Ideographs Supplement (U+2F800..U+2FA1F) will be // mapped to 0xFB00..0xFD1F. extern const uint8_t sCJKCompatSVSTable[] = { -""" % (os.path.basename(sys.argv[0]), fileversion)) -f.write(' U16(14), // format\n') -f.write(' U32(%d), // length\n' % length) -f.write(' U32(%d), // numVarSelectorRecords\n' % len(vsdict)) +""" + % (os.path.basename(sys.argv[0]), fileversion) +) +f.write(" U16(14), // format\n") +f.write(" U32(%d), // length\n" % length) +f.write(" U32(%d), // numVarSelectorRecords\n" % len(vsdict)) for i, k in enumerate(sorted(vsdict.keys())): - f.write(' U24(0x%04X), U32(0), U32(%d), // varSelectorRecord[%d]\n' % (k, offsets[i], i)) + f.write( + " U24(0x%04X), U32(0), U32(%d), // varSelectorRecord[%d]\n" + % (k, offsets[i], i) + ) for (k, mappings) in sorted(vsdict.items()): - f.write(' // 0x%04X\n' % k) - f.write(' U32(%d), // numUVSMappings\n' % len(mappings)) + f.write(" // 0x%04X\n" % k) + f.write(" U32(%d), // numUVSMappings\n" % len(mappings)) for (unified, compat) in sorted(mappings.items()): - f.write(' U24(0x%04X), GLYPH(0x%04X),\n' % (unified, compat)) -f.write("""}; + f.write(" U24(0x%04X), GLYPH(0x%04X),\n" % (unified, compat)) +f.write( + """}; #undef U16 #undef U24 @@ -75,4 +84,6 @@ #undef GLYPH static_assert(sizeof sCJKCompatSVSTable == %d, "Table generator has a bug."); -""" % length) +""" + % length +) diff --git a/gfx/thebes/moz.build b/gfx/thebes/moz.build index 8e5bcf1aea6f69..d317214a4458d0 100644 --- a/gfx/thebes/moz.build +++ b/gfx/thebes/moz.build @@ -4,293 +4,291 @@ # License, v. 2.0. If a copy of the MPL was not distributed with this # file, You can obtain one at http://mozilla.org/MPL/2.0/. 
-with Files('*Text*'): - BUG_COMPONENT = ('Core', 'Graphics: Text') +with Files("*Text*"): + BUG_COMPONENT = ("Core", "Graphics: Text") -with Files('*DWrite*'): - BUG_COMPONENT = ('Core', 'Graphics: Text') +with Files("*DWrite*"): + BUG_COMPONENT = ("Core", "Graphics: Text") XPIDL_SOURCES += [ - 'nsIFontLoadCompleteCallback.idl', + "nsIFontLoadCompleteCallback.idl", ] -XPIDL_MODULE = 'gfx' +XPIDL_MODULE = "gfx" EXPORTS += [ - 'DrawMode.h', - 'gfx2DGlue.h', - 'gfxAlphaRecovery.h', - 'gfxASurface.h', - 'gfxBaseSharedMemorySurface.h', - 'gfxBlur.h', - 'gfxColor.h', - 'gfxContext.h', - 'gfxDrawable.h', - 'gfxEnv.h', - 'gfxFailure.h', - 'gfxFont.h', - 'gfxFontConstants.h', - 'gfxFontEntry.h', - 'gfxFontFamilyList.h', - 'gfxFontFeatures.h', - 'gfxFontInfoLoader.h', - 'gfxFontPrefLangList.h', - 'gfxFontSrcPrincipal.h', - 'gfxFontSrcURI.h', - 'gfxFontUtils.h', - 'gfxFontVariations.h', - 'gfxGradientCache.h', - 'gfxImageSurface.h', - 'gfxLineSegment.h', - 'gfxMathTable.h', - 'gfxMatrix.h', - 'gfxOTSUtils.h', - 'gfxPattern.h', - 'gfxPlatform.h', - 'gfxPlatformFontList.h', - 'gfxPoint.h', - 'gfxQuad.h', - 'gfxQuaternion.h', - 'gfxRect.h', - 'gfxSharedImageSurface.h', - 'gfxSkipChars.h', - 'gfxSVGGlyphs.h', - 'gfxTextRun.h', - 'gfxTypes.h', - 'gfxUserFontSet.h', - 'gfxUtils.h', - 'SharedFontList.h', - 'SoftwareVsyncSource.h', - 'ThebesRLBoxTypes.h', - 'VsyncSource.h', + "DrawMode.h", + "gfx2DGlue.h", + "gfxAlphaRecovery.h", + "gfxASurface.h", + "gfxBaseSharedMemorySurface.h", + "gfxBlur.h", + "gfxColor.h", + "gfxContext.h", + "gfxDrawable.h", + "gfxEnv.h", + "gfxFailure.h", + "gfxFont.h", + "gfxFontConstants.h", + "gfxFontEntry.h", + "gfxFontFamilyList.h", + "gfxFontFeatures.h", + "gfxFontInfoLoader.h", + "gfxFontPrefLangList.h", + "gfxFontSrcPrincipal.h", + "gfxFontSrcURI.h", + "gfxFontUtils.h", + "gfxFontVariations.h", + "gfxGradientCache.h", + "gfxImageSurface.h", + "gfxLineSegment.h", + "gfxMathTable.h", + "gfxMatrix.h", + "gfxOTSUtils.h", + "gfxPattern.h", + "gfxPlatform.h", + "gfxPlatformFontList.h", + "gfxPoint.h", + "gfxQuad.h", + "gfxQuaternion.h", + "gfxRect.h", + "gfxSharedImageSurface.h", + "gfxSkipChars.h", + "gfxSVGGlyphs.h", + "gfxTextRun.h", + "gfxTypes.h", + "gfxUserFontSet.h", + "gfxUtils.h", + "SharedFontList.h", + "SoftwareVsyncSource.h", + "ThebesRLBoxTypes.h", + "VsyncSource.h", ] EXPORTS.mozilla.gfx += [ - 'D3D11Checks.h', - 'DeviceManagerDx.h', - 'DisplayConfigWindows.h', - 'PrintTarget.h', - 'PrintTargetThebes.h', - 'ThebesRLBox.h', + "D3D11Checks.h", + "DeviceManagerDx.h", + "DisplayConfigWindows.h", + "PrintTarget.h", + "PrintTargetThebes.h", + "ThebesRLBox.h", ] -if CONFIG['MOZ_ENABLE_SKIA']: - EXPORTS.mozilla.gfx += [ - 'SkMemoryReporter.h' - ] +if CONFIG["MOZ_ENABLE_SKIA"]: + EXPORTS.mozilla.gfx += ["SkMemoryReporter.h"] -if CONFIG['MOZ_WIDGET_TOOLKIT'] == 'android': +if CONFIG["MOZ_WIDGET_TOOLKIT"] == "android": EXPORTS += [ - 'gfxAndroidPlatform.h', - 'gfxFT2FontBase.h', - 'gfxFT2Fonts.h', + "gfxAndroidPlatform.h", + "gfxFT2FontBase.h", + "gfxFT2Fonts.h", ] EXPORTS.mozilla.gfx += [ - 'PrintTargetPDF.h', + "PrintTargetPDF.h", ] SOURCES += [ - 'gfxAndroidPlatform.cpp', - 'gfxFT2FontBase.cpp', - 'gfxFT2FontList.cpp', - 'gfxFT2Fonts.cpp', - 'gfxFT2Utils.cpp', - 'PrintTargetPDF.cpp', + "gfxAndroidPlatform.cpp", + "gfxFT2FontBase.cpp", + "gfxFT2FontList.cpp", + "gfxFT2Fonts.cpp", + "gfxFT2Utils.cpp", + "PrintTargetPDF.cpp", ] -elif CONFIG['MOZ_WIDGET_TOOLKIT'] == 'cocoa': +elif CONFIG["MOZ_WIDGET_TOOLKIT"] == "cocoa": EXPORTS += [ - 'gfxPlatformMac.h', - 
'gfxQuartzNativeDrawing.h', - 'gfxQuartzSurface.h', + "gfxPlatformMac.h", + "gfxQuartzNativeDrawing.h", + "gfxQuartzSurface.h", ] EXPORTS.mozilla.gfx += [ - 'PrintTargetCG.h', + "PrintTargetCG.h", ] SOURCES += [ - 'gfxCoreTextShaper.cpp', - 'gfxMacFont.cpp', - 'gfxPlatformMac.cpp', - 'gfxQuartzNativeDrawing.cpp', - 'gfxQuartzSurface.cpp', - 'PrintTargetCG.mm', + "gfxCoreTextShaper.cpp", + "gfxMacFont.cpp", + "gfxPlatformMac.cpp", + "gfxQuartzNativeDrawing.cpp", + "gfxQuartzSurface.cpp", + "PrintTargetCG.mm", ] -elif CONFIG['MOZ_WIDGET_TOOLKIT'] == 'gtk': +elif CONFIG["MOZ_WIDGET_TOOLKIT"] == "gtk": EXPORTS += [ - 'gfxFT2FontBase.h', - 'gfxGdkNativeRenderer.h', - 'gfxPlatformGtk.h', + "gfxFT2FontBase.h", + "gfxGdkNativeRenderer.h", + "gfxPlatformGtk.h", ] EXPORTS.mozilla.gfx += [ - 'PrintTargetPDF.h', - 'PrintTargetPS.h', + "PrintTargetPDF.h", + "PrintTargetPS.h", ] SOURCES += [ - 'gfxFcPlatformFontList.cpp', - 'gfxFT2FontBase.cpp', - 'gfxFT2Utils.cpp', - 'gfxGdkNativeRenderer.cpp', - 'gfxPlatformGtk.cpp', - 'PrintTargetPDF.cpp', - 'PrintTargetPS.cpp', + "gfxFcPlatformFontList.cpp", + "gfxFT2FontBase.cpp", + "gfxFT2Utils.cpp", + "gfxGdkNativeRenderer.cpp", + "gfxPlatformGtk.cpp", + "PrintTargetPDF.cpp", + "PrintTargetPS.cpp", ] - if CONFIG['MOZ_X11']: + if CONFIG["MOZ_X11"]: EXPORTS += [ - 'gfxXlibNativeRenderer.h', - 'gfxXlibSurface.h', + "gfxXlibNativeRenderer.h", + "gfxXlibSurface.h", ] SOURCES += [ - 'gfxXlibNativeRenderer.cpp', - 'gfxXlibSurface.cpp', + "gfxXlibNativeRenderer.cpp", + "gfxXlibSurface.cpp", ] -elif CONFIG['MOZ_WIDGET_TOOLKIT'] == 'windows': +elif CONFIG["MOZ_WIDGET_TOOLKIT"] == "windows": EXPORTS += [ - 'gfxDWriteFonts.h', - 'gfxGDIFont.h', - 'gfxGDIFontList.h', - 'gfxWindowsNativeDrawing.h', - 'gfxWindowsPlatform.h', - 'gfxWindowsSurface.h', + "gfxDWriteFonts.h", + "gfxGDIFont.h", + "gfxGDIFontList.h", + "gfxWindowsNativeDrawing.h", + "gfxWindowsPlatform.h", + "gfxWindowsSurface.h", ] EXPORTS.mozilla.gfx += [ - 'PrintTargetPDF.h', - 'PrintTargetWindows.h', + "PrintTargetPDF.h", + "PrintTargetWindows.h", ] SOURCES += [ - 'DisplayConfigWindows.cpp', - 'gfxDWriteCommon.cpp', - 'gfxDWriteFonts.cpp', - 'gfxGDIFont.cpp', - 'gfxGDIFontList.cpp', - 'gfxWindowsNativeDrawing.cpp', - 'gfxWindowsPlatform.cpp', - 'gfxWindowsSurface.cpp', - 'PrintTargetPDF.cpp', - 'PrintTargetWindows.cpp', + "DisplayConfigWindows.cpp", + "gfxDWriteCommon.cpp", + "gfxDWriteFonts.cpp", + "gfxGDIFont.cpp", + "gfxGDIFontList.cpp", + "gfxWindowsNativeDrawing.cpp", + "gfxWindowsPlatform.cpp", + "gfxWindowsSurface.cpp", + "PrintTargetPDF.cpp", + "PrintTargetWindows.cpp", ] UNIFIED_SOURCES += [ - 'gfxDWriteFontList.cpp', + "gfxDWriteFontList.cpp", ] # Are we targeting x86 or x64? If so, build gfxAlphaRecoverySSE2.cpp. -if CONFIG['INTEL_ARCHITECTURE']: - SOURCES += ['gfxAlphaRecoverySSE2.cpp'] +if CONFIG["INTEL_ARCHITECTURE"]: + SOURCES += ["gfxAlphaRecoverySSE2.cpp"] # The file uses SSE2 intrinsics, so it needs special compile flags on some # compilers. - SOURCES['gfxAlphaRecoverySSE2.cpp'].flags += CONFIG['SSE2_FLAGS'] + SOURCES["gfxAlphaRecoverySSE2.cpp"].flags += CONFIG["SSE2_FLAGS"] SOURCES += [ # Includes mac system header conflicting with point/size, # and includes glxXlibSurface.h which drags in Xrender.h - 'gfxASurface.cpp', + "gfxASurface.cpp", # on X11, gfxDrawable.cpp includes X headers for an old workaround which # we could consider removing soon (affects Ubuntus older than 10.04 LTS) # which currently prevent it from joining UNIFIED_SOURCES. 
- 'gfxDrawable.cpp', + "gfxDrawable.cpp", # gfxFontUtils.cpp and gfxPlatform.cpp include mac system header conflicting with point/size - 'gfxFontUtils.cpp', - 'gfxPlatform.cpp', - 'PrintTarget.cpp', - 'PrintTargetThebes.cpp', + "gfxFontUtils.cpp", + "gfxPlatform.cpp", + "PrintTarget.cpp", + "PrintTargetThebes.cpp", ] UNIFIED_SOURCES += [ - 'CJKCompatSVS.cpp', - 'gfxAlphaRecovery.cpp', - 'gfxBaseSharedMemorySurface.cpp', - 'gfxBlur.cpp', - 'gfxContext.cpp', - 'gfxFont.cpp', - 'gfxFontEntry.cpp', - 'gfxFontFeatures.cpp', - 'gfxFontInfoLoader.cpp', - 'gfxFontMissingGlyphs.cpp', - 'gfxFontSrcPrincipal.cpp', - 'gfxFontSrcURI.cpp', - 'gfxGlyphExtents.cpp', - 'gfxGradientCache.cpp', - 'gfxGraphiteShaper.cpp', - 'gfxHarfBuzzShaper.cpp', - 'gfxImageSurface.cpp', - 'gfxMathTable.cpp', - 'gfxPattern.cpp', - 'gfxPlatformFontList.cpp', - 'gfxScriptItemizer.cpp', - 'gfxSkipChars.cpp', - 'gfxSVGGlyphs.cpp', - 'gfxTextRun.cpp', - 'gfxUserFontSet.cpp', - 'gfxUtils.cpp', - 'SharedFontList.cpp', - 'SoftwareVsyncSource.cpp', - 'VsyncSource.cpp', + "CJKCompatSVS.cpp", + "gfxAlphaRecovery.cpp", + "gfxBaseSharedMemorySurface.cpp", + "gfxBlur.cpp", + "gfxContext.cpp", + "gfxFont.cpp", + "gfxFontEntry.cpp", + "gfxFontFeatures.cpp", + "gfxFontInfoLoader.cpp", + "gfxFontMissingGlyphs.cpp", + "gfxFontSrcPrincipal.cpp", + "gfxFontSrcURI.cpp", + "gfxGlyphExtents.cpp", + "gfxGradientCache.cpp", + "gfxGraphiteShaper.cpp", + "gfxHarfBuzzShaper.cpp", + "gfxImageSurface.cpp", + "gfxMathTable.cpp", + "gfxPattern.cpp", + "gfxPlatformFontList.cpp", + "gfxScriptItemizer.cpp", + "gfxSkipChars.cpp", + "gfxSVGGlyphs.cpp", + "gfxTextRun.cpp", + "gfxUserFontSet.cpp", + "gfxUtils.cpp", + "SharedFontList.cpp", + "SoftwareVsyncSource.cpp", + "VsyncSource.cpp", ] -if CONFIG['MOZ_ENABLE_SKIA']: +if CONFIG["MOZ_ENABLE_SKIA"]: UNIFIED_SOURCES += [ - 'SkMemoryReporter.cpp', + "SkMemoryReporter.cpp", ] -if CONFIG['MOZ_WIDGET_TOOLKIT'] == 'cocoa': +if CONFIG["MOZ_WIDGET_TOOLKIT"] == "cocoa": UNIFIED_SOURCES += [ - 'gfxMacPlatformFontList.mm', + "gfxMacPlatformFontList.mm", ] -elif CONFIG['MOZ_WIDGET_TOOLKIT'] == 'windows': +elif CONFIG["MOZ_WIDGET_TOOLKIT"] == "windows": UNIFIED_SOURCES += [ - 'D3D11Checks.cpp', + "D3D11Checks.cpp", ] SOURCES += [ - 'DeviceManagerDx.cpp', + "DeviceManagerDx.cpp", ] -if CONFIG['MOZ_ENABLE_SKIA_PDF']: +if CONFIG["MOZ_ENABLE_SKIA_PDF"]: EXPORTS.mozilla.gfx += [ - 'PrintTargetSkPDF.h', + "PrintTargetSkPDF.h", ] SOURCES += [ - 'PrintTargetSkPDF.cpp', + "PrintTargetSkPDF.cpp", ] # We use ICU for normalization functions: USE_LIBS += [ - 'icu', + "icu", ] -include('/ipc/chromium/chromium-config.mozbuild') +include("/ipc/chromium/chromium-config.mozbuild") -FINAL_LIBRARY = 'xul' +FINAL_LIBRARY = "xul" LOCAL_INCLUDES += [ - '/dom/base', - '/dom/xml', + "/dom/base", + "/dom/xml", ] -if CONFIG['MOZ_WIDGET_TOOLKIT'] in ('android', 'gtk'): - DEFINES['MOZ_ENABLE_FREETYPE'] = True +if CONFIG["MOZ_WIDGET_TOOLKIT"] in ("android", "gtk"): + DEFINES["MOZ_ENABLE_FREETYPE"] = True -if CONFIG['MOZ_WIDGET_TOOLKIT'] == 'windows': - for var in ('MOZ_ENABLE_D3D10_LAYER',): +if CONFIG["MOZ_WIDGET_TOOLKIT"] == "windows": + for var in ("MOZ_ENABLE_D3D10_LAYER",): if CONFIG[var]: DEFINES[var] = True -CXXFLAGS += CONFIG['MOZ_CAIRO_CFLAGS'] -CXXFLAGS += CONFIG['TK_CFLAGS'] -CFLAGS += CONFIG['MOZ_CAIRO_CFLAGS'] -CFLAGS += CONFIG['TK_CFLAGS'] +CXXFLAGS += CONFIG["MOZ_CAIRO_CFLAGS"] +CXXFLAGS += CONFIG["TK_CFLAGS"] +CFLAGS += CONFIG["MOZ_CAIRO_CFLAGS"] +CFLAGS += CONFIG["TK_CFLAGS"] -if CONFIG['MOZ_WIDGET_TOOLKIT'] in ('android'): - CXXFLAGS += 
CONFIG['CAIRO_FT_CFLAGS'] +if CONFIG["MOZ_WIDGET_TOOLKIT"] in ("android"): + CXXFLAGS += CONFIG["CAIRO_FT_CFLAGS"] -if CONFIG['MOZ_WIDGET_TOOLKIT'] == 'gtk': - CXXFLAGS += CONFIG['MOZ_PANGO_CFLAGS'] +if CONFIG["MOZ_WIDGET_TOOLKIT"] == "gtk": + CXXFLAGS += CONFIG["MOZ_PANGO_CFLAGS"] -if CONFIG['MOZ_WAYLAND']: - CXXFLAGS += CONFIG['MOZ_WAYLAND_CFLAGS'] +if CONFIG["MOZ_WAYLAND"]: + CXXFLAGS += CONFIG["MOZ_WAYLAND_CFLAGS"] -LOCAL_INCLUDES += CONFIG['SKIA_INCLUDES'] +LOCAL_INCLUDES += CONFIG["SKIA_INCLUDES"] -DEFINES['GRAPHITE2_STATIC'] = True +DEFINES["GRAPHITE2_STATIC"] = True -if CONFIG['CC_TYPE'] == 'clang': +if CONFIG["CC_TYPE"] == "clang": # Suppress warnings from Skia header files. - SOURCES['gfxPlatform.cpp'].flags += ['-Wno-implicit-fallthrough'] + SOURCES["gfxPlatform.cpp"].flags += ["-Wno-implicit-fallthrough"] diff --git a/gfx/vr/moz.build b/gfx/vr/moz.build index e4fb6e44804a59..6f24beb9bab1b2 100644 --- a/gfx/vr/moz.build +++ b/gfx/vr/moz.build @@ -5,114 +5,108 @@ # file, You can obtain one at http://mozilla.org/MPL/2.0/. EXPORTS += [ - 'external_api/moz_external_vr.h', - 'FxROutputHandler.h', - 'FxRWindowManager.h', - 'gfxVR.h', - 'ipc/VRChild.h', - 'ipc/VRGPUChild.h', - 'ipc/VRGPUParent.h', - 'ipc/VRLayerChild.h', - 'ipc/VRManagerChild.h', - 'ipc/VRManagerParent.h', - 'ipc/VRMessageUtils.h', - 'ipc/VRParent.h', - 'ipc/VRProcessChild.h', - 'ipc/VRProcessManager.h', - 'ipc/VRProcessParent.h', - 'service/VRService.h', - 'VRDisplayClient.h', - 'VRDisplayPresentation.h', - 'VRManager.h', - 'VRPuppetCommandBuffer.h', - 'VRShMem.h', - 'VRThread.h', + "external_api/moz_external_vr.h", + "FxROutputHandler.h", + "FxRWindowManager.h", + "gfxVR.h", + "ipc/VRChild.h", + "ipc/VRGPUChild.h", + "ipc/VRGPUParent.h", + "ipc/VRLayerChild.h", + "ipc/VRManagerChild.h", + "ipc/VRManagerParent.h", + "ipc/VRMessageUtils.h", + "ipc/VRParent.h", + "ipc/VRProcessChild.h", + "ipc/VRProcessManager.h", + "ipc/VRProcessParent.h", + "service/VRService.h", + "VRDisplayClient.h", + "VRDisplayPresentation.h", + "VRManager.h", + "VRPuppetCommandBuffer.h", + "VRShMem.h", + "VRThread.h", ] LOCAL_INCLUDES += [ - '/dom/base', - '/dom/canvas', - '/gfx/layers/d3d11', - '/gfx/thebes', + "/dom/base", + "/dom/canvas", + "/gfx/layers/d3d11", + "/gfx/thebes", ] UNIFIED_SOURCES += [ - 'gfxVR.cpp', - 'ipc/VRChild.cpp', - 'ipc/VRGPUChild.cpp', - 'ipc/VRGPUParent.cpp', - 'ipc/VRManagerChild.cpp', - 'ipc/VRManagerParent.cpp', - 'ipc/VRParent.cpp', - 'ipc/VRProcessChild.cpp', - 'ipc/VRProcessManager.cpp', - 'ipc/VRProcessParent.cpp', - 'VRDisplayClient.cpp', - 'VRDisplayPresentation.cpp', - 'VRThread.cpp', + "gfxVR.cpp", + "ipc/VRChild.cpp", + "ipc/VRGPUChild.cpp", + "ipc/VRGPUParent.cpp", + "ipc/VRManagerChild.cpp", + "ipc/VRManagerParent.cpp", + "ipc/VRParent.cpp", + "ipc/VRProcessChild.cpp", + "ipc/VRProcessManager.cpp", + "ipc/VRProcessParent.cpp", + "VRDisplayClient.cpp", + "VRDisplayPresentation.cpp", + "VRThread.cpp", ] SOURCES += [ - 'ipc/VRLayerChild.cpp', - 'ipc/VRLayerParent.cpp', - 'VRManager.cpp', - 'VRPuppetCommandBuffer.cpp', - 'VRShMem.cpp' + "ipc/VRLayerChild.cpp", + "ipc/VRLayerParent.cpp", + "VRManager.cpp", + "VRPuppetCommandBuffer.cpp", + "VRShMem.cpp", ] -if CONFIG['OS_TARGET'] == 'Android': - LOCAL_INCLUDES += ['/widget/android'] +if CONFIG["OS_TARGET"] == "Android": + LOCAL_INCLUDES += ["/widget/android"] else: DIRS += [ - 'service', + "service", ] UNIFIED_SOURCES += [ - 'VRServiceHost.cpp', + "VRServiceHost.cpp", ] # Only target x64 for vrhost since WebVR is only supported on 64bit. 
# Also, only use MSVC compiler for Windows-specific linking -if CONFIG['OS_ARCH'] == 'WINNT' and CONFIG['HAVE_64BIT_BUILD'] and CONFIG['CC_TYPE'] not in ('clang', 'gcc'): - DIRS += [ - 'vrhost' - ] +if ( + CONFIG["OS_ARCH"] == "WINNT" + and CONFIG["HAVE_64BIT_BUILD"] + and CONFIG["CC_TYPE"] not in ("clang", "gcc") +): + DIRS += ["vrhost"] IPDL_SOURCES = [ - 'ipc/PVR.ipdl', - 'ipc/PVRGPU.ipdl', - 'ipc/PVRLayer.ipdl', - 'ipc/PVRManager.ipdl', + "ipc/PVR.ipdl", + "ipc/PVRGPU.ipdl", + "ipc/PVRLayer.ipdl", + "ipc/PVRManager.ipdl", ] # For now, only enable FxR CLH for Windows Nightly builds (BUG 1565349) -if CONFIG['OS_ARCH'] == 'WINNT' and CONFIG['NIGHTLY_BUILD']: +if CONFIG["OS_ARCH"] == "WINNT" and CONFIG["NIGHTLY_BUILD"]: XPCOM_MANIFESTS += [ - 'components.conf', - ] - SOURCES += [ - 'nsFxrCommandLineHandler.cpp', - ] - -if CONFIG['OS_ARCH'] == 'WINNT': - LOCAL_INCLUDES += [ - '/layout/generic', - '/widget', - '/widget/windows' + "components.conf", ] SOURCES += [ - 'FxROutputHandler.cpp', - 'FxRWindowManager.cpp' + "nsFxrCommandLineHandler.cpp", ] -CXXFLAGS += CONFIG['MOZ_CAIRO_CFLAGS'] -CXXFLAGS += CONFIG['TK_CFLAGS'] -CFLAGS += CONFIG['MOZ_CAIRO_CFLAGS'] -CFLAGS += CONFIG['TK_CFLAGS'] +if CONFIG["OS_ARCH"] == "WINNT": + LOCAL_INCLUDES += ["/layout/generic", "/widget", "/widget/windows"] + SOURCES += ["FxROutputHandler.cpp", "FxRWindowManager.cpp"] -include('/ipc/chromium/chromium-config.mozbuild') +CXXFLAGS += CONFIG["MOZ_CAIRO_CFLAGS"] +CXXFLAGS += CONFIG["TK_CFLAGS"] +CFLAGS += CONFIG["MOZ_CAIRO_CFLAGS"] +CFLAGS += CONFIG["TK_CFLAGS"] -FINAL_LIBRARY = 'xul' +include("/ipc/chromium/chromium-config.mozbuild") -with Files('**'): - BUG_COMPONENT = ('Core', 'WebVR') +FINAL_LIBRARY = "xul" +with Files("**"): + BUG_COMPONENT = ("Core", "WebVR") diff --git a/gfx/vr/service/moz.build b/gfx/vr/service/moz.build index 8b77e80fafd140..0edd36ceecd1ad 100644 --- a/gfx/vr/service/moz.build +++ b/gfx/vr/service/moz.build @@ -5,47 +5,44 @@ # file, You can obtain one at http://mozilla.org/MPL/2.0/. # Build Oculus support on Windows only -if CONFIG['OS_TARGET'] == 'WINNT': +if CONFIG["OS_TARGET"] == "WINNT": SOURCES += [ - 'OculusSession.cpp', - 'OpenVRWMRMapper.cpp', + "OculusSession.cpp", + "OpenVRWMRMapper.cpp", ] # Build OSVR on all platforms except Android -if CONFIG['OS_TARGET'] != 'Android': +if CONFIG["OS_TARGET"] != "Android": UNIFIED_SOURCES += [ - 'OSVRSession.cpp', - 'VRService.cpp', - 'VRSession.cpp', + "OSVRSession.cpp", + "VRService.cpp", + "VRSession.cpp", ] # PuppetSession includes MacIOSurface.h which includes Mac headers # which define Size and Points types in the root namespace that # often conflict with our own types. SOURCES += [ - 'PuppetSession.cpp', + "PuppetSession.cpp", ] - include('/ipc/chromium/chromium-config.mozbuild') + include("/ipc/chromium/chromium-config.mozbuild") # Build OpenVR on Windows, Linux, and macOS desktop targets -if CONFIG['OS_TARGET'] in ('WINNT', 'Linux', 'Darwin'): +if CONFIG["OS_TARGET"] in ("WINNT", "Linux", "Darwin"): DIRS += [ - 'openvr', - ] - LOCAL_INCLUDES += [ - '/dom/base', - '/gfx/layers/d3d11' + "openvr", ] + LOCAL_INCLUDES += ["/dom/base", "/gfx/layers/d3d11"] # OpenVRSession includes MacIOSurface.h which includes Mac headers # which define Size and Points types in the root namespace that # often conflict with our own types. 
SOURCES += [ - 'OpenVRControllerMapper.cpp', - 'OpenVRCosmosMapper.cpp', - 'OpenVRDefaultMapper.cpp', - 'OpenVRKnucklesMapper.cpp', - 'OpenVRSession.cpp', - 'OpenVRViveMapper.cpp', + "OpenVRControllerMapper.cpp", + "OpenVRCosmosMapper.cpp", + "OpenVRDefaultMapper.cpp", + "OpenVRKnucklesMapper.cpp", + "OpenVRSession.cpp", + "OpenVRViveMapper.cpp", ] -FINAL_LIBRARY = 'xul' +FINAL_LIBRARY = "xul" diff --git a/gfx/vr/vrhost/moz.build b/gfx/vr/vrhost/moz.build index cdaad93e919704..1fa6e77f0388d1 100644 --- a/gfx/vr/vrhost/moz.build +++ b/gfx/vr/vrhost/moz.build @@ -4,47 +4,37 @@ # License, v. 2.0. If a copy of the MPL was not distributed with this # file, You can obtain one at http://mozilla.org/MPL/2.0/. -SOURCES += [ - '/gfx/vr/service/VRSession.cpp', - '/gfx/vr/VRShMem.cpp', - 'vrhostapi.cpp' -] +SOURCES += ["/gfx/vr/service/VRSession.cpp", "/gfx/vr/VRShMem.cpp", "vrhostapi.cpp"] # Since .def files do not support preprocessing, switch which file is used # to declare exports. See comments in the files for more info. -if CONFIG['NIGHTLY_BUILD']: - DEFFILE = 'vrhostnightly.def' - SOURCES += [ - 'vrhosttest.cpp' - ] +if CONFIG["NIGHTLY_BUILD"]: + DEFFILE = "vrhostnightly.def" + SOURCES += ["vrhosttest.cpp"] else: - DEFFILE = 'vrhost.def' + DEFFILE = "vrhost.def" LOCAL_INCLUDES += [ - '/gfx/vr', - '/gfx/vr/external_api', - '/gfx/vr/service', - '/ipc/chromium/src', + "/gfx/vr", + "/gfx/vr/external_api", + "/gfx/vr/service", + "/ipc/chromium/src", ] -EXPORTS.vrhost = [ - 'vrhostex.h' -] +EXPORTS.vrhost = ["vrhostex.h"] -DIRS += [ - 'testhost' -] +DIRS += ["testhost"] # this is Windows-only for now -DEFINES['XP_WIN'] = True +DEFINES["XP_WIN"] = True # fixes "lld-link: error: undefined symbol: __imp_moz_xmalloc" -DEFINES['MOZ_NO_MOZALLOC'] = True +DEFINES["MOZ_NO_MOZALLOC"] = True # fixes "STL code can only be used with infallible ::operator new()" DisableStlWrapping() # Define UNICODE for default support in this dll -DEFINES['UNICODE'] = True -DEFINES['_UNICODE'] = True +DEFINES["UNICODE"] = True +DEFINES["_UNICODE"] = True # Use SharedLibrary to generate the dll -SharedLibrary('vrhost') \ No newline at end of file +SharedLibrary("vrhost") diff --git a/gfx/vr/vrhost/testhost/moz.build b/gfx/vr/vrhost/testhost/moz.build index 7cb497c7d067c5..5b0d3b16e1c532 100644 --- a/gfx/vr/vrhost/testhost/moz.build +++ b/gfx/vr/vrhost/testhost/moz.build @@ -4,13 +4,9 @@ # License, v. 2.0. If a copy of the MPL was not distributed with this # file, You can obtain one at http://mozilla.org/MPL/2.0/. -SOURCES += [ - 'testhost.cpp' -] +SOURCES += ["testhost.cpp"] -USE_LIBS += [ - 'vrhost' -] +USE_LIBS += ["vrhost"] # Use Progam to generate the executable -Program('vrtesthost') \ No newline at end of file +Program("vrtesthost") diff --git a/gfx/webrender_bindings/moz.build b/gfx/webrender_bindings/moz.build index b5b3f4bf5b3820..6b05cebeb184da 100644 --- a/gfx/webrender_bindings/moz.build +++ b/gfx/webrender_bindings/moz.build @@ -4,115 +4,117 @@ # License, v. 2.0. If a copy of the MPL was not distributed with this # file, You can obtain one at http://mozilla.org/MPL/2.0/. 
-with Files('**'): - BUG_COMPONENT = ('Core', 'Graphics: WebRender') +with Files("**"): + BUG_COMPONENT = ("Core", "Graphics: WebRender") EXPORTS.mozilla.webrender += [ - 'RenderBufferTextureHost.h', - 'RenderBufferTextureHostSWGL.h', - 'RenderCompositor.h', - 'RenderCompositorEGL.h', - 'RenderCompositorOGL.h', - 'RenderCompositorSWGL.h', - 'RenderEGLImageTextureHost.h', - 'RendererOGL.h', - 'RendererScreenshotGrabber.h', - 'RenderExternalTextureHost.h', - 'RenderSharedSurfaceTextureHost.h', - 'RenderSharedSurfaceTextureHostSWGL.h', - 'RenderTextureHost.h', - 'RenderTextureHostSWGL.h', - 'RenderTextureHostWrapper.h', - 'RenderThread.h', - 'webrender_ffi.h', - 'WebRenderAPI.h', - 'WebRenderTypes.h', + "RenderBufferTextureHost.h", + "RenderBufferTextureHostSWGL.h", + "RenderCompositor.h", + "RenderCompositorEGL.h", + "RenderCompositorOGL.h", + "RenderCompositorSWGL.h", + "RenderEGLImageTextureHost.h", + "RendererOGL.h", + "RendererScreenshotGrabber.h", + "RenderExternalTextureHost.h", + "RenderSharedSurfaceTextureHost.h", + "RenderSharedSurfaceTextureHostSWGL.h", + "RenderTextureHost.h", + "RenderTextureHostSWGL.h", + "RenderTextureHostWrapper.h", + "RenderThread.h", + "webrender_ffi.h", + "WebRenderAPI.h", + "WebRenderTypes.h", ] UNIFIED_SOURCES += [ - 'Moz2DImageRenderer.cpp', - 'RenderBufferTextureHost.cpp', - 'RenderBufferTextureHostSWGL.cpp', - 'RenderCompositor.cpp', - 'RenderCompositorEGL.cpp', - 'RenderCompositorOGL.cpp', - 'RenderCompositorSWGL.cpp', - 'RenderEGLImageTextureHost.cpp', - 'RendererOGL.cpp', - 'RendererScreenshotGrabber.cpp', - 'RenderExternalTextureHost.cpp', - 'RenderSharedSurfaceTextureHost.cpp', - 'RenderSharedSurfaceTextureHostSWGL.cpp', - 'RenderTextureHost.cpp', - 'RenderTextureHostSWGL.cpp', - 'RenderTextureHostWrapper.cpp', - 'RenderThread.cpp', - 'WebRenderAPI.cpp', - 'WebRenderTypes.cpp', + "Moz2DImageRenderer.cpp", + "RenderBufferTextureHost.cpp", + "RenderBufferTextureHostSWGL.cpp", + "RenderCompositor.cpp", + "RenderCompositorEGL.cpp", + "RenderCompositorOGL.cpp", + "RenderCompositorSWGL.cpp", + "RenderEGLImageTextureHost.cpp", + "RendererOGL.cpp", + "RendererScreenshotGrabber.cpp", + "RenderExternalTextureHost.cpp", + "RenderSharedSurfaceTextureHost.cpp", + "RenderSharedSurfaceTextureHostSWGL.cpp", + "RenderTextureHost.cpp", + "RenderTextureHostSWGL.cpp", + "RenderTextureHostWrapper.cpp", + "RenderThread.cpp", + "WebRenderAPI.cpp", + "WebRenderTypes.cpp", ] -if CONFIG['MOZ_WIDGET_TOOLKIT'] == 'cocoa': +if CONFIG["MOZ_WIDGET_TOOLKIT"] == "cocoa": EXPORTS.mozilla.webrender += [ - 'RenderCompositorNative.h', - 'RenderMacIOSurfaceTextureHost.h', + "RenderCompositorNative.h", + "RenderMacIOSurfaceTextureHost.h", ] UNIFIED_SOURCES += [ - 'RenderCompositorNative.cpp', - 'RenderMacIOSurfaceTextureHost.cpp', + "RenderCompositorNative.cpp", + "RenderMacIOSurfaceTextureHost.cpp", ] -if CONFIG['MOZ_WIDGET_TOOLKIT'] == 'android': +if CONFIG["MOZ_WIDGET_TOOLKIT"] == "android": EXPORTS.mozilla.webrender += [ - 'RenderAndroidHardwareBufferTextureHost.h', - 'RenderAndroidSurfaceTextureHost.h', + "RenderAndroidHardwareBufferTextureHost.h", + "RenderAndroidSurfaceTextureHost.h", ] UNIFIED_SOURCES += [ - 'RenderAndroidHardwareBufferTextureHost.cpp', - 'RenderAndroidSurfaceTextureHost.cpp', + "RenderAndroidHardwareBufferTextureHost.cpp", + "RenderAndroidSurfaceTextureHost.cpp", ] -if CONFIG['MOZ_ENABLE_D3D10_LAYER']: - DEFINES['MOZ_ENABLE_D3D10_LAYER'] = True +if CONFIG["MOZ_ENABLE_D3D10_LAYER"]: + DEFINES["MOZ_ENABLE_D3D10_LAYER"] = True EXPORTS.mozilla.webrender += [ - 
'DCLayerTree.h', - 'RenderCompositorANGLE.h', - 'RenderD3D11TextureHost.h', + "DCLayerTree.h", + "RenderCompositorANGLE.h", + "RenderD3D11TextureHost.h", ] UNIFIED_SOURCES += [ - 'RenderD3D11TextureHost.cpp', + "RenderD3D11TextureHost.cpp", ] SOURCES += [ - 'DCLayerTree.cpp', - 'RenderCompositorANGLE.cpp', + "DCLayerTree.cpp", + "RenderCompositorANGLE.cpp", ] -if CONFIG['MOZ_WAYLAND']: +if CONFIG["MOZ_WAYLAND"]: EXPORTS.mozilla.webrender += [ - 'RenderDMABUFTextureHost.h', + "RenderDMABUFTextureHost.h", ] SOURCES += [ - 'RenderDMABUFTextureHost.cpp', + "RenderDMABUFTextureHost.cpp", ] -if CONFIG['MOZ_WIDGET_TOOLKIT'] in ('android', 'gtk'): - CXXFLAGS += CONFIG['MOZ_CAIRO_CFLAGS'] - CXXFLAGS += CONFIG['CAIRO_FT_CFLAGS'] +if CONFIG["MOZ_WIDGET_TOOLKIT"] in ("android", "gtk"): + CXXFLAGS += CONFIG["MOZ_CAIRO_CFLAGS"] + CXXFLAGS += CONFIG["CAIRO_FT_CFLAGS"] -if CONFIG['COMPILE_ENVIRONMENT']: +if CONFIG["COMPILE_ENVIRONMENT"]: EXPORTS.mozilla.webrender += [ - '!webrender_ffi_generated.h', + "!webrender_ffi_generated.h", ] - CbindgenHeader('webrender_ffi_generated.h', - inputs=[ - '/gfx/webrender_bindings', - '/gfx/wr/webrender', - '/gfx/wr/webrender_api', - ]) + CbindgenHeader( + "webrender_ffi_generated.h", + inputs=[ + "/gfx/webrender_bindings", + "/gfx/wr/webrender", + "/gfx/wr/webrender_api", + ], + ) -include('/ipc/chromium/chromium-config.mozbuild') +include("/ipc/chromium/chromium-config.mozbuild") -FINAL_LIBRARY = 'xul' +FINAL_LIBRARY = "xul" -CXXFLAGS += CONFIG['TK_CFLAGS'] +CXXFLAGS += CONFIG["TK_CFLAGS"] diff --git a/gfx/wgpu_bindings/moz.build b/gfx/wgpu_bindings/moz.build index 448fa47ae61b87..6f948a2eb2697a 100644 --- a/gfx/wgpu_bindings/moz.build +++ b/gfx/wgpu_bindings/moz.build @@ -5,20 +5,19 @@ # file, You can obtain one at http://mozilla.org/MPL/2.0/. EXPORTS.mozilla.webgpu.ffi += [ - 'wgpu.h', + "wgpu.h", ] -UNIFIED_SOURCES += [ -] +UNIFIED_SOURCES += [] -if CONFIG['COMPILE_ENVIRONMENT']: - CbindgenHeader('wgpu_ffi_generated.h', inputs=['/gfx/wgpu_bindings', '/gfx/wgpu']) +if CONFIG["COMPILE_ENVIRONMENT"]: + CbindgenHeader("wgpu_ffi_generated.h", inputs=["/gfx/wgpu_bindings", "/gfx/wgpu"]) EXPORTS.mozilla.webgpu.ffi += [ - '!wgpu_ffi_generated.h', + "!wgpu_ffi_generated.h", ] -include('/ipc/chromium/chromium-config.mozbuild') +include("/ipc/chromium/chromium-config.mozbuild") -FINAL_LIBRARY = 'xul' +FINAL_LIBRARY = "xul" diff --git a/hal/moz.build b/hal/moz.build index 7ee2a9fbb81127..268b24cebf59d2 100644 --- a/hal/moz.build +++ b/hal/moz.build @@ -3,132 +3,132 @@ # This Source Code Form is subject to the terms of the Mozilla Public # License, v. 2.0. If a copy of the MPL was not distributed with this # file, You can obtain one at http://mozilla.org/MPL/2.0/. 
- -with Files('**'): - BUG_COMPONENT = ('Core', 'Hardware Abstraction Layer (HAL)') + +with Files("**"): + BUG_COMPONENT = ("Core", "Hardware Abstraction Layer (HAL)") EXPORTS.mozilla += [ - 'Hal.h', - 'HalBatteryInformation.h', - 'HalImpl.h', - 'HalNetworkInformation.h', - 'HalSandbox.h', - 'HalScreenConfiguration.h', - 'HalSensor.h', - 'HalTypes.h', - 'HalWakeLock.h', - 'HalWakeLockInformation.h', + "Hal.h", + "HalBatteryInformation.h", + "HalImpl.h", + "HalNetworkInformation.h", + "HalSandbox.h", + "HalScreenConfiguration.h", + "HalSensor.h", + "HalTypes.h", + "HalWakeLock.h", + "HalWakeLockInformation.h", ] EXPORTS.mozilla.fallback += [ - 'fallback/FallbackScreenConfiguration.h', + "fallback/FallbackScreenConfiguration.h", ] UNIFIED_SOURCES += [ - 'HalWakeLock.cpp', - 'sandbox/SandboxHal.cpp', - 'WindowIdentifier.cpp', + "HalWakeLock.cpp", + "sandbox/SandboxHal.cpp", + "WindowIdentifier.cpp", ] # Hal.cpp cannot be built in unified mode because it relies on HalImpl.h. SOURCES += [ - 'Hal.cpp', + "Hal.cpp", ] -if CONFIG['MOZ_WIDGET_TOOLKIT'] == 'android': +if CONFIG["MOZ_WIDGET_TOOLKIT"] == "android": LOCAL_INCLUDES += [ - '/widget/android', + "/widget/android", ] UNIFIED_SOURCES += [ - 'android/AndroidProcessPriority.cpp', - 'android/AndroidSensor.cpp', + "android/AndroidProcessPriority.cpp", + "android/AndroidSensor.cpp", ] # AndroidHal.cpp cannot be built in unified mode because it relies on HalImpl.h. SOURCES += [ - 'android/AndroidHal.cpp', + "android/AndroidHal.cpp", ] -elif CONFIG['OS_TARGET'] == 'Linux': +elif CONFIG["OS_TARGET"] == "Linux": UNIFIED_SOURCES += [ - 'fallback/FallbackProcessPriority.cpp', - 'fallback/FallbackScreenConfiguration.cpp', - 'fallback/FallbackSensor.cpp', - 'fallback/FallbackVibration.cpp', + "fallback/FallbackProcessPriority.cpp", + "fallback/FallbackScreenConfiguration.cpp", + "fallback/FallbackSensor.cpp", + "fallback/FallbackVibration.cpp", ] - if CONFIG['MOZ_ENABLE_DBUS']: + if CONFIG["MOZ_ENABLE_DBUS"]: UNIFIED_SOURCES += [ - 'linux/UPowerClient.cpp', + "linux/UPowerClient.cpp", ] else: UNIFIED_SOURCES += [ - 'fallback/FallbackBattery.cpp', + "fallback/FallbackBattery.cpp", ] -elif CONFIG['OS_TARGET'] == 'WINNT': +elif CONFIG["OS_TARGET"] == "WINNT": UNIFIED_SOURCES += [ - 'fallback/FallbackScreenConfiguration.cpp', - 'fallback/FallbackVibration.cpp', - 'windows/WindowsProcessPriority.cpp', - 'windows/WindowsSensor.cpp', + "fallback/FallbackScreenConfiguration.cpp", + "fallback/FallbackVibration.cpp", + "windows/WindowsProcessPriority.cpp", + "windows/WindowsSensor.cpp", ] # WindowsBattery.cpp cannot be built in unified mode because it relies on HalImpl.h. 
SOURCES += [ - 'windows/WindowsBattery.cpp', + "windows/WindowsBattery.cpp", ] -elif CONFIG['MOZ_WIDGET_TOOLKIT'] == 'cocoa': +elif CONFIG["MOZ_WIDGET_TOOLKIT"] == "cocoa": UNIFIED_SOURCES += [ - 'cocoa/CocoaBattery.cpp', - 'fallback/FallbackProcessPriority.cpp', - 'fallback/FallbackScreenConfiguration.cpp', - 'fallback/FallbackVibration.cpp', + "cocoa/CocoaBattery.cpp", + "fallback/FallbackProcessPriority.cpp", + "fallback/FallbackScreenConfiguration.cpp", + "fallback/FallbackVibration.cpp", ] -elif CONFIG['OS_TARGET'] in ('OpenBSD', 'NetBSD', 'FreeBSD', 'DragonFly'): +elif CONFIG["OS_TARGET"] in ("OpenBSD", "NetBSD", "FreeBSD", "DragonFly"): UNIFIED_SOURCES += [ - 'fallback/FallbackProcessPriority.cpp', - 'fallback/FallbackScreenConfiguration.cpp', - 'fallback/FallbackSensor.cpp', - 'fallback/FallbackVibration.cpp', + "fallback/FallbackProcessPriority.cpp", + "fallback/FallbackScreenConfiguration.cpp", + "fallback/FallbackSensor.cpp", + "fallback/FallbackVibration.cpp", ] - if CONFIG['MOZ_ENABLE_DBUS']: + if CONFIG["MOZ_ENABLE_DBUS"]: UNIFIED_SOURCES += [ - 'linux/UPowerClient.cpp', + "linux/UPowerClient.cpp", ] else: UNIFIED_SOURCES += [ - 'fallback/FallbackBattery.cpp', + "fallback/FallbackBattery.cpp", ] else: UNIFIED_SOURCES += [ - 'fallback/FallbackBattery.cpp', - 'fallback/FallbackProcessPriority.cpp', - 'fallback/FallbackScreenConfiguration.cpp', - 'fallback/FallbackSensor.cpp', - 'fallback/FallbackVibration.cpp', + "fallback/FallbackBattery.cpp", + "fallback/FallbackProcessPriority.cpp", + "fallback/FallbackScreenConfiguration.cpp", + "fallback/FallbackSensor.cpp", + "fallback/FallbackVibration.cpp", ] # Fallbacks for backends implemented on Android only. -if CONFIG['MOZ_WIDGET_TOOLKIT'] != 'android': +if CONFIG["MOZ_WIDGET_TOOLKIT"] != "android": UNIFIED_SOURCES += [ - 'fallback/FallbackNetwork.cpp', + "fallback/FallbackNetwork.cpp", ] -if CONFIG['MOZ_WIDGET_TOOLKIT'] == 'cocoa': +if CONFIG["MOZ_WIDGET_TOOLKIT"] == "cocoa": UNIFIED_SOURCES += [ - 'cocoa/CocoaSensor.mm', - 'cocoa/smslib.mm', + "cocoa/CocoaSensor.mm", + "cocoa/smslib.mm", ] IPDL_SOURCES = [ - 'sandbox/PHal.ipdl', + "sandbox/PHal.ipdl", ] -include('/ipc/chromium/chromium-config.mozbuild') +include("/ipc/chromium/chromium-config.mozbuild") -FINAL_LIBRARY = 'xul' +FINAL_LIBRARY = "xul" LOCAL_INCLUDES += [ - '/dom/base', + "/dom/base", ] -CFLAGS += CONFIG['GLIB_CFLAGS'] -CFLAGS += CONFIG['MOZ_DBUS_GLIB_CFLAGS'] -CXXFLAGS += CONFIG['GLIB_CFLAGS'] -CXXFLAGS += CONFIG['MOZ_DBUS_GLIB_CFLAGS'] +CFLAGS += CONFIG["GLIB_CFLAGS"] +CFLAGS += CONFIG["MOZ_DBUS_GLIB_CFLAGS"] +CXXFLAGS += CONFIG["GLIB_CFLAGS"] +CXXFLAGS += CONFIG["MOZ_DBUS_GLIB_CFLAGS"] diff --git a/image/build/moz.build b/image/build/moz.build index c1ea68486c0b75..7ba10e42b9babb 100644 --- a/image/build/moz.build +++ b/image/build/moz.build @@ -5,22 +5,22 @@ # file, You can obtain one at http://mozilla.org/MPL/2.0/. 
EXPORTS += [ - 'nsImageModule.h', + "nsImageModule.h", ] SOURCES += [ - 'nsImageModule.cpp', + "nsImageModule.cpp", ] XPCOM_MANIFESTS += [ - 'components.conf', + "components.conf", ] -FINAL_LIBRARY = 'xul' +FINAL_LIBRARY = "xul" LOCAL_INCLUDES += [ - '/image', - '/image/encoders/bmp', - '/image/encoders/ico', - '/image/encoders/jpeg', - '/image/encoders/png', + "/image", + "/image/encoders/bmp", + "/image/encoders/ico", + "/image/encoders/jpeg", + "/image/encoders/png", ] diff --git a/image/decoders/icon/android/moz.build b/image/decoders/icon/android/moz.build index 5e58ff0b60bb45..a99ae228d4981f 100644 --- a/image/decoders/icon/android/moz.build +++ b/image/decoders/icon/android/moz.build @@ -5,9 +5,9 @@ # file, You can obtain one at http://mozilla.org/MPL/2.0/. SOURCES += [ - 'nsIconChannel.cpp', + "nsIconChannel.cpp", ] -include('/ipc/chromium/chromium-config.mozbuild') +include("/ipc/chromium/chromium-config.mozbuild") -FINAL_LIBRARY = 'xul' +FINAL_LIBRARY = "xul" diff --git a/image/decoders/icon/gtk/moz.build b/image/decoders/icon/gtk/moz.build index 2992bb904644dc..09acc05231661c 100644 --- a/image/decoders/icon/gtk/moz.build +++ b/image/decoders/icon/gtk/moz.build @@ -5,9 +5,9 @@ # file, You can obtain one at http://mozilla.org/MPL/2.0/. SOURCES += [ - 'nsIconChannel.cpp', + "nsIconChannel.cpp", ] -FINAL_LIBRARY = 'xul' +FINAL_LIBRARY = "xul" -CXXFLAGS += CONFIG['TK_CFLAGS'] +CXXFLAGS += CONFIG["TK_CFLAGS"] diff --git a/image/decoders/icon/mac/moz.build b/image/decoders/icon/mac/moz.build index 158c326ea6eb15..fee884a473e712 100644 --- a/image/decoders/icon/mac/moz.build +++ b/image/decoders/icon/mac/moz.build @@ -5,7 +5,7 @@ # file, You can obtain one at http://mozilla.org/MPL/2.0/. SOURCES += [ - 'nsIconChannelCocoa.mm', + "nsIconChannelCocoa.mm", ] -FINAL_LIBRARY = 'xul' +FINAL_LIBRARY = "xul" diff --git a/image/decoders/icon/moz.build b/image/decoders/icon/moz.build index c87163cb7f0a5e..96cf951b3bc2ef 100644 --- a/image/decoders/icon/moz.build +++ b/image/decoders/icon/moz.build @@ -5,35 +5,35 @@ # file, You can obtain one at http://mozilla.org/MPL/2.0/. UNIFIED_SOURCES += [ - 'nsIconProtocolHandler.cpp', - 'nsIconURI.cpp', + "nsIconProtocolHandler.cpp", + "nsIconURI.cpp", ] XPCOM_MANIFESTS += [ - 'components.conf', + "components.conf", ] -FINAL_LIBRARY = 'xul' +FINAL_LIBRARY = "xul" -include('/ipc/chromium/chromium-config.mozbuild') +include("/ipc/chromium/chromium-config.mozbuild") EXPORTS += [ - 'nsIconURI.h', + "nsIconURI.h", ] platform = None -if CONFIG['MOZ_WIDGET_TOOLKIT'] == 'gtk': - platform = 'gtk' +if CONFIG["MOZ_WIDGET_TOOLKIT"] == "gtk": + platform = "gtk" -if CONFIG['OS_ARCH'] == 'WINNT': - platform = 'win' +if CONFIG["OS_ARCH"] == "WINNT": + platform = "win" -if CONFIG['MOZ_WIDGET_TOOLKIT'] == 'cocoa': - platform = 'mac' +if CONFIG["MOZ_WIDGET_TOOLKIT"] == "cocoa": + platform = "mac" -if CONFIG['OS_TARGET'] == 'Android': - platform = 'android' +if CONFIG["OS_TARGET"] == "Android": + platform = "android" if platform: LOCAL_INCLUDES += [platform] diff --git a/image/decoders/icon/win/moz.build b/image/decoders/icon/win/moz.build index 6f9e24885c8499..2a06f68fecece3 100644 --- a/image/decoders/icon/win/moz.build +++ b/image/decoders/icon/win/moz.build @@ -5,11 +5,11 @@ # file, You can obtain one at http://mozilla.org/MPL/2.0/. 
SOURCES += [ - 'nsIconChannel.cpp', + "nsIconChannel.cpp", ] LOCAL_INCLUDES += [ - '/image', + "/image", ] -FINAL_LIBRARY = 'xul' +FINAL_LIBRARY = "xul" diff --git a/image/decoders/moz.build b/image/decoders/moz.build index 9d239008bf3646..848846bee4cc98 100644 --- a/image/decoders/moz.build +++ b/image/decoders/moz.build @@ -4,53 +4,53 @@ # License, v. 2.0. If a copy of the MPL was not distributed with this # file, You can obtain one at http://mozilla.org/MPL/2.0/. -toolkit = CONFIG['MOZ_WIDGET_TOOLKIT'] +toolkit = CONFIG["MOZ_WIDGET_TOOLKIT"] # The Icon Channel stuff really shouldn't live in decoders/icon, but we'll # fix that another time. -if toolkit == 'gtk': - DIRS += ['icon/gtk', 'icon'] +if toolkit == "gtk": + DIRS += ["icon/gtk", "icon"] -if CONFIG['OS_ARCH'] == 'WINNT': - DIRS += ['icon/win', 'icon'] +if CONFIG["OS_ARCH"] == "WINNT": + DIRS += ["icon/win", "icon"] -if toolkit == 'cocoa': - DIRS += ['icon/mac', 'icon'] -elif toolkit == 'android': - DIRS += ['icon/android', 'icon'] +if toolkit == "cocoa": + DIRS += ["icon/mac", "icon"] +elif toolkit == "android": + DIRS += ["icon/android", "icon"] UNIFIED_SOURCES += [ - 'EXIF.cpp', - 'iccjpeg.c', - 'nsBMPDecoder.cpp', - 'nsGIFDecoder2.cpp', - 'nsICODecoder.cpp', - 'nsIconDecoder.cpp', - 'nsJPEGDecoder.cpp', - 'nsPNGDecoder.cpp', - 'nsWebPDecoder.cpp', + "EXIF.cpp", + "iccjpeg.c", + "nsBMPDecoder.cpp", + "nsGIFDecoder2.cpp", + "nsICODecoder.cpp", + "nsIconDecoder.cpp", + "nsJPEGDecoder.cpp", + "nsPNGDecoder.cpp", + "nsWebPDecoder.cpp", ] -if CONFIG['MOZ_AV1']: +if CONFIG["MOZ_AV1"]: UNIFIED_SOURCES += [ - 'nsAVIFDecoder.cpp', + "nsAVIFDecoder.cpp", ] -include('/ipc/chromium/chromium-config.mozbuild') +include("/ipc/chromium/chromium-config.mozbuild") LOCAL_INCLUDES += [ # Access to Skia headers for Downscaler. - '/gfx/2d', + "/gfx/2d", # Decoders need ImageLib headers. - '/image', + "/image", ] -LOCAL_INCLUDES += CONFIG['SKIA_INCLUDES'] +LOCAL_INCLUDES += CONFIG["SKIA_INCLUDES"] -FINAL_LIBRARY = 'xul' +FINAL_LIBRARY = "xul" -if CONFIG['CC_TYPE'] in ('clang', 'gcc'): - CXXFLAGS += ['-Wno-error=shadow'] +if CONFIG["CC_TYPE"] in ("clang", "gcc"): + CXXFLAGS += ["-Wno-error=shadow"] # Add libFuzzer configuration directives -include('/tools/fuzzing/libfuzzer-config.mozbuild') +include("/tools/fuzzing/libfuzzer-config.mozbuild") diff --git a/image/encoders/bmp/moz.build b/image/encoders/bmp/moz.build index f061d067a68e36..783fd37005119a 100644 --- a/image/encoders/bmp/moz.build +++ b/image/encoders/bmp/moz.build @@ -5,11 +5,11 @@ # file, You can obtain one at http://mozilla.org/MPL/2.0/. SOURCES += [ - 'nsBMPEncoder.cpp', + "nsBMPEncoder.cpp", ] LOCAL_INCLUDES += [ - '/image', + "/image", ] -FINAL_LIBRARY = 'xul' +FINAL_LIBRARY = "xul" diff --git a/image/encoders/ico/moz.build b/image/encoders/ico/moz.build index c45d49aaf741f3..1a3d62956d77f7 100644 --- a/image/encoders/ico/moz.build +++ b/image/encoders/ico/moz.build @@ -5,14 +5,14 @@ # file, You can obtain one at http://mozilla.org/MPL/2.0/. SOURCES += [ - 'nsICOEncoder.cpp', + "nsICOEncoder.cpp", ] # Decoders need RasterImage.h LOCAL_INCLUDES += [ - '/image', - '/image/encoders/bmp', - '/image/encoders/png', + "/image", + "/image/encoders/bmp", + "/image/encoders/png", ] -FINAL_LIBRARY = 'xul' +FINAL_LIBRARY = "xul" diff --git a/image/encoders/jpeg/moz.build b/image/encoders/jpeg/moz.build index 9e5551ce6b4ac2..6952fc1b9ff03e 100644 --- a/image/encoders/jpeg/moz.build +++ b/image/encoders/jpeg/moz.build @@ -5,7 +5,7 @@ # file, You can obtain one at http://mozilla.org/MPL/2.0/. 
SOURCES += [ - 'nsJPEGEncoder.cpp', + "nsJPEGEncoder.cpp", ] -FINAL_LIBRARY = 'xul' +FINAL_LIBRARY = "xul" diff --git a/image/encoders/moz.build b/image/encoders/moz.build index 1a76d773a3e6cd..c73a3167ebf439 100644 --- a/image/encoders/moz.build +++ b/image/encoders/moz.build @@ -5,8 +5,8 @@ # file, You can obtain one at http://mozilla.org/MPL/2.0/. DIRS += [ - 'ico', - 'png', - 'jpeg', - 'bmp', + "ico", + "png", + "jpeg", + "bmp", ] diff --git a/image/encoders/png/moz.build b/image/encoders/png/moz.build index e6665e424617c0..ca29199dcd0588 100644 --- a/image/encoders/png/moz.build +++ b/image/encoders/png/moz.build @@ -5,11 +5,11 @@ # file, You can obtain one at http://mozilla.org/MPL/2.0/. SOURCES += [ - 'nsPNGEncoder.cpp', + "nsPNGEncoder.cpp", ] LOCAL_INCLUDES += [ - '/image', + "/image", ] -FINAL_LIBRARY = 'xul' +FINAL_LIBRARY = "xul" diff --git a/image/moz.build b/image/moz.build index 83a3576704a239..427077f4c0baaa 100644 --- a/image/moz.build +++ b/image/moz.build @@ -4,132 +4,132 @@ # License, v. 2.0. If a copy of the MPL was not distributed with this # file, You can obtain one at http://mozilla.org/MPL/2.0/. -DIRS += ['build', 'decoders', 'encoders'] -if CONFIG['ENABLE_TESTS']: - DIRS += ['test/gtest'] +DIRS += ["build", "decoders", "encoders"] +if CONFIG["ENABLE_TESTS"]: + DIRS += ["test/gtest"] -if CONFIG['FUZZING_INTERFACES']: - DIRS += ['test/fuzzing'] +if CONFIG["FUZZING_INTERFACES"]: + DIRS += ["test/fuzzing"] -with Files('**'): - BUG_COMPONENT = ('Core', 'ImageLib') +with Files("**"): + BUG_COMPONENT = ("Core", "ImageLib") -BROWSER_CHROME_MANIFESTS += ['test/browser/browser.ini'] +BROWSER_CHROME_MANIFESTS += ["test/browser/browser.ini"] -MOCHITEST_MANIFESTS += ['test/mochitest/mochitest.ini'] +MOCHITEST_MANIFESTS += ["test/mochitest/mochitest.ini"] -MOCHITEST_CHROME_MANIFESTS += ['test/mochitest/chrome.ini'] +MOCHITEST_CHROME_MANIFESTS += ["test/mochitest/chrome.ini"] -XPCSHELL_TESTS_MANIFESTS += ['test/unit/xpcshell.ini'] +XPCSHELL_TESTS_MANIFESTS += ["test/unit/xpcshell.ini"] XPIDL_SOURCES += [ - 'imgICache.idl', - 'imgIContainer.idl', - 'imgIContainerDebug.idl', - 'imgIEncoder.idl', - 'imgILoader.idl', - 'imgINotificationObserver.idl', - 'imgIRequest.idl', - 'imgIScriptedNotificationObserver.idl', - 'imgITools.idl', - 'nsIIconURI.idl', + "imgICache.idl", + "imgIContainer.idl", + "imgIContainerDebug.idl", + "imgIEncoder.idl", + "imgILoader.idl", + "imgINotificationObserver.idl", + "imgIRequest.idl", + "imgIScriptedNotificationObserver.idl", + "imgITools.idl", + "nsIIconURI.idl", ] -XPIDL_MODULE = 'imglib2' +XPIDL_MODULE = "imglib2" EXPORTS += [ - 'FrameTimeout.h', - 'ImageBlocker.h', - 'ImageCacheKey.h', - 'ImageLogging.h', - 'ImageMetadata.h', - 'ImageOps.h', - 'ImageRegion.h', - 'ImgDrawResult.h', - 'imgLoader.h', - 'imgRequest.h', - 'imgRequestProxy.h', - 'IProgressObserver.h', - 'Orientation.h', - 'SurfaceCacheUtils.h', + "FrameTimeout.h", + "ImageBlocker.h", + "ImageCacheKey.h", + "ImageLogging.h", + "ImageMetadata.h", + "ImageOps.h", + "ImageRegion.h", + "ImgDrawResult.h", + "imgLoader.h", + "imgRequest.h", + "imgRequestProxy.h", + "IProgressObserver.h", + "Orientation.h", + "SurfaceCacheUtils.h", ] EXPORTS.mozilla.image += [ - 'encoders/bmp/nsBMPEncoder.h', - 'encoders/ico/nsICOEncoder.h', - 'encoders/jpeg/nsJPEGEncoder.h', - 'encoders/png/nsPNGEncoder.h', - 'ICOFileHeaders.h', - 'ImageMemoryReporter.h', + "encoders/bmp/nsBMPEncoder.h", + "encoders/ico/nsICOEncoder.h", + "encoders/jpeg/nsJPEGEncoder.h", + "encoders/png/nsPNGEncoder.h", + "ICOFileHeaders.h", + 
"ImageMemoryReporter.h", ] UNIFIED_SOURCES += [ - 'AnimationFrameBuffer.cpp', - 'AnimationSurfaceProvider.cpp', - 'ClippedImage.cpp', - 'DecodedSurfaceProvider.cpp', - 'Decoder.cpp', - 'DecoderFactory.cpp', - 'DynamicImage.cpp', - 'FrameAnimator.cpp', - 'FrozenImage.cpp', - 'IDecodingTask.cpp', - 'Image.cpp', - 'ImageBlocker.cpp', - 'ImageCacheKey.cpp', - 'ImageFactory.cpp', - 'ImageMemoryReporter.cpp', - 'ImageOps.cpp', - 'ImageWrapper.cpp', - 'imgFrame.cpp', - 'imgLoader.cpp', - 'imgRequest.cpp', - 'imgRequestProxy.cpp', - 'imgTools.cpp', - 'MultipartImage.cpp', - 'OrientedImage.cpp', - 'ProgressTracker.cpp', - 'RasterImage.cpp', - 'ScriptedNotificationObserver.cpp', - 'ShutdownTracker.cpp', - 'SourceBuffer.cpp', - 'SurfaceCache.cpp', - 'SurfaceCacheUtils.cpp', - 'SurfacePipe.cpp', - 'SVGDocumentWrapper.cpp', - 'VectorImage.cpp', + "AnimationFrameBuffer.cpp", + "AnimationSurfaceProvider.cpp", + "ClippedImage.cpp", + "DecodedSurfaceProvider.cpp", + "Decoder.cpp", + "DecoderFactory.cpp", + "DynamicImage.cpp", + "FrameAnimator.cpp", + "FrozenImage.cpp", + "IDecodingTask.cpp", + "Image.cpp", + "ImageBlocker.cpp", + "ImageCacheKey.cpp", + "ImageFactory.cpp", + "ImageMemoryReporter.cpp", + "ImageOps.cpp", + "ImageWrapper.cpp", + "imgFrame.cpp", + "imgLoader.cpp", + "imgRequest.cpp", + "imgRequestProxy.cpp", + "imgTools.cpp", + "MultipartImage.cpp", + "OrientedImage.cpp", + "ProgressTracker.cpp", + "RasterImage.cpp", + "ScriptedNotificationObserver.cpp", + "ShutdownTracker.cpp", + "SourceBuffer.cpp", + "SurfaceCache.cpp", + "SurfaceCacheUtils.cpp", + "SurfacePipe.cpp", + "SVGDocumentWrapper.cpp", + "VectorImage.cpp", ] -if CONFIG['MOZ_ENABLE_SKIA']: - UNIFIED_SOURCES += [ 'Downscaler.cpp'] +if CONFIG["MOZ_ENABLE_SKIA"]: + UNIFIED_SOURCES += ["Downscaler.cpp"] -if CONFIG['MOZ_WIDGET_TOOLKIT'] == 'windows': - SOURCES += [ 'DecodePool.cpp'] +if CONFIG["MOZ_WIDGET_TOOLKIT"] == "windows": + SOURCES += ["DecodePool.cpp"] else: - UNIFIED_SOURCES += [ 'DecodePool.cpp'] + UNIFIED_SOURCES += ["DecodePool.cpp"] -include('/ipc/chromium/chromium-config.mozbuild') +include("/ipc/chromium/chromium-config.mozbuild") -FINAL_LIBRARY = 'xul' +FINAL_LIBRARY = "xul" LOCAL_INCLUDES += [ # Because SVGDocumentWrapper.cpp includes "mozilla/dom/SVGSVGElement.h" - '/dom/base', - '/dom/svg', + "/dom/base", + "/dom/svg", # We need to instantiate the decoders - '/image/decoders', + "/image/decoders", # For URI-related functionality - '/netwerk/base', + "/netwerk/base", # DecodePool uses thread-related facilities. - '/xpcom/threads', + "/xpcom/threads", ] # Because imgFrame.cpp includes "cairo.h" -CXXFLAGS += CONFIG['MOZ_CAIRO_CFLAGS'] +CXXFLAGS += CONFIG["MOZ_CAIRO_CFLAGS"] -LOCAL_INCLUDES += CONFIG['SKIA_INCLUDES'] +LOCAL_INCLUDES += CONFIG["SKIA_INCLUDES"] -if CONFIG['CC_TYPE'] in ('clang', 'gcc'): - CXXFLAGS += ['-Wno-error=shadow'] +if CONFIG["CC_TYPE"] in ("clang", "gcc"): + CXXFLAGS += ["-Wno-error=shadow"] # Add libFuzzer configuration directives -include('/tools/fuzzing/libfuzzer-config.mozbuild') +include("/tools/fuzzing/libfuzzer-config.mozbuild") diff --git a/image/test/fuzzing/moz.build b/image/test/fuzzing/moz.build index b9087cfb5a088a..24af56396f7663 100644 --- a/image/test/fuzzing/moz.build +++ b/image/test/fuzzing/moz.build @@ -4,21 +4,21 @@ # License, v. 2.0. If a copy of the MPL was not distributed with this # file, You can obtain one at http://mozilla.org/MPL/2.0/. 
-Library('FuzzingImage') +Library("FuzzingImage") SOURCES += [ - 'TestDecoders.cpp', + "TestDecoders.cpp", ] -FINAL_LIBRARY = 'xul-gtest' +FINAL_LIBRARY = "xul-gtest" -include('/ipc/chromium/chromium-config.mozbuild') +include("/ipc/chromium/chromium-config.mozbuild") LOCAL_INCLUDES += [ - '/dom/base', - '/gfx/2d', - '/image', - '/image/test/gtest', + "/dom/base", + "/gfx/2d", + "/image", + "/image/test/gtest", ] -LOCAL_INCLUDES += CONFIG['SKIA_INCLUDES'] +LOCAL_INCLUDES += CONFIG["SKIA_INCLUDES"] diff --git a/image/test/gtest/moz.build b/image/test/gtest/moz.build index 10febf71115e94..b750a619d4f144 100644 --- a/image/test/gtest/moz.build +++ b/image/test/gtest/moz.build @@ -4,120 +4,120 @@ # License, v. 2.0. If a copy of the MPL was not distributed with this # file, You can obtain one at http://mozilla.org/MPL/2.0/. -Library('imagetest') +Library("imagetest") UNIFIED_SOURCES = [ - 'Common.cpp', - 'TestADAM7InterpolatingFilter.cpp', - 'TestAnimationFrameBuffer.cpp', - 'TestBlendAnimationFilter.cpp', - 'TestContainers.cpp', - 'TestCopyOnWrite.cpp', - 'TestDeinterlacingFilter.cpp', - 'TestFrameAnimator.cpp', - 'TestLoader.cpp', - 'TestRemoveFrameRectFilter.cpp', - 'TestStreamingLexer.cpp', - 'TestSurfaceSink.cpp', - 'TestSwizzleFilter.cpp', + "Common.cpp", + "TestADAM7InterpolatingFilter.cpp", + "TestAnimationFrameBuffer.cpp", + "TestBlendAnimationFilter.cpp", + "TestContainers.cpp", + "TestCopyOnWrite.cpp", + "TestDeinterlacingFilter.cpp", + "TestFrameAnimator.cpp", + "TestLoader.cpp", + "TestRemoveFrameRectFilter.cpp", + "TestStreamingLexer.cpp", + "TestSurfaceSink.cpp", + "TestSwizzleFilter.cpp", ] # skip the test on windows10-aarch64, aarch64 due to 1544961 -if not(CONFIG['OS_TARGET'] == 'WINNT' and CONFIG['CPU_ARCH'] == 'aarch64'): +if not (CONFIG["OS_TARGET"] == "WINNT" and CONFIG["CPU_ARCH"] == "aarch64"): UNIFIED_SOURCES += [ - 'TestDecoders.cpp', - 'TestDecodersPerf.cpp', - 'TestDecodeToSurface.cpp', - 'TestMetadata.cpp', - 'TestSourceBuffer.cpp', - 'TestSurfaceCache.cpp', + "TestDecoders.cpp", + "TestDecodersPerf.cpp", + "TestDecodeToSurface.cpp", + "TestMetadata.cpp", + "TestSourceBuffer.cpp", + "TestSurfaceCache.cpp", ] -if CONFIG['MOZ_ENABLE_SKIA']: +if CONFIG["MOZ_ENABLE_SKIA"]: UNIFIED_SOURCES += [ - 'TestDownscalingFilter.cpp', - 'TestSurfacePipeIntegration.cpp', + "TestDownscalingFilter.cpp", + "TestSurfacePipeIntegration.cpp", ] SOURCES += [ # Can't be unified because it manipulates the preprocessor environment. 
- 'TestDownscalingFilterNoSkia.cpp', + "TestDownscalingFilterNoSkia.cpp", ] TEST_HARNESS_FILES.gtest += [ - 'animated-with-extra-image-sub-blocks.gif', - 'blend.gif', - 'blend.png', - 'blend.webp', - 'bug-1655846.avif', - 'corrupt-with-bad-bmp-height.ico', - 'corrupt-with-bad-bmp-width.ico', - 'corrupt-with-bad-ico-bpp.ico', - 'corrupt.jpg', - 'downscaled.avif', - 'downscaled.bmp', - 'downscaled.gif', - 'downscaled.ico', - 'downscaled.icon', - 'downscaled.jpg', - 'downscaled.png', - 'downscaled.webp', - 'first-frame-green.gif', - 'first-frame-green.png', - 'first-frame-green.webp', - 'first-frame-padding.gif', - 'green-1x1-truncated.gif', - 'green-large-bmp.ico', - 'green-large-png.ico', - 'green-multiple-sizes.ico', - 'green.avif', - 'green.bmp', - 'green.gif', - 'green.icc_srgb.webp', - 'green.ico', - 'green.icon', - 'green.jpg', - 'green.png', - 'green.webp', - 'invalid-truncated-metadata.bmp', - 'large.avif', - 'large.webp', - 'multilayer.avif', - 'no-frame-delay.gif', - 'perf_cmyk.jpg', - 'perf_gray.jpg', - 'perf_gray.png', - 'perf_gray_alpha.png', - 'perf_srgb.gif', - 'perf_srgb.png', - 'perf_srgb_alpha.png', - 'perf_srgb_alpha_lossless.webp', - 'perf_srgb_alpha_lossy.webp', - 'perf_srgb_lossless.webp', - 'perf_srgb_lossy.webp', - 'perf_ycbcr.jpg', - 'rle4.bmp', - 'rle8.bmp', - 'stackcheck.avif', - 'transparent-ico-with-and-mask.ico', - 'transparent-if-within-ico.bmp', - 'transparent-no-alpha-header.webp', - 'transparent.gif', - 'transparent.png', - 'transparent.webp', + "animated-with-extra-image-sub-blocks.gif", + "blend.gif", + "blend.png", + "blend.webp", + "bug-1655846.avif", + "corrupt-with-bad-bmp-height.ico", + "corrupt-with-bad-bmp-width.ico", + "corrupt-with-bad-ico-bpp.ico", + "corrupt.jpg", + "downscaled.avif", + "downscaled.bmp", + "downscaled.gif", + "downscaled.ico", + "downscaled.icon", + "downscaled.jpg", + "downscaled.png", + "downscaled.webp", + "first-frame-green.gif", + "first-frame-green.png", + "first-frame-green.webp", + "first-frame-padding.gif", + "green-1x1-truncated.gif", + "green-large-bmp.ico", + "green-large-png.ico", + "green-multiple-sizes.ico", + "green.avif", + "green.bmp", + "green.gif", + "green.icc_srgb.webp", + "green.ico", + "green.icon", + "green.jpg", + "green.png", + "green.webp", + "invalid-truncated-metadata.bmp", + "large.avif", + "large.webp", + "multilayer.avif", + "no-frame-delay.gif", + "perf_cmyk.jpg", + "perf_gray.jpg", + "perf_gray.png", + "perf_gray_alpha.png", + "perf_srgb.gif", + "perf_srgb.png", + "perf_srgb_alpha.png", + "perf_srgb_alpha_lossless.webp", + "perf_srgb_alpha_lossy.webp", + "perf_srgb_lossless.webp", + "perf_srgb_lossy.webp", + "perf_ycbcr.jpg", + "rle4.bmp", + "rle8.bmp", + "stackcheck.avif", + "transparent-ico-with-and-mask.ico", + "transparent-if-within-ico.bmp", + "transparent-no-alpha-header.webp", + "transparent.gif", + "transparent.png", + "transparent.webp", ] -include('/ipc/chromium/chromium-config.mozbuild') +include("/ipc/chromium/chromium-config.mozbuild") LOCAL_INCLUDES += [ - '/dom/base', - '/gfx/2d', - '/image', + "/dom/base", + "/gfx/2d", + "/image", ] -LOCAL_INCLUDES += CONFIG['SKIA_INCLUDES'] +LOCAL_INCLUDES += CONFIG["SKIA_INCLUDES"] -FINAL_LIBRARY = 'xul-gtest' +FINAL_LIBRARY = "xul-gtest" -if CONFIG['CC_TYPE'] in ('clang', 'gcc'): - CXXFLAGS += ['-Wno-error=shadow'] +if CONFIG["CC_TYPE"] in ("clang", "gcc"): + CXXFLAGS += ["-Wno-error=shadow"] diff --git a/intl/build/moz.build b/intl/build/moz.build index 10e71f0aee5353..8c5dcfc2297baf 100644 --- a/intl/build/moz.build +++ 
b/intl/build/moz.build @@ -5,14 +5,14 @@ # file, You can obtain one at http://mozilla.org/MPL/2.0/. XPCOM_MANIFESTS += [ - 'components.conf', + "components.conf", ] -FINAL_LIBRARY = 'xul' +FINAL_LIBRARY = "xul" LOCAL_INCLUDES += [ - '../locale', - '../lwbrk', - '../strres', - '../uconv', - '../unicharutil', + "../locale", + "../lwbrk", + "../strres", + "../uconv", + "../unicharutil", ] diff --git a/intl/gtest/moz.build b/intl/gtest/moz.build index 187e03af58a844..80d897ad035717 100644 --- a/intl/gtest/moz.build +++ b/intl/gtest/moz.build @@ -7,9 +7,7 @@ UNIFIED_SOURCES += [] # skip the test on windows10-aarch64 due to perma-crash - bug 1544961 -if not(CONFIG['OS_TARGET'] == 'WINNT' and CONFIG['CPU_ARCH'] == 'aarch64'): - UNIFIED_SOURCES += [ - 'TestEncoding.cpp' - ] +if not (CONFIG["OS_TARGET"] == "WINNT" and CONFIG["CPU_ARCH"] == "aarch64"): + UNIFIED_SOURCES += ["TestEncoding.cpp"] -FINAL_LIBRARY = 'xul-gtest' +FINAL_LIBRARY = "xul-gtest" diff --git a/intl/hyphenation/glue/moz.build b/intl/hyphenation/glue/moz.build index 89a44e7ef54fa1..bb299a542e959b 100644 --- a/intl/hyphenation/glue/moz.build +++ b/intl/hyphenation/glue/moz.build @@ -5,21 +5,21 @@ # file, You can obtain one at http://mozilla.org/MPL/2.0/. EXPORTS += [ - 'nsHyphenationManager.h', - 'nsHyphenator.h', + "nsHyphenationManager.h", + "nsHyphenator.h", ] UNIFIED_SOURCES += [ - 'nsHyphenationManager.cpp', - 'nsHyphenator.cpp', + "nsHyphenationManager.cpp", + "nsHyphenator.cpp", ] -include('/ipc/chromium/chromium-config.mozbuild') +include("/ipc/chromium/chromium-config.mozbuild") -FINAL_LIBRARY = 'xul' +FINAL_LIBRARY = "xul" -if CONFIG['CC_TYPE'] in ('clang', 'gcc'): - CXXFLAGS += ['-Wno-error=shadow'] +if CONFIG["CC_TYPE"] in ("clang", "gcc"): + CXXFLAGS += ["-Wno-error=shadow"] -if CONFIG['COMPILE_ENVIRONMENT']: - CbindgenHeader('mapped_hyph.h', inputs=['/third_party/rust/mapped_hyph']) +if CONFIG["COMPILE_ENVIRONMENT"]: + CbindgenHeader("mapped_hyph.h", inputs=["/third_party/rust/mapped_hyph"]) diff --git a/intl/icu_sources_data.py b/intl/icu_sources_data.py index 408267f2b71f91..181c889fa56336 100644 --- a/intl/icu_sources_data.py +++ b/intl/icu_sources_data.py @@ -28,106 +28,103 @@ # if you need them. However, files marked with a "Cluster" comment # can only be removed together, as they have (directional) dependencies. # If you want to rerun this analysis, contact :decoder. 
-UNUSED_SOURCES = set([ - 'intl/icu/source/common/bytestrieiterator.cpp', - 'intl/icu/source/common/cstr.cpp', - 'intl/icu/source/common/cwchar.cpp', - 'intl/icu/source/common/icudataver.cpp', - 'intl/icu/source/common/icuplug.cpp', - 'intl/icu/source/common/pluralmap.cpp', - 'intl/icu/source/common/ucat.cpp', - 'intl/icu/source/common/ucnv2022.cpp', - 'intl/icu/source/common/ucnv_ct.cpp', - 'intl/icu/source/common/ucnvdisp.cpp', - 'intl/icu/source/common/ucnv_ext.cpp', - 'intl/icu/source/common/ucnvhz.cpp', - 'intl/icu/source/common/ucnvisci.cpp', - 'intl/icu/source/common/ucnv_lmb.cpp', - 'intl/icu/source/common/ucnvmbcs.cpp', - 'intl/icu/source/common/uidna.cpp', - 'intl/icu/source/common/unorm.cpp', - 'intl/icu/source/common/usc_impl.cpp', - 'intl/icu/source/common/ustr_wcs.cpp', - 'intl/icu/source/common/util_props.cpp', - 'intl/icu/source/i18n/anytrans.cpp', - 'intl/icu/source/i18n/brktrans.cpp', - 'intl/icu/source/i18n/casetrn.cpp', - 'intl/icu/source/i18n/cpdtrans.cpp', - 'intl/icu/source/i18n/esctrn.cpp', - 'intl/icu/source/i18n/fmtable_cnv.cpp', - 'intl/icu/source/i18n/funcrepl.cpp', - 'intl/icu/source/i18n/gender.cpp', - 'intl/icu/source/i18n/name2uni.cpp', - 'intl/icu/source/i18n/nortrans.cpp', - 'intl/icu/source/i18n/nultrans.cpp', - 'intl/icu/source/i18n/quant.cpp', - 'intl/icu/source/i18n/rbt.cpp', - 'intl/icu/source/i18n/rbt_data.cpp', - 'intl/icu/source/i18n/rbt_pars.cpp', - 'intl/icu/source/i18n/rbt_rule.cpp', - 'intl/icu/source/i18n/rbt_set.cpp', - 'intl/icu/source/i18n/regexcmp.cpp', - 'intl/icu/source/i18n/regeximp.cpp', - 'intl/icu/source/i18n/regexst.cpp', - 'intl/icu/source/i18n/regextxt.cpp', - 'intl/icu/source/i18n/rematch.cpp', - 'intl/icu/source/i18n/remtrans.cpp', - 'intl/icu/source/i18n/repattrn.cpp', - 'intl/icu/source/i18n/scientificnumberformatter.cpp', - 'intl/icu/source/i18n/strmatch.cpp', - 'intl/icu/source/i18n/strrepl.cpp', - 'intl/icu/source/i18n/titletrn.cpp', - 'intl/icu/source/i18n/tolowtrn.cpp', - 'intl/icu/source/i18n/toupptrn.cpp', - 'intl/icu/source/i18n/translit.cpp', - 'intl/icu/source/i18n/transreg.cpp', - 'intl/icu/source/i18n/tridpars.cpp', - 'intl/icu/source/i18n/unesctrn.cpp', - 'intl/icu/source/i18n/uni2name.cpp', - 'intl/icu/source/i18n/uregexc.cpp', - 'intl/icu/source/i18n/uregex.cpp', - 'intl/icu/source/i18n/uregion.cpp', - 'intl/icu/source/i18n/uspoof_build.cpp', - 'intl/icu/source/i18n/uspoof_conf.cpp', - 'intl/icu/source/i18n/utrans.cpp', - 'intl/icu/source/i18n/vzone.cpp', - 'intl/icu/source/i18n/zrule.cpp', - 'intl/icu/source/i18n/ztrans.cpp', - - # Cluster - 'intl/icu/source/common/resbund_cnv.cpp', - 'intl/icu/source/common/ures_cnv.cpp', - - # Cluster - 'intl/icu/source/common/propsvec.cpp', - 'intl/icu/source/common/ucnvsel.cpp', - 'intl/icu/source/common/ucnv_set.cpp', - - # Cluster - 'intl/icu/source/common/ubiditransform.cpp', - 'intl/icu/source/common/ushape.cpp', - - # Cluster - 'intl/icu/source/i18n/csdetect.cpp', - 'intl/icu/source/i18n/csmatch.cpp', - 'intl/icu/source/i18n/csr2022.cpp', - 'intl/icu/source/i18n/csrecog.cpp', - 'intl/icu/source/i18n/csrmbcs.cpp', - 'intl/icu/source/i18n/csrsbcs.cpp', - 'intl/icu/source/i18n/csrucode.cpp', - 'intl/icu/source/i18n/csrutf8.cpp', - 'intl/icu/source/i18n/inputext.cpp', - 'intl/icu/source/i18n/ucsdet.cpp', - - # Cluster - 'intl/icu/source/i18n/alphaindex.cpp', - 'intl/icu/source/i18n/ulocdata.cpp', -]) +UNUSED_SOURCES = set( + [ + "intl/icu/source/common/bytestrieiterator.cpp", + "intl/icu/source/common/cstr.cpp", + "intl/icu/source/common/cwchar.cpp", + 
"intl/icu/source/common/icudataver.cpp", + "intl/icu/source/common/icuplug.cpp", + "intl/icu/source/common/pluralmap.cpp", + "intl/icu/source/common/ucat.cpp", + "intl/icu/source/common/ucnv2022.cpp", + "intl/icu/source/common/ucnv_ct.cpp", + "intl/icu/source/common/ucnvdisp.cpp", + "intl/icu/source/common/ucnv_ext.cpp", + "intl/icu/source/common/ucnvhz.cpp", + "intl/icu/source/common/ucnvisci.cpp", + "intl/icu/source/common/ucnv_lmb.cpp", + "intl/icu/source/common/ucnvmbcs.cpp", + "intl/icu/source/common/uidna.cpp", + "intl/icu/source/common/unorm.cpp", + "intl/icu/source/common/usc_impl.cpp", + "intl/icu/source/common/ustr_wcs.cpp", + "intl/icu/source/common/util_props.cpp", + "intl/icu/source/i18n/anytrans.cpp", + "intl/icu/source/i18n/brktrans.cpp", + "intl/icu/source/i18n/casetrn.cpp", + "intl/icu/source/i18n/cpdtrans.cpp", + "intl/icu/source/i18n/esctrn.cpp", + "intl/icu/source/i18n/fmtable_cnv.cpp", + "intl/icu/source/i18n/funcrepl.cpp", + "intl/icu/source/i18n/gender.cpp", + "intl/icu/source/i18n/name2uni.cpp", + "intl/icu/source/i18n/nortrans.cpp", + "intl/icu/source/i18n/nultrans.cpp", + "intl/icu/source/i18n/quant.cpp", + "intl/icu/source/i18n/rbt.cpp", + "intl/icu/source/i18n/rbt_data.cpp", + "intl/icu/source/i18n/rbt_pars.cpp", + "intl/icu/source/i18n/rbt_rule.cpp", + "intl/icu/source/i18n/rbt_set.cpp", + "intl/icu/source/i18n/regexcmp.cpp", + "intl/icu/source/i18n/regeximp.cpp", + "intl/icu/source/i18n/regexst.cpp", + "intl/icu/source/i18n/regextxt.cpp", + "intl/icu/source/i18n/rematch.cpp", + "intl/icu/source/i18n/remtrans.cpp", + "intl/icu/source/i18n/repattrn.cpp", + "intl/icu/source/i18n/scientificnumberformatter.cpp", + "intl/icu/source/i18n/strmatch.cpp", + "intl/icu/source/i18n/strrepl.cpp", + "intl/icu/source/i18n/titletrn.cpp", + "intl/icu/source/i18n/tolowtrn.cpp", + "intl/icu/source/i18n/toupptrn.cpp", + "intl/icu/source/i18n/translit.cpp", + "intl/icu/source/i18n/transreg.cpp", + "intl/icu/source/i18n/tridpars.cpp", + "intl/icu/source/i18n/unesctrn.cpp", + "intl/icu/source/i18n/uni2name.cpp", + "intl/icu/source/i18n/uregexc.cpp", + "intl/icu/source/i18n/uregex.cpp", + "intl/icu/source/i18n/uregion.cpp", + "intl/icu/source/i18n/uspoof_build.cpp", + "intl/icu/source/i18n/uspoof_conf.cpp", + "intl/icu/source/i18n/utrans.cpp", + "intl/icu/source/i18n/vzone.cpp", + "intl/icu/source/i18n/zrule.cpp", + "intl/icu/source/i18n/ztrans.cpp", + # Cluster + "intl/icu/source/common/resbund_cnv.cpp", + "intl/icu/source/common/ures_cnv.cpp", + # Cluster + "intl/icu/source/common/propsvec.cpp", + "intl/icu/source/common/ucnvsel.cpp", + "intl/icu/source/common/ucnv_set.cpp", + # Cluster + "intl/icu/source/common/ubiditransform.cpp", + "intl/icu/source/common/ushape.cpp", + # Cluster + "intl/icu/source/i18n/csdetect.cpp", + "intl/icu/source/i18n/csmatch.cpp", + "intl/icu/source/i18n/csr2022.cpp", + "intl/icu/source/i18n/csrecog.cpp", + "intl/icu/source/i18n/csrmbcs.cpp", + "intl/icu/source/i18n/csrsbcs.cpp", + "intl/icu/source/i18n/csrucode.cpp", + "intl/icu/source/i18n/csrutf8.cpp", + "intl/icu/source/i18n/inputext.cpp", + "intl/icu/source/i18n/ucsdet.cpp", + # Cluster + "intl/icu/source/i18n/alphaindex.cpp", + "intl/icu/source/i18n/ulocdata.cpp", + ] +) def find_source_file(dir, filename): base = os.path.splitext(filename)[0] - for ext in ('.cpp', '.c'): + for ext in (".cpp", ".c"): f = mozpath.join(dir, base + ext) if os.path.isfile(f): return f @@ -137,13 +134,17 @@ def find_source_file(dir, filename): def get_sources_from_makefile(makefile): srcdir = os.path.dirname(makefile) 
with open(makefile) as f: - contents = f.read().replace('\\\n', '').split('\n') + contents = f.read().replace("\\\n", "").split("\n") for line in contents: - if line.startswith('OBJECTS ='): - return sorted((find_source_file(srcdir, s) - for s in line[len('OBJECTS ='):].strip().split()), - key=lambda x: x.lower()) - raise AssertionError('OBJECTS definition not found in file %s' % makefile) + if line.startswith("OBJECTS ="): + return sorted( + ( + find_source_file(srcdir, s) + for s in line[len("OBJECTS =") :].strip().split() + ), + key=lambda x: x.lower(), + ) + raise AssertionError("OBJECTS definition not found in file %s" % makefile) def list_headers(path): @@ -156,34 +157,39 @@ def list_headers(path): def write_sources(mozbuild, sources, headers): - with open(mozbuild, 'w', newline='\n', encoding='utf-8') as f: - f.write('# THIS FILE IS GENERATED BY /intl/icu_sources_data.py ' + - 'DO NOT EDIT\n') + with open(mozbuild, "w", newline="\n", encoding="utf-8") as f: + f.write( + "# THIS FILE IS GENERATED BY /intl/icu_sources_data.py " + "DO NOT EDIT\n" + ) def write_list(name, content): if content: - f.write('%s %s [\n' % (name, '=' if name.islower() else '+=')) - f.write(''.join(" '/%s',\n" % s for s in content)) - f.write(']\n') + f.write("%s %s [\n" % (name, "=" if name.islower() else "+=")) + f.write("".join(" '/%s',\n" % s for s in content)) + f.write("]\n") - write_list('sources', [s for s in sources if s not in UNUSED_SOURCES]) - write_list('other_sources', [s for s in sources if s in UNUSED_SOURCES]) - write_list('EXPORTS.unicode', headers) + write_list("sources", [s for s in sources if s not in UNUSED_SOURCES]) + write_list("other_sources", [s for s in sources if s in UNUSED_SOURCES]) + write_list("EXPORTS.unicode", headers) def update_sources(topsrcdir): - print('Updating ICU sources lists...') - for d in ['common', 'i18n', 'tools/toolutil', 'tools/icupkg']: - base_path = mozpath.join(topsrcdir, 'intl/icu/source/%s' % d) - makefile = mozpath.join(base_path, 'Makefile.in') - mozbuild = mozpath.join(topsrcdir, - 'config/external/icu/%s/sources.mozbuild' % mozpath.basename(d)) - sources = [mozpath.relpath(s, topsrcdir) - for s in get_sources_from_makefile(makefile)] - unicode_dir = mozpath.join(base_path, 'unicode') + print("Updating ICU sources lists...") + for d in ["common", "i18n", "tools/toolutil", "tools/icupkg"]: + base_path = mozpath.join(topsrcdir, "intl/icu/source/%s" % d) + makefile = mozpath.join(base_path, "Makefile.in") + mozbuild = mozpath.join( + topsrcdir, "config/external/icu/%s/sources.mozbuild" % mozpath.basename(d) + ) + sources = [ + mozpath.relpath(s, topsrcdir) for s in get_sources_from_makefile(makefile) + ] + unicode_dir = mozpath.join(base_path, "unicode") if os.path.exists(unicode_dir): - headers = [mozpath.normsep(os.path.relpath(s, topsrcdir)) - for s in list_headers(unicode_dir)] + headers = [ + mozpath.normsep(os.path.relpath(s, topsrcdir)) + for s in list_headers(unicode_dir) + ] else: headers = None write_sources(mozbuild, sources, headers) @@ -192,12 +198,17 @@ def update_sources(topsrcdir): def try_run(name, command, cwd=None, **kwargs): try: with tempfile.NamedTemporaryFile(prefix=name, delete=False) as f: - subprocess.check_call(command, cwd=cwd, stdout=f, - stderr=subprocess.STDOUT, **kwargs) + subprocess.check_call( + command, cwd=cwd, stdout=f, stderr=subprocess.STDOUT, **kwargs + ) except subprocess.CalledProcessError: - print('''Error running "{}" in directory {} - See output in {}'''.format(' '.join(command), cwd, f.name), - file=sys.stderr) 
+ print( + """Error running "{}" in directory {} + See output in {}""".format( + " ".join(command), cwd, f.name + ), + file=sys.stderr, + ) return False else: os.unlink(f.name) @@ -205,67 +216,72 @@ def try_run(name, command, cwd=None, **kwargs): def get_data_file(data_dir): - files = glob.glob(mozpath.join(data_dir, 'icudt*.dat')) + files = glob.glob(mozpath.join(data_dir, "icudt*.dat")) return files[0] if files else None def update_data_file(topsrcdir): - objdir = tempfile.mkdtemp(prefix='icu-obj-') - configure = mozpath.join(topsrcdir, 'intl/icu/source/configure') + objdir = tempfile.mkdtemp(prefix="icu-obj-") + configure = mozpath.join(topsrcdir, "intl/icu/source/configure") env = dict(os.environ) # bug 1262101 - these should be shared with the moz.build files - env.update({ - 'CPPFLAGS': ('-DU_NO_DEFAULT_INCLUDE_UTF_HEADERS=1 ' + - '-DU_HIDE_OBSOLETE_UTF_OLD_H=1' + - '-DUCONFIG_NO_LEGACY_CONVERSION ' + - '-DUCONFIG_NO_TRANSLITERATION ' + - '-DUCONFIG_NO_REGULAR_EXPRESSIONS ' + - '-DUCONFIG_NO_BREAK_ITERATION ' + - '-DU_CHARSET_IS_UTF8') - }) + env.update( + { + "CPPFLAGS": ( + "-DU_NO_DEFAULT_INCLUDE_UTF_HEADERS=1 " + + "-DU_HIDE_OBSOLETE_UTF_OLD_H=1" + + "-DUCONFIG_NO_LEGACY_CONVERSION " + + "-DUCONFIG_NO_TRANSLITERATION " + + "-DUCONFIG_NO_REGULAR_EXPRESSIONS " + + "-DUCONFIG_NO_BREAK_ITERATION " + + "-DU_CHARSET_IS_UTF8" + ) + } + ) # Exclude data that we currently don't need. # # The file format for ICU's data build tool is described at # . - env["ICU_DATA_FILTER_FILE"] = mozpath.join(topsrcdir, 'intl/icu/data_filter.json') + env["ICU_DATA_FILTER_FILE"] = mozpath.join(topsrcdir, "intl/icu/data_filter.json") - print('Running ICU configure...') + print("Running ICU configure...") if not try_run( - 'icu-configure', - ['sh', configure, - '--with-data-packaging=archive', - '--enable-static', - '--disable-shared', - '--disable-extras', - '--disable-icuio', - '--disable-layout', - '--disable-layoutex', - '--disable-tests', - '--disable-samples', - '--disable-strict'], - cwd=objdir, - env=env): + "icu-configure", + [ + "sh", + configure, + "--with-data-packaging=archive", + "--enable-static", + "--disable-shared", + "--disable-extras", + "--disable-icuio", + "--disable-layout", + "--disable-layoutex", + "--disable-tests", + "--disable-samples", + "--disable-strict", + ], + cwd=objdir, + env=env, + ): return False - print('Running ICU make...') + print("Running ICU make...") if not try_run( - 'icu-make', - ['make', - '--jobs=%d' % multiprocessing.cpu_count(), - '--output-sync'], - cwd=objdir): + "icu-make", + ["make", "--jobs=%d" % multiprocessing.cpu_count(), "--output-sync"], + cwd=objdir, + ): return False - print('Copying ICU data file...') - tree_data_path = mozpath.join(topsrcdir, - 'config/external/icu/data/') + print("Copying ICU data file...") + tree_data_path = mozpath.join(topsrcdir, "config/external/icu/data/") old_data_file = get_data_file(tree_data_path) if not old_data_file: - print('Error: no ICU data file in %s' % tree_data_path, - file=sys.stderr) + print("Error: no ICU data file in %s" % tree_data_path, file=sys.stderr) return False - new_data_file = get_data_file(mozpath.join(objdir, 'data/out')) + new_data_file = get_data_file(mozpath.join(objdir, "data/out")) if not new_data_file: - print('Error: no ICU data in ICU objdir', file=sys.stderr) + print("Error: no ICU data in ICU objdir", file=sys.stderr) return False if os.path.basename(old_data_file) != os.path.basename(new_data_file): # Data file name has the major version number embedded. 
@@ -274,22 +290,21 @@ def update_data_file(topsrcdir): try: shutil.rmtree(objdir) except Exception: - print('Warning: failed to remove %s' % objdir, file=sys.stderr) + print("Warning: failed to remove %s" % objdir, file=sys.stderr) return True def main(): if len(sys.argv) != 2: - print('Usage: icu_sources_data.py ', - file=sys.stderr) + print("Usage: icu_sources_data.py ", file=sys.stderr) sys.exit(1) topsrcdir = mozpath.abspath(sys.argv[1]) update_sources(topsrcdir) if not update_data_file(topsrcdir): - print('Error updating ICU data file', file=sys.stderr) + print("Error updating ICU data file", file=sys.stderr) sys.exit(1) -if __name__ == '__main__': +if __name__ == "__main__": main() diff --git a/intl/l10n/moz.build b/intl/l10n/moz.build index da8f1cfc1663e2..71cfcc7173b933 100644 --- a/intl/l10n/moz.build +++ b/intl/l10n/moz.build @@ -5,53 +5,52 @@ # file, You can obtain one at http://mozilla.org/MPL/2.0/. EXPORTS.mozilla.intl += [ - 'FluentBindings.h', - 'FluentBundle.h', - 'FluentResource.h', - 'Localization.h', + "FluentBindings.h", + "FluentBundle.h", + "FluentResource.h", + "Localization.h", ] UNIFIED_SOURCES += [ - 'FluentBundle.cpp', - 'FluentResource.cpp', - 'L10nRegistry.cpp', - 'Localization.cpp', + "FluentBundle.cpp", + "FluentResource.cpp", + "L10nRegistry.cpp", + "Localization.cpp", ] EXTRA_JS_MODULES += [ - 'L10nRegistry.jsm', - 'Localization.jsm', + "L10nRegistry.jsm", + "Localization.jsm", ] TESTING_JS_MODULES += [ - 'FluentSyntax.jsm', + "FluentSyntax.jsm", ] TEST_DIRS += [ - 'rust/gtest', + "rust/gtest", ] XPIDL_SOURCES += [ - 'mozILocalization.idl', + "mozILocalization.idl", ] -XPIDL_MODULE = 'locale' +XPIDL_MODULE = "locale" LOCAL_INCLUDES += [ - '/dom/base', + "/dom/base", ] -if CONFIG['COMPILE_ENVIRONMENT']: - CbindgenHeader('fluent_ffi_generated.h', - inputs=['/intl/l10n/rust/fluent-ffi']) +if CONFIG["COMPILE_ENVIRONMENT"]: + CbindgenHeader("fluent_ffi_generated.h", inputs=["/intl/l10n/rust/fluent-ffi"]) EXPORTS.mozilla.intl += [ - '!fluent_ffi_generated.h', + "!fluent_ffi_generated.h", ] -XPCSHELL_TESTS_MANIFESTS += ['test/xpcshell.ini'] -MOCHITEST_CHROME_MANIFESTS += ['test/mochitest/chrome.ini'] +XPCSHELL_TESTS_MANIFESTS += ["test/xpcshell.ini"] +MOCHITEST_CHROME_MANIFESTS += ["test/mochitest/chrome.ini"] -SPHINX_TREES['/l10n'] = 'docs' +SPHINX_TREES["/l10n"] = "docs" -FINAL_LIBRARY = 'xul' +FINAL_LIBRARY = "xul" diff --git a/intl/l10n/rust/gtest/moz.build b/intl/l10n/rust/gtest/moz.build index a71b43e3f17345..7c73e04fc8d0f6 100644 --- a/intl/l10n/rust/gtest/moz.build +++ b/intl/l10n/rust/gtest/moz.build @@ -5,7 +5,7 @@ # file, You can obtain one at http://mozilla.org/MPL/2.0/. UNIFIED_SOURCES += [ - 'Test.cpp', + "Test.cpp", ] -FINAL_LIBRARY = 'xul-gtest' +FINAL_LIBRARY = "xul-gtest" diff --git a/intl/locale/android/moz.build b/intl/locale/android/moz.build index bc0563a0c37cbd..b0cb0cc6a5227b 100644 --- a/intl/locale/android/moz.build +++ b/intl/locale/android/moz.build @@ -4,12 +4,10 @@ # License, v. 2.0. If a copy of the MPL was not distributed with this # file, You can obtain one at http://mozilla.org/MPL/2.0/. -UNIFIED_SOURCES += [ - 'OSPreferences_android.cpp' -] +UNIFIED_SOURCES += ["OSPreferences_android.cpp"] -FINAL_LIBRARY = 'xul' +FINAL_LIBRARY = "xul" LOCAL_INCLUDES += [ - '..', + "..", ] diff --git a/intl/locale/gtk/moz.build b/intl/locale/gtk/moz.build index df590b3cd939bd..3b977a97200cc9 100644 --- a/intl/locale/gtk/moz.build +++ b/intl/locale/gtk/moz.build @@ -4,12 +4,12 @@ # License, v. 2.0. 
If a copy of the MPL was not distributed with this # file, You can obtain one at http://mozilla.org/MPL/2.0/. -SOURCES += ['OSPreferences_gtk.cpp'] +SOURCES += ["OSPreferences_gtk.cpp"] -CXXFLAGS += CONFIG['GLIB_CFLAGS'] +CXXFLAGS += CONFIG["GLIB_CFLAGS"] -FINAL_LIBRARY = 'xul' +FINAL_LIBRARY = "xul" LOCAL_INCLUDES += [ - '..', + "..", ] diff --git a/intl/locale/mac/moz.build b/intl/locale/mac/moz.build index 6f933e2a5b6c06..68eb6d8d352089 100644 --- a/intl/locale/mac/moz.build +++ b/intl/locale/mac/moz.build @@ -4,11 +4,9 @@ # License, v. 2.0. If a copy of the MPL was not distributed with this # file, You can obtain one at http://mozilla.org/MPL/2.0/. -UNIFIED_SOURCES += [ - 'OSPreferences_mac.cpp' -] +UNIFIED_SOURCES += ["OSPreferences_mac.cpp"] -FINAL_LIBRARY = 'xul' +FINAL_LIBRARY = "xul" LOCAL_INCLUDES += [ - '..', + "..", ] diff --git a/intl/locale/moz.build b/intl/locale/moz.build index 48c10499e8cf50..28662541952f87 100644 --- a/intl/locale/moz.build +++ b/intl/locale/moz.build @@ -4,91 +4,92 @@ # License, v. 2.0. If a copy of the MPL was not distributed with this # file, You can obtain one at http://mozilla.org/MPL/2.0/. -XPCSHELL_TESTS_MANIFESTS += ['tests/unit/xpcshell.ini'] +XPCSHELL_TESTS_MANIFESTS += ["tests/unit/xpcshell.ini"] -toolkit = CONFIG['MOZ_WIDGET_TOOLKIT'] +toolkit = CONFIG["MOZ_WIDGET_TOOLKIT"] -if toolkit == 'windows': - DIRS += ['windows'] -elif toolkit == 'cocoa': - DIRS += ['mac'] -elif toolkit == 'gtk': - DIRS += ['gtk'] -elif toolkit == 'android': - DIRS += ['android'] +if toolkit == "windows": + DIRS += ["windows"] +elif toolkit == "cocoa": + DIRS += ["mac"] +elif toolkit == "gtk": + DIRS += ["gtk"] +elif toolkit == "android": + DIRS += ["android"] XPIDL_SOURCES += [ - 'mozILocaleService.idl', - 'mozIOSPreferences.idl', - 'nsICollation.idl', + "mozILocaleService.idl", + "mozIOSPreferences.idl", + "nsICollation.idl", ] -XPIDL_MODULE = 'locale' +XPIDL_MODULE = "locale" EXPORTS += [ - 'DateTimeFormat.h', - 'nsCollationCID.h', - 'nsLanguageAtomService.h', - 'nsUConvPropertySearch.h', + "DateTimeFormat.h", + "nsCollationCID.h", + "nsLanguageAtomService.h", + "nsUConvPropertySearch.h", ] EXPORTS.mozilla.intl += [ - 'LocaleService.h', - 'MozLocale.h', - 'MozLocaleBindings.h', - 'OSPreferences.h', - 'Quotes.h', + "LocaleService.h", + "MozLocale.h", + "MozLocaleBindings.h", + "OSPreferences.h", + "Quotes.h", ] UNIFIED_SOURCES += [ - 'DateTimeFormat.cpp', - 'LocaleService.cpp', - 'MozLocale.cpp', - 'nsCollation.cpp', - 'nsCollationFactory.cpp', - 'nsLanguageAtomService.cpp', - 'nsUConvPropertySearch.cpp', - 'OSPreferences.cpp', - 'Quotes.cpp', + "DateTimeFormat.cpp", + "LocaleService.cpp", + "MozLocale.cpp", + "nsCollation.cpp", + "nsCollationFactory.cpp", + "nsLanguageAtomService.cpp", + "nsUConvPropertySearch.cpp", + "OSPreferences.cpp", + "Quotes.cpp", ] EXTRA_JS_MODULES += [ - 'PluralForm.jsm', + "PluralForm.jsm", ] XPCOM_MANIFESTS += [ - 'components.conf', + "components.conf", ] -FINAL_LIBRARY = 'xul' +FINAL_LIBRARY = "xul" LOCAL_INCLUDES += [ - '/intl/uconv', + "/intl/uconv", ] RESOURCE_FILES += [ - 'language.properties', + "language.properties", ] -prefixes = ( - 'encodingsgroups', -) +prefixes = ("encodingsgroups",) for prefix in prefixes: - input_file = prefix + '.properties' - header = prefix + '.properties.h' - GeneratedFile(header, script='props2arrays.py', inputs=[input_file]) - -if CONFIG['ENABLE_TESTS']: - DIRS += ['tests/gtest'] - -if CONFIG['COMPILE_ENVIRONMENT']: - CbindgenHeader('fluent_langneg_ffi_generated.h', - 
inputs=['/intl/locale/rust/fluent-langneg-ffi']) - CbindgenHeader('unic_langid_ffi_generated.h', - inputs=['/intl/locale/rust/unic-langid-ffi']) + input_file = prefix + ".properties" + header = prefix + ".properties.h" + GeneratedFile(header, script="props2arrays.py", inputs=[input_file]) + +if CONFIG["ENABLE_TESTS"]: + DIRS += ["tests/gtest"] + +if CONFIG["COMPILE_ENVIRONMENT"]: + CbindgenHeader( + "fluent_langneg_ffi_generated.h", + inputs=["/intl/locale/rust/fluent-langneg-ffi"], + ) + CbindgenHeader( + "unic_langid_ffi_generated.h", inputs=["/intl/locale/rust/unic-langid-ffi"] + ) EXPORTS.mozilla.intl += [ - '!fluent_langneg_ffi_generated.h', - '!unic_langid_ffi_generated.h', + "!fluent_langneg_ffi_generated.h", + "!unic_langid_ffi_generated.h", ] diff --git a/intl/locale/props2arrays.py b/intl/locale/props2arrays.py index 16891e763ca351..0572cb1c5f26e1 100644 --- a/intl/locale/props2arrays.py +++ b/intl/locale/props2arrays.py @@ -6,10 +6,10 @@ def main(header, propFile): mappings = {} - with open(propFile, 'r') as f: + with open(propFile, "r") as f: for line in f: line = line.strip() - if not line.startswith('#'): + if not line.startswith("#"): parts = line.split("=", 1) if len(parts) == 2 and len(parts[0]) > 0: mappings[parts[0].strip()] = parts[1].strip() @@ -19,6 +19,8 @@ def main(header, propFile): header.write("// This is a generated file. Please do not edit.\n") header.write("// Please edit the corresponding .properties file instead.\n") - entries = ['{ "%s", "%s", %d }' - % (key, mappings[key], len(mappings[key])) for key in sorted(keys)] - header.write(',\n'.join(entries) + '\n') + entries = [ + '{ "%s", "%s", %d }' % (key, mappings[key], len(mappings[key])) + for key in sorted(keys) + ] + header.write(",\n".join(entries) + "\n") diff --git a/intl/locale/tests/gtest/moz.build b/intl/locale/tests/gtest/moz.build index bda68d000c1430..e23c828f261eaf 100644 --- a/intl/locale/tests/gtest/moz.build +++ b/intl/locale/tests/gtest/moz.build @@ -5,12 +5,12 @@ # file, You can obtain one at http://mozilla.org/MPL/2.0/. UNIFIED_SOURCES += [ - 'TestCollation.cpp', - 'TestDateTimeFormat.cpp', - 'TestLocaleService.cpp', - 'TestLocaleServiceNegotiate.cpp', - 'TestMozLocale.cpp', - 'TestOSPreferences.cpp', + "TestCollation.cpp", + "TestDateTimeFormat.cpp", + "TestLocaleService.cpp", + "TestLocaleServiceNegotiate.cpp", + "TestMozLocale.cpp", + "TestOSPreferences.cpp", ] -FINAL_LIBRARY = 'xul-gtest' +FINAL_LIBRARY = "xul-gtest" diff --git a/intl/locale/windows/moz.build b/intl/locale/windows/moz.build index 55bf36050e6901..9ac234b35491b4 100644 --- a/intl/locale/windows/moz.build +++ b/intl/locale/windows/moz.build @@ -4,13 +4,10 @@ # License, v. 2.0. If a copy of the MPL was not distributed with this # file, You can obtain one at http://mozilla.org/MPL/2.0/. -SOURCES += [ - 'OSPreferences_win.cpp' -] +SOURCES += ["OSPreferences_win.cpp"] -FINAL_LIBRARY = 'xul' +FINAL_LIBRARY = "xul" LOCAL_INCLUDES += [ - '..', + "..", ] - diff --git a/intl/locales/moz.build b/intl/locales/moz.build index eca84c67630661..655449d9c511eb 100644 --- a/intl/locales/moz.build +++ b/intl/locales/moz.build @@ -7,50 +7,50 @@ # For Android, we ship hyphenation files in uncompiled format to minimize # the package size; for other products, ship precompiled files to eliminate # the cost of compilation on first use. 
-if CONFIG['MOZ_BUILD_APP'] == 'mobile/android': +if CONFIG["MOZ_BUILD_APP"] == "mobile/android": hyphenation_ext = ".dic" else: hyphenation_ext = ".hyf" locales = [ - 'af', - 'bg', - 'ca', - 'cy', - 'da', - 'de-1901', - 'de-1996', - 'de-CH', + "af", + "bg", + "ca", + "cy", + "da", + "de-1901", + "de-1996", + "de-CH", # 'en-US', # en-US is renamed -- see below. - 'eo', - 'es', - 'et', - 'fi', - 'fr', - 'gl', - 'hr', - 'hsb', - 'hu', - 'ia', - 'is', - 'it', - 'kmr', - 'la', - 'lt', - 'mn', - 'nb', - 'nl', - 'nn', - 'pl', - 'pt', - 'ru', - 'sh', - 'sl', - 'sv', - 'tr', - 'uk', + "eo", + "es", + "et", + "fi", + "fr", + "gl", + "hr", + "hsb", + "hu", + "ia", + "is", + "it", + "kmr", + "la", + "lt", + "mn", + "nb", + "nl", + "nn", + "pl", + "pt", + "ru", + "sh", + "sl", + "sv", + "tr", + "uk", ] -filename = '{locale}/hyphenation/hyph_{locale}' + hyphenation_ext +filename = "{locale}/hyphenation/hyph_{locale}" + hyphenation_ext FINAL_TARGET_FILES.hyphenation += [filename.format(locale=locale) for locale in locales] # en-US is a special case: the dic file is named like en_US. -FINAL_TARGET_FILES.hyphenation += ['en-US/hyphenation/hyph_en_US' + hyphenation_ext] +FINAL_TARGET_FILES.hyphenation += ["en-US/hyphenation/hyph_en_US" + hyphenation_ext] diff --git a/intl/lwbrk/gtest/moz.build b/intl/lwbrk/gtest/moz.build index 64a3919cb3e9a6..c9fbab8e7646f6 100644 --- a/intl/lwbrk/gtest/moz.build +++ b/intl/lwbrk/gtest/moz.build @@ -5,8 +5,7 @@ # file, You can obtain one at http://mozilla.org/MPL/2.0/. UNIFIED_SOURCES += [ - 'TestLineBreak.cpp', + "TestLineBreak.cpp", ] -FINAL_LIBRARY = 'xul-gtest' - +FINAL_LIBRARY = "xul-gtest" diff --git a/intl/lwbrk/moz.build b/intl/lwbrk/moz.build index bd264fac607e99..b47a49e2796c6c 100644 --- a/intl/lwbrk/moz.build +++ b/intl/lwbrk/moz.build @@ -4,37 +4,37 @@ # License, v. 2.0. If a copy of the MPL was not distributed with this # file, You can obtain one at http://mozilla.org/MPL/2.0/. -TEST_DIRS += ['gtest'] +TEST_DIRS += ["gtest"] EXPORTS.mozilla.intl += [ - 'LineBreaker.h', - 'WordBreaker.h', + "LineBreaker.h", + "WordBreaker.h", ] UNIFIED_SOURCES += [ - 'LineBreaker.cpp', - 'WordBreaker.cpp', + "LineBreaker.cpp", + "WordBreaker.cpp", ] -if CONFIG['MOZ_WIDGET_TOOLKIT'] == 'gtk': +if CONFIG["MOZ_WIDGET_TOOLKIT"] == "gtk": SOURCES += [ - 'nsPangoBreaker.cpp', + "nsPangoBreaker.cpp", ] - CXXFLAGS += CONFIG['MOZ_PANGO_CFLAGS'] -elif CONFIG['MOZ_WIDGET_TOOLKIT'] == 'windows': + CXXFLAGS += CONFIG["MOZ_PANGO_CFLAGS"] +elif CONFIG["MOZ_WIDGET_TOOLKIT"] == "windows": SOURCES += [ - 'nsUniscribeBreaker.cpp', + "nsUniscribeBreaker.cpp", ] -elif CONFIG['MOZ_WIDGET_TOOLKIT'] == 'cocoa': +elif CONFIG["MOZ_WIDGET_TOOLKIT"] == "cocoa": UNIFIED_SOURCES += [ - 'nsCarbonBreaker.cpp', + "nsCarbonBreaker.cpp", ] else: SOURCES += [ - 'nsRuleBreaker.cpp', + "nsRuleBreaker.cpp", ] SOURCES += [ - 'rulebrk.c', + "rulebrk.c", ] -FINAL_LIBRARY = 'xul' +FINAL_LIBRARY = "xul" diff --git a/intl/moz.build b/intl/moz.build index 3ba9809ea88853..9934d4313c2e96 100644 --- a/intl/moz.build +++ b/intl/moz.build @@ -5,35 +5,35 @@ # file, You can obtain one at http://mozilla.org/MPL/2.0/. 
TEST_DIRS += [ - 'gtest', + "gtest", ] DIRS += [ - 'hyphenation/glue', - 'locale', - 'locales', - 'lwbrk', - 'strres', - 'unicharutil', - 'l10n', + "hyphenation/glue", + "locale", + "locales", + "lwbrk", + "strres", + "unicharutil", + "l10n", ] DIRS += [ - 'uconv', - 'build', + "uconv", + "build", ] EXPORTS.mozilla += [ - 'Encoding.h', - 'EncodingDetector.h', - 'JapaneseDetector.h', + "Encoding.h", + "EncodingDetector.h", + "JapaneseDetector.h", ] EXPORTS += [ - '../third_party/rust/chardetng_c/include/chardetng.h', - '../third_party/rust/encoding_c/include/encoding_rs.h', - '../third_party/rust/encoding_c/include/encoding_rs_statics.h', - '../third_party/rust/shift_or_euc_c/include/shift_or_euc.h', + "../third_party/rust/chardetng_c/include/chardetng.h", + "../third_party/rust/encoding_c/include/encoding_rs.h", + "../third_party/rust/encoding_c/include/encoding_rs_statics.h", + "../third_party/rust/shift_or_euc_c/include/shift_or_euc.h", ] with Files("**"): @@ -54,4 +54,4 @@ with Files("update*"): with Files("icu_sources_data.py"): BUG_COMPONENT = ("Firefox Build System", "General") -SPHINX_TREES['/intl'] = 'docs' +SPHINX_TREES["/intl"] = "docs" diff --git a/intl/strres/moz.build b/intl/strres/moz.build index 6089288da172a8..7436fc9ec3684b 100644 --- a/intl/strres/moz.build +++ b/intl/strres/moz.build @@ -4,25 +4,25 @@ # License, v. 2.0. If a copy of the MPL was not distributed with this # file, You can obtain one at http://mozilla.org/MPL/2.0/. -XPCSHELL_TESTS_MANIFESTS += ['tests/unit/xpcshell.ini'] +XPCSHELL_TESTS_MANIFESTS += ["tests/unit/xpcshell.ini"] XPIDL_SOURCES += [ - 'nsIStringBundle.idl', + "nsIStringBundle.idl", ] -XPIDL_MODULE = 'intl' +XPIDL_MODULE = "intl" UNIFIED_SOURCES += [ - 'nsStringBundle.cpp', + "nsStringBundle.cpp", ] LOCAL_INCLUDES += [ - '/xpcom/ds', + "/xpcom/ds", ] -include('/ipc/chromium/chromium-config.mozbuild') +include("/ipc/chromium/chromium-config.mozbuild") -FINAL_LIBRARY = 'xul' +FINAL_LIBRARY = "xul" -if CONFIG['CC_TYPE'] in ('clang', 'gcc'): - CXXFLAGS += ['-Wno-error=shadow'] +if CONFIG["CC_TYPE"] in ("clang", "gcc"): + CXXFLAGS += ["-Wno-error=shadow"] diff --git a/intl/uconv/moz.build b/intl/uconv/moz.build index 3784ac29ba2c91..acad06dfa17851 100644 --- a/intl/uconv/moz.build +++ b/intl/uconv/moz.build @@ -4,31 +4,31 @@ # License, v. 2.0. If a copy of the MPL was not distributed with this # file, You can obtain one at http://mozilla.org/MPL/2.0/. -TEST_DIRS += ['tests'] +TEST_DIRS += ["tests"] XPIDL_SOURCES += [ - 'nsIScriptableUConv.idl', - 'nsITextToSubURI.idl', + "nsIScriptableUConv.idl", + "nsITextToSubURI.idl", ] -XPIDL_MODULE = 'uconv' +XPIDL_MODULE = "uconv" EXPORTS += [ - 'nsConverterInputStream.h', + "nsConverterInputStream.h", ] UNIFIED_SOURCES += [ - 'nsConverterInputStream.cpp', - 'nsConverterOutputStream.cpp', - 'nsScriptableUConv.cpp', - 'nsTextToSubURI.cpp', + "nsConverterInputStream.cpp", + "nsConverterOutputStream.cpp", + "nsScriptableUConv.cpp", + "nsTextToSubURI.cpp", ] XPCOM_MANIFESTS += [ - 'components.conf', + "components.conf", ] -FINAL_LIBRARY = 'xul' +FINAL_LIBRARY = "xul" -if CONFIG['CC_TYPE'] in ('clang', 'gcc'): - CXXFLAGS += ['-Wno-error=shadow'] +if CONFIG["CC_TYPE"] in ("clang", "gcc"): + CXXFLAGS += ["-Wno-error=shadow"] diff --git a/intl/uconv/tests/moz.build b/intl/uconv/tests/moz.build index 1e622c73bbfd3e..4400a9849b47cd 100644 --- a/intl/uconv/tests/moz.build +++ b/intl/uconv/tests/moz.build @@ -4,7 +4,6 @@ # License, v. 2.0. 
If a copy of the MPL was not distributed with this # file, You can obtain one at http://mozilla.org/MPL/2.0/. -XPCSHELL_TESTS_MANIFESTS += ['unit/xpcshell.ini'] - -MOCHITEST_MANIFESTS += ['mochitest.ini'] +XPCSHELL_TESTS_MANIFESTS += ["unit/xpcshell.ini"] +MOCHITEST_MANIFESTS += ["mochitest.ini"] diff --git a/intl/unicharutil/moz.build b/intl/unicharutil/moz.build index 4dcdb1360584f8..ec7b335f9d3185 100644 --- a/intl/unicharutil/moz.build +++ b/intl/unicharutil/moz.build @@ -4,10 +4,10 @@ # License, v. 2.0. If a copy of the MPL was not distributed with this # file, You can obtain one at http://mozilla.org/MPL/2.0/. -DIRS += ['util'] +DIRS += ["util"] EXPORTS += [ - 'nsUGenCategory.h', + "nsUGenCategory.h", ] -FINAL_LIBRARY = 'xul' +FINAL_LIBRARY = "xul" diff --git a/intl/unicharutil/util/moz.build b/intl/unicharutil/util/moz.build index b12b3bb4f1fb7c..993daf7ded90cb 100644 --- a/intl/unicharutil/util/moz.build +++ b/intl/unicharutil/util/moz.build @@ -5,24 +5,24 @@ # file, You can obtain one at http://mozilla.org/MPL/2.0/. EXPORTS += [ - 'GreekCasing.h', - 'ICUUtils.h', - 'IrishCasing.h', - 'nsBidiUtils.h', - 'nsSpecialCasingData.h', - 'nsUnicharUtils.h', - 'nsUnicodeProperties.h', - 'nsUnicodeScriptCodes.h', + "GreekCasing.h", + "ICUUtils.h", + "IrishCasing.h", + "nsBidiUtils.h", + "nsSpecialCasingData.h", + "nsUnicharUtils.h", + "nsUnicodeProperties.h", + "nsUnicodeScriptCodes.h", ] UNIFIED_SOURCES += [ - 'GreekCasing.cpp', - 'ICUUtils.cpp', - 'IrishCasing.cpp', - 'nsBidiUtils.cpp', - 'nsSpecialCasingData.cpp', - 'nsUnicharUtils.cpp', - 'nsUnicodeProperties.cpp', + "GreekCasing.cpp", + "ICUUtils.cpp", + "IrishCasing.cpp", + "nsBidiUtils.cpp", + "nsSpecialCasingData.cpp", + "nsUnicharUtils.cpp", + "nsUnicodeProperties.cpp", ] -FINAL_LIBRARY = 'xul' +FINAL_LIBRARY = "xul" diff --git a/ipc/app/moz.build b/ipc/app/moz.build index 2dbc8084ca6186..4b4e9187db5018 100644 --- a/ipc/app/moz.build +++ b/ipc/app/moz.build @@ -4,73 +4,73 @@ # License, v. 2.0. If a copy of the MPL was not distributed with this # file, You can obtain one at http://mozilla.org/MPL/2.0/. 
-if CONFIG['MOZ_WIDGET_TOOLKIT'] == 'android': - Program(CONFIG['MOZ_CHILD_PROCESS_NAME']) +if CONFIG["MOZ_WIDGET_TOOLKIT"] == "android": + Program(CONFIG["MOZ_CHILD_PROCESS_NAME"]) SOURCES += [ - 'MozillaRuntimeMainAndroid.cpp', + "MozillaRuntimeMainAndroid.cpp", ] else: - GeckoProgram(CONFIG['MOZ_CHILD_PROCESS_NAME'], linkage='dependent') + GeckoProgram(CONFIG["MOZ_CHILD_PROCESS_NAME"], linkage="dependent") SOURCES += [ - 'MozillaRuntimeMain.cpp', + "MozillaRuntimeMain.cpp", ] -include('/ipc/chromium/chromium-config.mozbuild') +include("/ipc/chromium/chromium-config.mozbuild") LOCAL_INCLUDES += [ - '/toolkit/xre', - '/xpcom/base', + "/toolkit/xre", + "/xpcom/base", ] # DELAYLOAD_DLLS in this block ensures that the DLL blocklist is functional -if CONFIG['OS_ARCH'] == 'WINNT': +if CONFIG["OS_ARCH"] == "WINNT": DELAYLOAD_DLLS += [ - 'nss3.dll', + "nss3.dll", ] - if CONFIG['MOZ_SANDBOX']: + if CONFIG["MOZ_SANDBOX"]: # For sandbox includes and the include dependencies those have LOCAL_INCLUDES += [ - '/security/sandbox/chromium', - '/security/sandbox/chromium-shim', + "/security/sandbox/chromium", + "/security/sandbox/chromium-shim", + ] + + OS_LIBS += [ + "version", ] - - OS_LIBS += [ - 'version', - ] USE_LIBS += [ - 'sandbox_s', + "sandbox_s", ] DELAYLOAD_DLLS += [ - 'winmm.dll', - 'user32.dll', + "winmm.dll", + "user32.dll", ] - - OS_LIBS += [ - 'ntdll', - ] + + OS_LIBS += [ + "ntdll", + ] DELAYLOAD_DLLS += [ - 'xul.dll', + "xul.dll", ] -if CONFIG['MOZ_SANDBOX'] and CONFIG['OS_TARGET'] == 'Darwin': +if CONFIG["MOZ_SANDBOX"] and CONFIG["OS_TARGET"] == "Darwin": # For sandbox includes and the include dependencies those have LOCAL_INCLUDES += [ - '/security/sandbox/chromium', - '/security/sandbox/chromium-shim', + "/security/sandbox/chromium", + "/security/sandbox/chromium-shim", ] USE_LIBS += [ - 'mozsandbox', + "mozsandbox", ] -if CONFIG['CC_TYPE'] == 'clang-cl': +if CONFIG["CC_TYPE"] == "clang-cl": # Always enter a Windows program through wmain, whether or not we're # a console application. - WIN32_EXE_LDFLAGS += ['-ENTRY:wmainCRTStartup'] + WIN32_EXE_LDFLAGS += ["-ENTRY:wmainCRTStartup"] # Control the default heap size. # This is the heap returned by GetProcessHeap(). @@ -80,11 +80,11 @@ if CONFIG['CC_TYPE'] == 'clang-cl': # The heap will grow if need be. # # Set it to 256k. See bug 127069. -if CONFIG['OS_ARCH'] == 'WINNT' and CONFIG['CC_TYPE'] not in ('clang', 'gcc'): - LDFLAGS += ['/HEAP:0x40000'] +if CONFIG["OS_ARCH"] == "WINNT" and CONFIG["CC_TYPE"] not in ("clang", "gcc"): + LDFLAGS += ["/HEAP:0x40000"] -if CONFIG['CC_TYPE'] in ('clang', 'gcc'): - CXXFLAGS += ['-Wshadow'] +if CONFIG["CC_TYPE"] in ("clang", "gcc"): + CXXFLAGS += ["-Wshadow"] -with Files("**"): - BUG_COMPONENT = ("Core", "DOM: Content Processes") +with Files("**"): + BUG_COMPONENT = ("Core", "DOM: Content Processes") diff --git a/ipc/chromium/moz.build b/ipc/chromium/moz.build index 6de7a1f7a77d05..62acea0547bce0 100644 --- a/ipc/chromium/moz.build +++ b/ipc/chromium/moz.build @@ -4,123 +4,123 @@ # License, v. 2.0. If a copy of the MPL was not distributed with this # file, You can obtain one at http://mozilla.org/MPL/2.0/. 
-libevent_path_prefix = 'src/third_party' -include(libevent_path_prefix + '/libeventcommon.mozbuild') +libevent_path_prefix = "src/third_party" +include(libevent_path_prefix + "/libeventcommon.mozbuild") UNIFIED_SOURCES += [ - 'src/base/at_exit.cc', - 'src/base/command_line.cc', - 'src/base/file_path.cc', - 'src/base/histogram.cc', - 'src/base/logging.cc', - 'src/base/message_loop.cc', - 'src/base/message_pump_default.cc', - 'src/base/pickle.cc', - 'src/base/rand_util.cc', - 'src/base/revocable_store.cc', - 'src/base/string_piece.cc', - 'src/base/string_util.cc', - 'src/base/thread.cc', - 'src/base/time.cc', - 'src/base/timer.cc', - 'src/chrome/common/child_process.cc', - 'src/chrome/common/child_process_host.cc', - 'src/chrome/common/child_thread.cc', - 'src/chrome/common/chrome_switches.cc', - 'src/chrome/common/ipc_channel_utils.cc', - 'src/chrome/common/ipc_message.cc', + "src/base/at_exit.cc", + "src/base/command_line.cc", + "src/base/file_path.cc", + "src/base/histogram.cc", + "src/base/logging.cc", + "src/base/message_loop.cc", + "src/base/message_pump_default.cc", + "src/base/pickle.cc", + "src/base/rand_util.cc", + "src/base/revocable_store.cc", + "src/base/string_piece.cc", + "src/base/string_util.cc", + "src/base/thread.cc", + "src/base/time.cc", + "src/base/timer.cc", + "src/chrome/common/child_process.cc", + "src/chrome/common/child_process_host.cc", + "src/chrome/common/child_thread.cc", + "src/chrome/common/chrome_switches.cc", + "src/chrome/common/ipc_channel_utils.cc", + "src/chrome/common/ipc_message.cc", ] if os_win: SOURCES += [ - 'src/base/condition_variable_win.cc', - 'src/base/lock_impl_win.cc', - 'src/base/message_pump_win.cc', - 'src/base/object_watcher.cc', - 'src/base/platform_thread_win.cc', - 'src/base/process_util_win.cc', - 'src/base/shared_memory_win.cc', - 'src/base/sys_string_conversions_win.cc', - 'src/base/thread_local_win.cc', - 'src/base/time_win.cc', - 'src/base/waitable_event_win.cc', - 'src/base/win_util.cc', - 'src/chrome/common/ipc_channel_win.cc', - 'src/chrome/common/process_watcher_win.cc', + "src/base/condition_variable_win.cc", + "src/base/lock_impl_win.cc", + "src/base/message_pump_win.cc", + "src/base/object_watcher.cc", + "src/base/platform_thread_win.cc", + "src/base/process_util_win.cc", + "src/base/shared_memory_win.cc", + "src/base/sys_string_conversions_win.cc", + "src/base/thread_local_win.cc", + "src/base/time_win.cc", + "src/base/waitable_event_win.cc", + "src/base/win_util.cc", + "src/chrome/common/ipc_channel_win.cc", + "src/chrome/common/process_watcher_win.cc", ] -elif not CONFIG['MOZ_SYSTEM_LIBEVENT']: - DIRS += ['src/third_party'] +elif not CONFIG["MOZ_SYSTEM_LIBEVENT"]: + DIRS += ["src/third_party"] if os_posix: UNIFIED_SOURCES += [ - 'src/base/condition_variable_posix.cc', - 'src/base/lock_impl_posix.cc', - 'src/base/message_pump_libevent.cc', - 'src/base/platform_thread_posix.cc', - 'src/base/process_util_posix.cc', - 'src/base/shared_memory_posix.cc', - 'src/base/string16.cc', - 'src/base/thread_local_posix.cc', - 'src/base/waitable_event_posix.cc', - 'src/chrome/common/file_descriptor_set_posix.cc', - 'src/chrome/common/ipc_channel_posix.cc', - 'src/chrome/common/process_watcher_posix_sigchld.cc', + "src/base/condition_variable_posix.cc", + "src/base/lock_impl_posix.cc", + "src/base/message_pump_libevent.cc", + "src/base/platform_thread_posix.cc", + "src/base/process_util_posix.cc", + "src/base/shared_memory_posix.cc", + "src/base/string16.cc", + "src/base/thread_local_posix.cc", + "src/base/waitable_event_posix.cc", + 
"src/chrome/common/file_descriptor_set_posix.cc", + "src/chrome/common/ipc_channel_posix.cc", + "src/chrome/common/process_watcher_posix_sigchld.cc", ] if os_macosx: UNIFIED_SOURCES += [ - 'src/base/chrome_application_mac.mm', - 'src/base/mac_util.mm', - 'src/base/message_pump_mac.mm', - 'src/base/process_util_mac.mm', - 'src/base/scoped_nsautorelease_pool.mm', - 'src/base/sys_string_conversions_mac.mm', - 'src/base/time_mac.cc', - 'src/chrome/common/mach_ipc_mac.mm', - 'src/chrome/common/mach_message_source_mac.cc', + "src/base/chrome_application_mac.mm", + "src/base/mac_util.mm", + "src/base/message_pump_mac.mm", + "src/base/process_util_mac.mm", + "src/base/scoped_nsautorelease_pool.mm", + "src/base/sys_string_conversions_mac.mm", + "src/base/time_mac.cc", + "src/chrome/common/mach_ipc_mac.mm", + "src/chrome/common/mach_message_source_mac.cc", ] SOURCES += [ # This file cannot be built in unified mode because of the redefinition # of NoOp. - 'src/base/platform_thread_mac.mm', + "src/base/platform_thread_mac.mm", ] if os_bsd: SOURCES += [ - 'src/base/process_util_linux.cc', - 'src/base/time_posix.cc', + "src/base/process_util_linux.cc", + "src/base/time_posix.cc", ] if os_linux: SOURCES += [ - 'src/base/process_util_linux.cc', - 'src/base/time_posix.cc', + "src/base/process_util_linux.cc", + "src/base/time_posix.cc", ] - if CONFIG['OS_TARGET'] == 'Android': + if CONFIG["OS_TARGET"] == "Android": UNIFIED_SOURCES += [ - 'src/base/message_pump_android.cc', + "src/base/message_pump_android.cc", ] - DEFINES['ANDROID'] = True - DEFINES['_POSIX_MONOTONIC_CLOCK'] = 0 + DEFINES["ANDROID"] = True + DEFINES["_POSIX_MONOTONIC_CLOCK"] = 0 if os_bsd or os_linux: - if CONFIG['MOZ_WIDGET_TOOLKIT'] == 'gtk': + if CONFIG["MOZ_WIDGET_TOOLKIT"] == "gtk": SOURCES += [ - 'src/base/message_pump_glib.cc', + "src/base/message_pump_glib.cc", ] if os_solaris: SOURCES += [ - 'src/base/process_util_linux.cc', - 'src/base/time_posix.cc', + "src/base/process_util_linux.cc", + "src/base/time_posix.cc", ] -CXXFLAGS += CONFIG['TK_CFLAGS'] +CXXFLAGS += CONFIG["TK_CFLAGS"] -include('/ipc/chromium/chromium-config.mozbuild') +include("/ipc/chromium/chromium-config.mozbuild") -FINAL_LIBRARY = 'xul' +FINAL_LIBRARY = "xul" -if CONFIG['CC_TYPE'] in ('clang', 'gcc'): - CXXFLAGS += ['-Wno-error=shadow'] +if CONFIG["CC_TYPE"] in ("clang", "gcc"): + CXXFLAGS += ["-Wno-error=shadow"] diff --git a/ipc/glue/moz.build b/ipc/glue/moz.build index cb71bde1d41444..924743735760e7 100644 --- a/ipc/glue/moz.build +++ b/ipc/glue/moz.build @@ -3,260 +3,256 @@ # This Source Code Form is subject to the terms of the Mozilla Public # License, v. 2.0. If a copy of the MPL was not distributed with this # file, You can obtain one at http://mozilla.org/MPL/2.0/. 
-include('/dom/media/webrtc/third_party_build/webrtc.mozbuild') +include("/dom/media/webrtc/third_party_build/webrtc.mozbuild") EXPORTS += [ - 'nsIIPCSerializableInputStream.h', + "nsIIPCSerializableInputStream.h", ] EXPORTS.mozilla.ipc += [ - 'BackgroundChild.h', - 'BackgroundParent.h', - 'BackgroundUtils.h', - 'BrowserProcessSubThread.h', - 'ByteBuf.h', - 'ByteBufUtils.h', - 'CrashReporterClient.h', - 'CrashReporterHelper.h', - 'CrashReporterHost.h', - 'CrossProcessMutex.h', - 'CrossProcessSemaphore.h', - 'EnvironmentMap.h', - 'FileDescriptor.h', - 'FileDescriptorSetChild.h', - 'FileDescriptorSetParent.h', - 'FileDescriptorUtils.h', - 'GeckoChildProcessHost.h', - 'IdleSchedulerChild.h', - 'IdleSchedulerParent.h', - 'InputStreamUtils.h', - 'IOThreadChild.h', - 'IPCStreamAlloc.h', - 'IPCStreamDestination.h', - 'IPCStreamSource.h', - 'IPCStreamUtils.h', - 'IPDLParamTraits.h', - 'LibrarySandboxPreload.h', - 'MessageChannel.h', - 'MessageLink.h', - 'Neutering.h', - 'ProcessChild.h', - 'ProtocolUtils.h', - 'ScopedXREEmbed.h', - 'SharedMemory.h', - 'SharedMemoryBasic.h', - 'Shmem.h', - 'TaintingIPCUtils.h', - 'TaskFactory.h', - 'Transport.h', - 'TransportSecurityInfoUtils.h', - 'URIUtils.h', - 'WindowsMessageLoop.h', + "BackgroundChild.h", + "BackgroundParent.h", + "BackgroundUtils.h", + "BrowserProcessSubThread.h", + "ByteBuf.h", + "ByteBufUtils.h", + "CrashReporterClient.h", + "CrashReporterHelper.h", + "CrashReporterHost.h", + "CrossProcessMutex.h", + "CrossProcessSemaphore.h", + "EnvironmentMap.h", + "FileDescriptor.h", + "FileDescriptorSetChild.h", + "FileDescriptorSetParent.h", + "FileDescriptorUtils.h", + "GeckoChildProcessHost.h", + "IdleSchedulerChild.h", + "IdleSchedulerParent.h", + "InputStreamUtils.h", + "IOThreadChild.h", + "IPCStreamAlloc.h", + "IPCStreamDestination.h", + "IPCStreamSource.h", + "IPCStreamUtils.h", + "IPDLParamTraits.h", + "LibrarySandboxPreload.h", + "MessageChannel.h", + "MessageLink.h", + "Neutering.h", + "ProcessChild.h", + "ProtocolUtils.h", + "ScopedXREEmbed.h", + "SharedMemory.h", + "SharedMemoryBasic.h", + "Shmem.h", + "TaintingIPCUtils.h", + "TaskFactory.h", + "Transport.h", + "TransportSecurityInfoUtils.h", + "URIUtils.h", + "WindowsMessageLoop.h", ] -if CONFIG['OS_ARCH'] == 'WINNT': +if CONFIG["OS_ARCH"] == "WINNT": EXPORTS.mozilla.ipc += [ - 'Transport_win.h', + "Transport_win.h", ] SOURCES += [ - 'SharedMemory_windows.cpp', - 'Transport_win.cpp', - 'WindowsMessageLoop.cpp', + "SharedMemory_windows.cpp", + "Transport_win.cpp", + "WindowsMessageLoop.cpp", ] else: EXPORTS.mozilla.ipc += [ - 'Transport_posix.h', + "Transport_posix.h", ] UNIFIED_SOURCES += [ - 'SharedMemory_posix.cpp', - 'Transport_posix.cpp', + "SharedMemory_posix.cpp", + "Transport_posix.cpp", ] -if CONFIG['OS_ARCH'] == 'WINNT': +if CONFIG["OS_ARCH"] == "WINNT": SOURCES += [ - 'CrossProcessMutex_windows.cpp', + "CrossProcessMutex_windows.cpp", ] -elif not CONFIG['OS_ARCH'] in ('NetBSD', 'OpenBSD'): +elif not CONFIG["OS_ARCH"] in ("NetBSD", "OpenBSD"): UNIFIED_SOURCES += [ - 'CrossProcessMutex_posix.cpp', + "CrossProcessMutex_posix.cpp", ] else: UNIFIED_SOURCES += [ - 'CrossProcessMutex_unimplemented.cpp', + "CrossProcessMutex_unimplemented.cpp", ] -if CONFIG['OS_ARCH'] == 'WINNT': +if CONFIG["OS_ARCH"] == "WINNT": SOURCES += [ - 'CrossProcessSemaphore_windows.cpp', + "CrossProcessSemaphore_windows.cpp", ] -elif CONFIG['OS_ARCH'] != 'Darwin': +elif CONFIG["OS_ARCH"] != "Darwin": UNIFIED_SOURCES += [ - 'CrossProcessSemaphore_posix.cpp', + "CrossProcessSemaphore_posix.cpp", ] else: 
UNIFIED_SOURCES += [ - 'CrossProcessSemaphore_unimplemented.cpp', + "CrossProcessSemaphore_unimplemented.cpp", ] # Android has its own, # almost-but-not-quite-compatible-with-POSIX-or-/dev/shm shared memory # impl. -if CONFIG['OS_TARGET'] == 'Android': - EXPORTS.mozilla.ipc += ['SharedMemoryBasic_android.h'] +if CONFIG["OS_TARGET"] == "Android": + EXPORTS.mozilla.ipc += ["SharedMemoryBasic_android.h"] UNIFIED_SOURCES += [ - 'SharedMemoryBasic_android.cpp', + "SharedMemoryBasic_android.cpp", ] -elif CONFIG['OS_ARCH'] == 'Darwin': - EXPORTS.mozilla.ipc += ['SharedMemoryBasic_mach.h'] +elif CONFIG["OS_ARCH"] == "Darwin": + EXPORTS.mozilla.ipc += ["SharedMemoryBasic_mach.h"] SOURCES += [ - 'SharedMemoryBasic_mach.mm', + "SharedMemoryBasic_mach.mm", ] else: - EXPORTS.mozilla.ipc += ['SharedMemoryBasic_chromium.h'] + EXPORTS.mozilla.ipc += ["SharedMemoryBasic_chromium.h"] -if CONFIG['OS_ARCH'] == 'Linux': +if CONFIG["OS_ARCH"] == "Linux": UNIFIED_SOURCES += [ - 'ProcessUtils_linux.cpp', - ] -elif CONFIG['OS_ARCH'] in ('DragonFly', 'FreeBSD', 'NetBSD', 'OpenBSD'): - UNIFIED_SOURCES += [ - 'ProcessUtils_bsd.cpp' - ] -elif CONFIG['OS_ARCH'] == 'Darwin': - UNIFIED_SOURCES += [ - 'ProcessUtils_mac.mm' + "ProcessUtils_linux.cpp", ] +elif CONFIG["OS_ARCH"] in ("DragonFly", "FreeBSD", "NetBSD", "OpenBSD"): + UNIFIED_SOURCES += ["ProcessUtils_bsd.cpp"] +elif CONFIG["OS_ARCH"] == "Darwin": + UNIFIED_SOURCES += ["ProcessUtils_mac.mm"] else: UNIFIED_SOURCES += [ - 'ProcessUtils_none.cpp', + "ProcessUtils_none.cpp", ] -if CONFIG['OS_ARCH'] != 'WINNT': +if CONFIG["OS_ARCH"] != "WINNT": EXPORTS.mozilla.ipc += [ - 'FileDescriptorShuffle.h', + "FileDescriptorShuffle.h", ] UNIFIED_SOURCES += [ - 'FileDescriptorShuffle.cpp', + "FileDescriptorShuffle.cpp", ] EXPORTS.ipc += [ - 'IPCMessageUtils.h', + "IPCMessageUtils.h", ] UNIFIED_SOURCES += [ - 'BackgroundImpl.cpp', - 'BackgroundUtils.cpp', - 'BrowserProcessSubThread.cpp', - 'CrashReporterClient.cpp', - 'CrashReporterHost.cpp', - 'FileDescriptor.cpp', - 'FileDescriptorUtils.cpp', - 'IdleSchedulerChild.cpp', - 'IdleSchedulerParent.cpp', - 'InputStreamUtils.cpp', - 'IPCMessageUtils.cpp', - 'IPCStreamChild.cpp', - 'IPCStreamDestination.cpp', - 'IPCStreamParent.cpp', - 'IPCStreamSource.cpp', - 'IPCStreamUtils.cpp', - 'LibrarySandboxPreload.cpp', - 'MessageChannel.cpp', - 'MessageLink.cpp', - 'MessagePump.cpp', - 'ProcessChild.cpp', - 'ProcessUtils_common.cpp', - 'ProtocolUtils.cpp', - 'ScopedXREEmbed.cpp', - 'SharedMemory.cpp', - 'Shmem.cpp', - 'StringUtil.cpp', - 'TransportSecurityInfoUtils.cpp', - 'URIUtils.cpp', + "BackgroundImpl.cpp", + "BackgroundUtils.cpp", + "BrowserProcessSubThread.cpp", + "CrashReporterClient.cpp", + "CrashReporterHost.cpp", + "FileDescriptor.cpp", + "FileDescriptorUtils.cpp", + "IdleSchedulerChild.cpp", + "IdleSchedulerParent.cpp", + "InputStreamUtils.cpp", + "IPCMessageUtils.cpp", + "IPCStreamChild.cpp", + "IPCStreamDestination.cpp", + "IPCStreamParent.cpp", + "IPCStreamSource.cpp", + "IPCStreamUtils.cpp", + "LibrarySandboxPreload.cpp", + "MessageChannel.cpp", + "MessageLink.cpp", + "MessagePump.cpp", + "ProcessChild.cpp", + "ProcessUtils_common.cpp", + "ProtocolUtils.cpp", + "ScopedXREEmbed.cpp", + "SharedMemory.cpp", + "Shmem.cpp", + "StringUtil.cpp", + "TransportSecurityInfoUtils.cpp", + "URIUtils.cpp", ] SOURCES += [ - 'BackgroundChildImpl.cpp', - 'BackgroundParentImpl.cpp', - 'FileDescriptorSetChild.cpp', - 'FileDescriptorSetParent.cpp', + "BackgroundChildImpl.cpp", + "BackgroundParentImpl.cpp", + "FileDescriptorSetChild.cpp", + 
"FileDescriptorSetParent.cpp", ] -if CONFIG['OS_ARCH'] == 'Darwin': +if CONFIG["OS_ARCH"] == "Darwin": # GeckoChildProcessHost.cpp cannot be built unified due to OSX header # clashes with TextRange. SOURCES += [ - 'GeckoChildProcessHost.cpp', + "GeckoChildProcessHost.cpp", ] else: UNIFIED_SOURCES += [ - 'GeckoChildProcessHost.cpp', + "GeckoChildProcessHost.cpp", ] LOCAL_INCLUDES += [ - '/caps', - '/dom/broadcastchannel', - '/dom/indexedDB', - '/dom/storage', - '/netwerk/base', - '/third_party/libwebrtc', - '/third_party/libwebrtc/webrtc', - '/xpcom/build', + "/caps", + "/dom/broadcastchannel", + "/dom/indexedDB", + "/dom/storage", + "/netwerk/base", + "/third_party/libwebrtc", + "/third_party/libwebrtc/webrtc", + "/xpcom/build", ] IPDL_SOURCES = [ - 'InputStreamParams.ipdlh', - 'IPCStream.ipdlh', - 'PBackground.ipdl', - 'PBackgroundSharedTypes.ipdlh', - 'PBackgroundTest.ipdl', - 'PChildToParentStream.ipdl', - 'PFileDescriptorSet.ipdl', - 'PIdleScheduler.ipdl', - 'PParentToChildStream.ipdl', - 'ProtocolTypes.ipdlh', - 'URIParams.ipdlh', + "InputStreamParams.ipdlh", + "IPCStream.ipdlh", + "PBackground.ipdl", + "PBackgroundSharedTypes.ipdlh", + "PBackgroundTest.ipdl", + "PChildToParentStream.ipdl", + "PFileDescriptorSet.ipdl", + "PIdleScheduler.ipdl", + "PParentToChildStream.ipdl", + "ProtocolTypes.ipdlh", + "URIParams.ipdlh", ] -if CONFIG['MOZ_ENABLE_FORKSERVER']: +if CONFIG["MOZ_ENABLE_FORKSERVER"]: EXPORTS.mozilla.ipc += [ - 'ForkServer.h', - 'ForkServiceChild.h', - 'MiniTransceiver.h', + "ForkServer.h", + "ForkServiceChild.h", + "MiniTransceiver.h", ] UNIFIED_SOURCES += [ - 'ForkServer.cpp', - 'ForkServiceChild.cpp', - 'MiniTransceiver.cpp', + "ForkServer.cpp", + "ForkServiceChild.cpp", + "MiniTransceiver.cpp", ] XPCOM_MANIFESTS += [ - 'components.conf', + "components.conf", ] LOCAL_INCLUDES += [ - '/dom/ipc', - '/toolkit/crashreporter', - '/toolkit/xre', - '/xpcom/base', - '/xpcom/threads', + "/dom/ipc", + "/toolkit/crashreporter", + "/toolkit/xre", + "/xpcom/base", + "/xpcom/threads", ] -include('/ipc/chromium/chromium-config.mozbuild') +include("/ipc/chromium/chromium-config.mozbuild") -FINAL_LIBRARY = 'xul' +FINAL_LIBRARY = "xul" -for var in ('MOZ_CHILD_PROCESS_NAME', 'MOZ_CHILD_PROCESS_BUNDLE'): +for var in ("MOZ_CHILD_PROCESS_NAME", "MOZ_CHILD_PROCESS_BUNDLE"): DEFINES[var] = '"%s"' % CONFIG[var] -if CONFIG['MOZ_SANDBOX'] and CONFIG['OS_ARCH'] == 'WINNT': +if CONFIG["MOZ_SANDBOX"] and CONFIG["OS_ARCH"] == "WINNT": LOCAL_INCLUDES += [ - '/security/sandbox/chromium', - '/security/sandbox/chromium-shim', - '/security/sandbox/win/src/sandboxbroker', + "/security/sandbox/chromium", + "/security/sandbox/chromium-shim", + "/security/sandbox/win/src/sandboxbroker", ] -if CONFIG['CC_TYPE'] in ('clang', 'gcc'): - CXXFLAGS += ['-Wno-shadow'] +if CONFIG["CC_TYPE"] in ("clang", "gcc"): + CXXFLAGS += ["-Wno-shadow"] # Add libFuzzer configuration directives -include('/tools/fuzzing/libfuzzer-config.mozbuild') +include("/tools/fuzzing/libfuzzer-config.mozbuild") diff --git a/ipc/gtest/moz.build b/ipc/gtest/moz.build index 33aaa285d60de8..f1f7fb595ebf72 100644 --- a/ipc/gtest/moz.build +++ b/ipc/gtest/moz.build @@ -4,13 +4,13 @@ # License, v. 2.0. If a copy of the MPL was not distributed with this # file, You can obtain one at http://mozilla.org/MPL/2.0/. 
-Library('ipctest') +Library("ipctest") SOURCES += [ - 'TestLogging.cpp', - 'TestSharedMemory.cpp', + "TestLogging.cpp", + "TestSharedMemory.cpp", ] -include('/ipc/chromium/chromium-config.mozbuild') +include("/ipc/chromium/chromium-config.mozbuild") -FINAL_LIBRARY = 'xul-gtest' +FINAL_LIBRARY = "xul-gtest" diff --git a/ipc/ipdl/ipdl.py b/ipc/ipdl/ipdl.py index 657041cef4efd2..e41691a7428b8c 100644 --- a/ipc/ipdl/ipdl.py +++ b/ipc/ipdl/ipdl.py @@ -16,31 +16,69 @@ def log(minv, fmt, *args): if _verbosity >= minv: print(fmt % args) + # process command line -op = optparse.OptionParser(usage='ipdl.py [options] IPDLfiles...') -op.add_option('-I', '--include', dest='includedirs', default=[], - action='append', - help='Additional directory to search for included protocol specifications') -op.add_option('-s', '--sync-msg-list', dest='syncMsgList', default='sync-messages.ini', - help="Config file listing allowed sync messages") -op.add_option('-m', '--msg-metadata', dest='msgMetadata', default='message-metadata.ini', - help="Predicted message sizes for reducing serialization malloc overhead.") -op.add_option('-v', '--verbose', dest='verbosity', default=1, action='count', - help='Verbose logging (specify -vv or -vvv for very verbose logging)') -op.add_option('-q', '--quiet', dest='verbosity', action='store_const', const=0, - help="Suppress logging output") -op.add_option('-d', '--outheaders-dir', dest='headersdir', default='.', - help="""Directory into which C++ headers will be generated. +op = optparse.OptionParser(usage="ipdl.py [options] IPDLfiles...") +op.add_option( + "-I", + "--include", + dest="includedirs", + default=[], + action="append", + help="Additional directory to search for included protocol specifications", +) +op.add_option( + "-s", + "--sync-msg-list", + dest="syncMsgList", + default="sync-messages.ini", + help="Config file listing allowed sync messages", +) +op.add_option( + "-m", + "--msg-metadata", + dest="msgMetadata", + default="message-metadata.ini", + help="Predicted message sizes for reducing serialization malloc overhead.", +) +op.add_option( + "-v", + "--verbose", + dest="verbosity", + default=1, + action="count", + help="Verbose logging (specify -vv or -vvv for very verbose logging)", +) +op.add_option( + "-q", + "--quiet", + dest="verbosity", + action="store_const", + const=0, + help="Suppress logging output", +) +op.add_option( + "-d", + "--outheaders-dir", + dest="headersdir", + default=".", + help="""Directory into which C++ headers will be generated. 
A protocol Foo in the namespace bar will cause the headers dir/bar/Foo.h, dir/bar/FooParent.h, and dir/bar/FooParent.h -to be generated""") -op.add_option('-o', '--outcpp-dir', dest='cppdir', default='.', - help="""Directory into which C++ sources will be generated +to be generated""", +) +op.add_option( + "-o", + "--outcpp-dir", + dest="cppdir", + default=".", + help="""Directory into which C++ sources will be generated A protocol Foo in the namespace bar will cause the sources cppdir/FooParent.cpp, cppdir/FooChild.cpp -to be generated""") +to be generated""", +) options, files = op.parse_args() _verbosity = options.verbosity @@ -53,8 +91,8 @@ def log(minv, fmt, *args): if not len(files): op.error("No IPDL files specified") -ipcmessagestartpath = os.path.join(headersdir, 'IPCMessageStart.h') -ipc_msgtype_name_path = os.path.join(cppdir, 'IPCMessageTypeName.cpp') +ipcmessagestartpath = os.path.join(headersdir, "IPCMessageStart.h") +ipc_msgtype_name_path = os.path.join(cppdir, "IPCMessageTypeName.cpp") log(2, 'Generated C++ headers will be generated relative to "%s"', headersdir) log(2, 'Generated C++ sources will be generated in "%s"', cppdir) @@ -65,38 +103,38 @@ def log(minv, fmt, *args): def normalizedFilename(f): - if f == '-': - return '' + if f == "-": + return "" return f -log(2, 'Reading sync message list') +log(2, "Reading sync message list") parser = RawConfigParser() parser.read_file(open(options.syncMsgList)) syncMsgList = parser.sections() for section in syncMsgList: if not parser.get(section, "description"): - print('Error: Sync message %s lacks a description' % section, file=sys.stderr) + print("Error: Sync message %s lacks a description" % section, file=sys.stderr) sys.exit(1) # Read message metadata. Right now we only have 'segment_capacity' # for the standard segment size used for serialization. -log(2, 'Reading message metadata...') +log(2, "Reading message metadata...") msgMetadataConfig = RawConfigParser() msgMetadataConfig.read_file(open(options.msgMetadata)) segmentCapacityDict = {} for msgName in msgMetadataConfig.sections(): - if msgMetadataConfig.has_option(msgName, 'segment_capacity'): - capacity = msgMetadataConfig.get(msgName, 'segment_capacity') + if msgMetadataConfig.has_option(msgName, "segment_capacity"): + capacity = msgMetadataConfig.get(msgName, "segment_capacity") segmentCapacityDict[msgName] = capacity # First pass: parse and type-check all protocols for f in files: log(2, os.path.basename(f)) filename = normalizedFilename(f) - if f == '-': + if f == "-": fd = sys.stdin else: fd = open(f) @@ -106,16 +144,20 @@ def normalizedFilename(f): ast = ipdl.parse(specstring, filename, includedirs=includedirs) if ast is None: - print('Specification could not be parsed.', file=sys.stderr) + print("Specification could not be parsed.", file=sys.stderr) sys.exit(1) - log(2, 'checking types') + log(2, "checking types") if not ipdl.typecheck(ast): - print('Specification is not well typed.', file=sys.stderr) + print("Specification is not well typed.", file=sys.stderr) sys.exit(1) if not ipdl.checkSyncMessage(ast, syncMsgList): - print('Error: New sync IPC messages must be reviewed by an IPC peer and recorded in %s' % options.syncMsgList, file=sys.stderr) # NOQA: E501 + print( + "Error: New sync IPC messages must be reviewed by an IPC peer and recorded in %s" + % options.syncMsgList, + file=sys.stderr, + ) # NOQA: E501 sys.exit(1) if not ipdl.checkFixedSyncMessages(parser): @@ -134,7 +176,7 @@ def normalizedFilename(f): allprotocols.append(ast.protocol.name) # e.g. 
PContent::RequestMemoryReport (not prefixed or suffixed.) for md in ast.protocol.messageDecls: - allmessageprognames.append('%s::%s' % (md.namespace, md.decl.progname)) + allmessageprognames.append("%s::%s" % (md.namespace, md.decl.progname)) allprotocols.sort() @@ -142,35 +184,42 @@ def normalizedFilename(f): # This is a fool-proof of the 'message-metadata.ini' file. undefinedMessages = set(segmentCapacityDict.keys()) - set(allmessageprognames) if len(undefinedMessages) > 0: - print('Error: Undefined message names in message-metadata.ini:', file=sys.stderr) + print("Error: Undefined message names in message-metadata.ini:", file=sys.stderr) print(undefinedMessages, file=sys.stderr) sys.exit(1) ipcmsgstart = StringIO() -print(""" +print( + """ // CODE GENERATED by ipdl.py. Do not edit. #ifndef IPCMessageStart_h #define IPCMessageStart_h enum IPCMessageStart { -""", file=ipcmsgstart) +""", + file=ipcmsgstart, +) for name in allprotocols: print(" %sMsgStart," % name, file=ipcmsgstart) -print(""" +print( + """ LastMsgIndex }; static_assert(LastMsgIndex <= 65536, "need to update IPC_MESSAGE_MACRO"); #endif // ifndef IPCMessageStart_h -""", file=ipcmsgstart) +""", + file=ipcmsgstart, +) ipc_msgtype_name = StringIO() -print(""" +print( + """ // CODE GENERATED by ipdl.py. Do not edit. #include @@ -182,16 +231,19 @@ def normalizedFilename(f): namespace { enum IPCMessages { -""", file=ipc_msgtype_name) +""", + file=ipc_msgtype_name, +) for protocol in sorted(allmessages.keys()): for (msg, num) in allmessages[protocol].idnums: if num: print(" %s = %s," % (msg, num), file=ipc_msgtype_name) - elif not msg.endswith('End'): + elif not msg.endswith("End"): print(" %s__%s," % (protocol, msg), file=ipc_msgtype_name) -print(""" +print( + """ }; } // anonymous namespace @@ -201,17 +253,24 @@ def normalizedFilename(f): const char* StringFromIPCMessageType(uint32_t aMessageType) { switch (aMessageType) { -""", file=ipc_msgtype_name) +""", + file=ipc_msgtype_name, +) for protocol in sorted(allmessages.keys()): for (msg, num) in allmessages[protocol].idnums: - if num or msg.endswith('End'): + if num or msg.endswith("End"): continue - print(""" + print( + """ case %s__%s: - return "%s::%s";""" % (protocol, msg, protocol, msg), file=ipc_msgtype_name) + return "%s::%s";""" + % (protocol, msg, protocol, msg), + file=ipc_msgtype_name, + ) -print(""" +print( + """ case CHANNEL_OPENED_MESSAGE_TYPE: return "CHANNEL_OPENED_MESSAGE"; case SHMEM_DESTROYED_MESSAGE_TYPE: @@ -234,13 +293,16 @@ def normalizedFilename(f): const char* ProtocolIdToName(IPCMessageStart aId) { switch (aId) { -""", file=ipc_msgtype_name) +""", + file=ipc_msgtype_name, +) for name in allprotocols: print(" case %sMsgStart:" % name, file=ipc_msgtype_name) - print(" return \"%s\";" % name, file=ipc_msgtype_name) + print(' return "%s";' % name, file=ipc_msgtype_name) -print(""" +print( + """ default: return ""; } @@ -248,7 +310,9 @@ def normalizedFilename(f): } // namespace ipc } // namespace mozilla -""", file=ipc_msgtype_name) +""", + file=ipc_msgtype_name, +) ipdl.writeifmodified(ipcmsgstart.getvalue(), ipcmessagestartpath) ipdl.writeifmodified(ipc_msgtype_name.getvalue(), ipc_msgtype_name_path) diff --git a/ipc/ipdl/ipdl/__init__.py b/ipc/ipdl/ipdl/__init__.py index fd8252e8f4b41e..a85a12e14a99ad 100644 --- a/ipc/ipdl/ipdl/__init__.py +++ b/ipc/ipdl/ipdl/__init__.py @@ -4,8 +4,15 @@ from __future__ import print_function -__all__ = ['gencxx', 'genipdl', 'parse', 'typecheck', 'writeifmodified', - 'checkSyncMessage', 'checkFixedSyncMessages'] +__all__ = 
[ + "gencxx", + "genipdl", + "parse", + "typecheck", + "writeifmodified", + "checkSyncMessage", + "checkFixedSyncMessages", +] import os import sys @@ -20,28 +27,30 @@ from ipdl.cxx.cgen import CxxCodeGen -def parse(specstring, filename='/stdin', includedirs=[], errout=sys.stderr): - '''Return an IPDL AST if parsing was successful. Print errors to |errout| - if it is not.''' +def parse(specstring, filename="/stdin", includedirs=[], errout=sys.stderr): + """Return an IPDL AST if parsing was successful. Print errors to |errout| + if it is not.""" # The file type and name are later enforced by the type checker. # This is just a hint to the parser. prefix, ext = os.path.splitext(filename) name = os.path.basename(prefix) - if ext == '.ipdlh': - type = 'header' + if ext == ".ipdlh": + type = "header" else: - type = 'protocol' + type = "protocol" try: - return Parser(type, name).parse(specstring, os.path.abspath(filename), includedirs) + return Parser(type, name).parse( + specstring, os.path.abspath(filename), includedirs + ) except ParseError as p: print(p, file=errout) return None def typecheck(ast, errout=sys.stderr): - '''Return True iff |ast| is well typed. Print errors to |errout| if - it is not.''' + """Return True iff |ast| is well typed. Print errors to |errout| if + it is not.""" return TypeCheck().check(ast, errout) @@ -52,15 +61,16 @@ def resolveHeader(hdr): return [ hdr, os.path.join( - outheadersdir, - *([ns.name for ns in ast.namespaces] + [hdr.name])) + outheadersdir, *([ns.name for ns in ast.namespaces] + [hdr.name]) + ), ] def resolveCpp(cpp): return [cpp, os.path.join(outcppdir, cpp.name)] - for ast, filename in ([resolveHeader(hdr) for hdr in headers] - + [resolveCpp(cpp) for cpp in cpps]): + for ast, filename in [resolveHeader(hdr) for hdr in headers] + [ + resolveCpp(cpp) for cpp in cpps + ]: tempfile = StringIO() CxxCodeGen(tempfile).cgen(ast) writeifmodified(tempfile.getvalue(), filename) @@ -75,16 +85,16 @@ def genmsgenum(ast): def writeifmodified(contents, file): - contents = contents.encode('utf-8') + contents = contents.encode("utf-8") dir = os.path.dirname(file) os.path.exists(dir) or os.makedirs(dir) oldcontents = None if os.path.exists(file): - fd = open(file, 'rb') + fd = open(file, "rb") oldcontents = fd.read() fd.close() if oldcontents != contents: - fd = open(file, 'wb') + fd = open(file, "wb") fd.write(contents) fd.close() diff --git a/ipc/ipdl/ipdl/ast.py b/ipc/ipdl/ipdl/ast.py index 600a1a613d2c3d..cf37b39a76306a 100644 --- a/ipc/ipdl/ipdl/ast.py +++ b/ipc/ipdl/ipdl/ast.py @@ -14,10 +14,12 @@ HIGH_PRIORITY = 3 MEDIUMHIGH_PRIORITY = 4 + class Visitor: def defaultVisit(self, node): - raise Exception("INTERNAL ERROR: no visitor for node type `%s'" % - (node.__class__.__name__)) + raise Exception( + "INTERNAL ERROR: no visitor for node type `%s'" % (node.__class__.__name__) + ) def visitTranslationUnit(self, tu): for cxxInc in tu.cxxIncludes: @@ -91,19 +93,19 @@ def visitDecl(self, d): class Loc: - def __init__(self, filename='', lineno=0): + def __init__(self, filename="", lineno=0): assert filename self.filename = filename self.lineno = lineno def __repr__(self): - return '%r:%r' % (self.filename, self.lineno) + return "%r:%r" % (self.filename, self.lineno) def __str__(self): - return '%s:%s' % (self.filename, self.lineno) + return "%s:%s" % (self.filename, self.lineno) -Loc.NONE = Loc(filename='', lineno=0) +Loc.NONE = Loc(filename="", lineno=0) class _struct: @@ -115,9 +117,9 @@ def __init__(self, loc=Loc.NONE): self.loc = loc def accept(self, visitor): - 
visit = getattr(visitor, 'visit' + self.__class__.__name__, None) + visit = getattr(visitor, "visit" + self.__class__.__name__, None) if visit is None: - return getattr(visitor, 'defaultVisit')(self) + return getattr(visitor, "defaultVisit")(self) return visit(self) def addAttrs(self, attrsName): @@ -135,8 +137,7 @@ def addOuterNamespace(self, namespace): self.namespaces.insert(0, namespace) def qname(self): - return QualifiedId(self.loc, self.name, - [ns.name for ns in self.namespaces]) + return QualifiedId(self.loc, self.name, [ns.name for ns in self.namespaces]) class TranslationUnit(NamespacedNode): @@ -151,17 +152,23 @@ def __init__(self, type, name): self.structsAndUnions = [] self.protocol = None - def addCxxInclude(self, cxxInclude): self.cxxIncludes.append(cxxInclude) + def addCxxInclude(self, cxxInclude): + self.cxxIncludes.append(cxxInclude) - def addInclude(self, inc): self.includes.append(inc) + def addInclude(self, inc): + self.includes.append(inc) - def addStructDecl(self, struct): self.structsAndUnions.append(struct) + def addStructDecl(self, struct): + self.structsAndUnions.append(struct) - def addUnionDecl(self, union): self.structsAndUnions.append(union) + def addUnionDecl(self, union): + self.structsAndUnions.append(union) - def addUsingStmt(self, using): self.using.append(using) + def addUsingStmt(self, using): + self.using.append(using) - def setProtocol(self, protocol): self.protocol = protocol + def setProtocol(self, protocol): + self.protocol = protocol class CxxInclude(Node): @@ -173,19 +180,26 @@ def __init__(self, loc, cxxFile): class Include(Node): def __init__(self, loc, type, name): Node.__init__(self, loc) - suffix = 'ipdl' - if type == 'header': - suffix += 'h' + suffix = "ipdl" + if type == "header": + suffix += "h" self.file = "%s.%s" % (name, suffix) class UsingStmt(Node): - def __init__(self, loc, cxxTypeSpec, cxxHeader=None, kind=None, - refcounted=False, moveonly=False): + def __init__( + self, + loc, + cxxTypeSpec, + cxxHeader=None, + kind=None, + refcounted=False, + moveonly=False, + ): Node.__init__(self, loc) assert not isinstance(cxxTypeSpec, str) assert cxxHeader is None or isinstance(cxxHeader, str) - assert kind is None or kind == 'class' or kind == 'struct' + assert kind is None or kind == "class" or kind == "struct" self.type = cxxTypeSpec self.header = cxxHeader self.kind = kind @@ -196,10 +210,10 @@ def canBeForwardDeclared(self): return self.isClass() or self.isStruct() def isClass(self): - return self.kind == 'class' + return self.kind == "class" def isStruct(self): - return self.kind == 'struct' + return self.kind == "struct" def isRefcounted(self): return self.refcounted @@ -207,41 +221,46 @@ def isRefcounted(self): def isMoveonly(self): return self.moveonly + # "singletons" class PrettyPrinted: @classmethod - def __hash__(cls): return hash_str(cls.pretty) + def __hash__(cls): + return hash_str(cls.pretty) @classmethod - def __str__(cls): return cls.pretty + def __str__(cls): + return cls.pretty class ASYNC(PrettyPrinted): - pretty = 'async' + pretty = "async" + class TAINTED(PrettyPrinted): - pretty = 'tainted' + pretty = "tainted" + class INTR(PrettyPrinted): - pretty = 'intr' + pretty = "intr" class SYNC(PrettyPrinted): - pretty = 'sync' + pretty = "sync" class INOUT(PrettyPrinted): - pretty = 'inout' + pretty = "inout" class IN(PrettyPrinted): - pretty = 'in' + pretty = "in" class OUT(PrettyPrinted): - pretty = 'out' + pretty = "out" class Namespace(Node): @@ -308,9 +327,9 @@ def __init__(self, loc): self.direction = None 
self.inParams = [] self.outParams = [] - self.compress = '' - self.tainted = '' - self.verify = '' + self.compress = "" + self.tainted = "" + self.verify = "" def addInParams(self, inParamsList): self.inParams += inParamsList @@ -320,13 +339,13 @@ def addOutParams(self, outParamsList): def addModifiers(self, modifiers): for modifier in modifiers: - if modifier.startswith('compress'): + if modifier.startswith("compress"): self.compress = modifier - elif modifier == 'verify': + elif modifier == "verify": self.verify = modifier - elif modifier.startswith('tainted'): + elif modifier.startswith("tainted"): self.tainted = modifier - elif modifier != '': + elif modifier != "": raise Exception("Unexpected message modifier `%s'" % modifier) @@ -340,19 +359,20 @@ def __init__(self, loc, typespec, name): class TypeSpec(Node): def __init__(self, loc, spec): Node.__init__(self, loc) - self.spec = spec # QualifiedId - self.array = False # bool - self.maybe = False # bool - self.nullable = False # bool - self.uniqueptr = False # bool + self.spec = spec # QualifiedId + self.array = False # bool + self.maybe = False # bool + self.nullable = False # bool + self.uniqueptr = False # bool def basename(self): return self.spec.baseid - def __str__(self): return str(self.spec) + def __str__(self): + return str(self.spec) -class QualifiedId: # FIXME inherit from node? +class QualifiedId: # FIXME inherit from node? def __init__(self, loc, baseid, quals=[]): assert isinstance(baseid, str) for qual in quals: @@ -369,7 +389,8 @@ def qualify(self, id): def __str__(self): if 0 == len(self.quals): return self.baseid - return '::'.join(self.quals) + '::' + self.baseid + return "::".join(self.quals) + "::" + self.baseid + # added by type checking passes @@ -377,9 +398,9 @@ def __str__(self): class Decl(Node): def __init__(self, loc): Node.__init__(self, loc) - self.progname = None # what the programmer typed, if relevant - self.shortname = None # shortest way to refer to this decl - self.fullname = None # full way to refer to this decl + self.progname = None # what the programmer typed, if relevant + self.shortname = None # shortest way to refer to this decl + self.fullname = None # full way to refer to this decl self.loc = loc self.type = None self.scope = None diff --git a/ipc/ipdl/ipdl/builtin.py b/ipc/ipdl/ipdl/builtin.py index 32581da2adef45..a1b253964affbb 100644 --- a/ipc/ipdl/ipdl/builtin.py +++ b/ipc/ipdl/ipdl/builtin.py @@ -8,56 +8,53 @@ Types = ( # C types - 'bool', - 'char', - 'short', - 'int', - 'long', - 'float', - 'double', - + "bool", + "char", + "short", + "int", + "long", + "float", + "double", # stdint types - 'int8_t', - 'uint8_t', - 'int16_t', - 'uint16_t', - 'int32_t', - 'uint32_t', - 'int64_t', - 'uint64_t', - 'intptr_t', - 'uintptr_t', - + "int8_t", + "uint8_t", + "int16_t", + "uint16_t", + "int32_t", + "uint32_t", + "int64_t", + "uint64_t", + "intptr_t", + "uintptr_t", # You may be tempted to add size_t. Do not! See bug 1525199. 
- # Mozilla types: "less" standard things we know how serialize/deserialize - 'nsresult', - 'nsString', - 'nsCString', - 'nsDependentSubstring', - 'nsDependentCSubstring', - 'mozilla::ipc::Shmem', - 'mozilla::ipc::ByteBuf', - 'mozilla::UniquePtr', - 'mozilla::ipc::FileDescriptor' + "nsresult", + "nsString", + "nsCString", + "nsDependentSubstring", + "nsDependentCSubstring", + "mozilla::ipc::Shmem", + "mozilla::ipc::ByteBuf", + "mozilla::UniquePtr", + "mozilla::ipc::FileDescriptor", ) HeaderIncludes = ( - 'mozilla/Attributes.h', - 'IPCMessageStart.h', - 'ipc/IPCMessageUtils.h', - 'mozilla/RefPtr.h', - 'nsString.h', - 'nsTArray.h', - 'mozilla/ipc/ProtocolUtils.h', - 'mozilla/ipc/TaintingIPCUtils.h', - 'nsTHashtable.h', - 'mozilla/OperatorNewExtensions.h', - 'mozilla/UniquePtr.h', + "mozilla/Attributes.h", + "IPCMessageStart.h", + "ipc/IPCMessageUtils.h", + "mozilla/RefPtr.h", + "nsString.h", + "nsTArray.h", + "mozilla/ipc/ProtocolUtils.h", + "mozilla/ipc/TaintingIPCUtils.h", + "nsTHashtable.h", + "mozilla/OperatorNewExtensions.h", + "mozilla/UniquePtr.h", ) CppIncludes = ( - 'nsIFile.h', - 'GeckoProfiler.h', + "nsIFile.h", + "GeckoProfiler.h", ) diff --git a/ipc/ipdl/ipdl/cgen.py b/ipc/ipdl/ipdl/cgen.py index 93955c68592496..8ed8da4d8105dd 100644 --- a/ipc/ipdl/ipdl/cgen.py +++ b/ipc/ipdl/ipdl/cgen.py @@ -16,24 +16,26 @@ def __init__(self, outf=sys.stdout, indentCols=4): def write(self, str): self.outf.write(str) - def printdent(self, str=''): - self.write((' ' * self.col) + str) + def printdent(self, str=""): + self.write((" " * self.col) + str) - def println(self, str=''): - self.write(str + '\n') + def println(self, str=""): + self.write(str + "\n") def printdentln(self, str): - self.write((' ' * self.col) + str + '\n') + self.write((" " * self.col) + str + "\n") - def indent(self): self.col += self.indentCols + def indent(self): + self.col += self.indentCols - def dedent(self): self.col -= self.indentCols + def dedent(self): + self.col -= self.indentCols # ----------------------------------------------------------------------------- class IPDLCodeGen(CodePrinter, Visitor): - '''Spits back out equivalent IPDL to the code that generated this. -Also known as pretty-printing.''' + """Spits back out equivalent IPDL to the code that generated this. 
+ Also known as pretty-printing.""" def __init__(self, outf=sys.stdout, indentCols=4, printed=set()): CodePrinter.__init__(self, outf, indentCols) @@ -41,7 +43,7 @@ def __init__(self, outf=sys.stdout, indentCols=4, printed=set()): def visitTranslationUnit(self, tu): self.printed.add(tu.filename) - self.println('//\n// Automatically generated by ipdlc\n//') + self.println("//\n// Automatically generated by ipdlc\n//") CodeGen.visitTranslationUnit(self, tu) # NOQA: F821 def visitCxxInclude(self, inc): @@ -50,18 +52,19 @@ def visitCxxInclude(self, inc): def visitProtocolInclude(self, inc): self.println('include protocol "' + inc.file + '";') if inc.tu.filename not in self.printed: - self.println('/* Included file:') - IPDLCodeGen(outf=self.outf, indentCols=self.indentCols, - printed=self.printed).visitTranslationUnit(inc.tu) + self.println("/* Included file:") + IPDLCodeGen( + outf=self.outf, indentCols=self.indentCols, printed=self.printed + ).visitTranslationUnit(inc.tu) - self.println('*/') + self.println("*/") def visitProtocol(self, p): self.println() for namespace in p.namespaces: namespace.accept(self) - self.println('%s protocol %s\n{' % (p.sendSemantics[0], p.name)) + self.println("%s protocol %s\n{" % (p.sendSemantics[0], p.name)) self.indent() for mgs in p.managesStmts: @@ -74,32 +77,32 @@ def visitProtocol(self, p): self.println() self.dedent() - self.println('}') - self.write('}\n' * len(p.namespaces)) + self.println("}") + self.write("}\n" * len(p.namespaces)) def visitManagerStmt(self, mgr): - self.printdentln('manager ' + mgr.name + ';') + self.printdentln("manager " + mgr.name + ";") def visitManagesStmt(self, mgs): - self.printdentln('manages ' + mgs.name + ';') + self.printdentln("manages " + mgs.name + ";") def visitMessageDecl(self, msg): - self.printdent('%s %s %s(' % (msg.sendSemantics[0], msg.direction[0], msg.name)) + self.printdent("%s %s %s(" % (msg.sendSemantics[0], msg.direction[0], msg.name)) for i, inp in enumerate(msg.inParams): inp.accept(self) if i != (len(msg.inParams) - 1): - self.write(', ') - self.write(')') + self.write(", ") + self.write(")") if 0 == len(msg.outParams): - self.println(';') + self.println(";") return self.println() self.indent() - self.printdent('returns (') + self.printdent("returns (") for i, outp in enumerate(msg.outParams): outp.accept(self) if i != (len(msg.outParams) - 1): - self.write(', ') - self.println(');') + self.write(", ") + self.println(");") self.dedent() diff --git a/ipc/ipdl/ipdl/checker.py b/ipc/ipdl/ipdl/checker.py index 79572a45bbbb37..eb969b4d60bc02 100644 --- a/ipc/ipdl/ipdl/checker.py +++ b/ipc/ipdl/ipdl/checker.py @@ -22,12 +22,12 @@ def prettyMsgName(self, msg): return "%s::%s" % (self.currentProtocol, msg) def errorUnknownSyncMessage(self, loc, msg): - self.errors.append('%s: error: Unknown sync IPC message %s' % - (str(loc), msg)) + self.errors.append("%s: error: Unknown sync IPC message %s" % (str(loc), msg)) def errorAsyncMessageCanRemove(self, loc, msg): - self.errors.append('%s: error: IPC message %s is async, can be delisted' % - (str(loc), msg)) + self.errors.append( + "%s: error: IPC message %s is async, can be delisted" % (str(loc), msg) + ) def visitProtocol(self, p): self.errors = [] @@ -46,7 +46,9 @@ def visitMessageDecl(self, md): @staticmethod def getFixedSyncMessages(): - return set(SyncMessageChecker.syncMsgList) - set(SyncMessageChecker.seenSyncMessages) + return set(SyncMessageChecker.syncMsgList) - set( + SyncMessageChecker.seenSyncMessages + ) def checkSyncMessage(tu, syncMsgList, 
errout=sys.stderr): @@ -63,12 +65,17 @@ def checkFixedSyncMessages(config, errout=sys.stderr): fixed = SyncMessageChecker.getFixedSyncMessages() error_free = True for item in fixed: - protocol = item.split('::')[0] + protocol = item.split("::")[0] # Ignore things like sync messages in test protocols we didn't compile. # Also, ignore platform-specific IPC messages. - if protocol in SyncMessageChecker.seenProtocols and \ - 'platform' not in config.options(item): - print('Error: Sync IPC message %s not found, it appears to be fixed.\n' - 'Please remove it from sync-messages.ini.' % item, file=errout) + if ( + protocol in SyncMessageChecker.seenProtocols + and "platform" not in config.options(item) + ): + print( + "Error: Sync IPC message %s not found, it appears to be fixed.\n" + "Please remove it from sync-messages.ini." % item, + file=errout, + ) error_free = False return error_free diff --git a/ipc/ipdl/ipdl/cxx/ast.py b/ipc/ipdl/ipdl/cxx/ast.py index 7f63a51b0226a9..02f204f89236fd 100644 --- a/ipc/ipdl/ipdl/cxx/ast.py +++ b/ipc/ipdl/ipdl/cxx/ast.py @@ -10,8 +10,9 @@ class Visitor: def defaultVisit(self, node): - raise Exception("INTERNAL ERROR: no visitor for node type `%s'" % - (node.__class__.__name__)) + raise Exception( + "INTERNAL ERROR: no visitor for node type `%s'" % (node.__class__.__name__) + ) def visitWhitespace(self, ws): pass @@ -223,6 +224,7 @@ def visitStmtReturn(self, sr): if sr.expr is not None: sr.expr.accept(self) + # ------------------------------ @@ -231,9 +233,9 @@ def __init__(self): pass def accept(self, visitor): - visit = getattr(visitor, 'visit' + self.__class__.__name__, None) + visit = getattr(visitor, "visit" + self.__class__.__name__, None) if visit is None: - return getattr(visitor, 'defaultVisit')(self) + return getattr(visitor, "defaultVisit")(self) return visit(self) @@ -246,7 +248,7 @@ def __init__(self, ws, indent=False): self.indent = indent -Whitespace.NL = Whitespace('\n') +Whitespace.NL = Whitespace("\n") class VerbatimNode(Node): @@ -300,11 +302,12 @@ def addstmts(self, stmts): def addcode(self, tmpl, **context): from ipdl.cxx.code import StmtCode + self.addstmt(StmtCode(tmpl, **context)) class CppDirective(Node): - '''represents |#[directive] [rest]|, where |rest| is any string''' + """represents |#[directive] [rest]|, where |rest| is any string""" def __init__(self, directive, rest=None): Node.__init__(self) @@ -328,8 +331,10 @@ def addstmts(self, stmts): def addcode(self, tmpl, **context): from ipdl.cxx.code import StmtCode + self.addstmt(StmtCode(tmpl, **context)) + # ------------------------------ # type and decl thingies @@ -343,27 +348,33 @@ def __init__(self, name): class Type(Node): - def __init__(self, name, const=False, - ptr=False, ptrptr=False, ptrconstptr=False, - ref=False, rvalref=False, - hasimplicitcopyctor=True, - T=None, - inner=None): + def __init__( + self, + name, + const=False, + ptr=False, + ptrptr=False, + ptrconstptr=False, + ref=False, + rvalref=False, + hasimplicitcopyctor=True, + T=None, + inner=None, + ): """ -Represents the type |name::inner| with the ptr and const -modifiers as specified. + Represents the type |name::inner| with the ptr and const + modifiers as specified. -To avoid getting fancy with recursive types, we limit the kinds -of pointer types that can be be constructed. + To avoid getting fancy with recursive types, we limit the kinds + of pointer types that can be be constructed. 
- ptr => T* - ptrptr => T** - ptrconstptr => T* const* - ref => T& - rvalref => T&& + ptr => T* + ptrptr => T** + ptrconstptr => T* const* + ref => T& + rvalref => T&& -Any type, naked or pointer, can be const (const T) or ref (T&). -""" + Any type, naked or pointer, can be const (const T) or ref (T&).""" assert isinstance(name, str) assert isinstance(const, bool) assert isinstance(ptr, bool) @@ -388,42 +399,46 @@ def __init__(self, name, const=False, # need that for this codegen def __deepcopy__(self, memo): - return Type(self.name, - const=self.const, - ptr=self.ptr, - ptrptr=self.ptrptr, ptrconstptr=self.ptrconstptr, - ref=self.ref, rvalref=self.rvalref, - T=copy.deepcopy(self.T, memo), - inner=copy.deepcopy(self.inner, memo)) - - -Type.BOOL = Type('bool') -Type.INT = Type('int') -Type.INT32 = Type('int32_t') -Type.INTPTR = Type('intptr_t') -Type.NSRESULT = Type('nsresult') -Type.UINT32 = Type('uint32_t') -Type.UINT32PTR = Type('uint32_t', ptr=True) -Type.SIZE = Type('size_t') -Type.VOID = Type('void') -Type.VOIDPTR = Type('void', ptr=True) -Type.AUTO = Type('auto') -Type.AUTORVAL = Type('auto', rvalref=True) + return Type( + self.name, + const=self.const, + ptr=self.ptr, + ptrptr=self.ptrptr, + ptrconstptr=self.ptrconstptr, + ref=self.ref, + rvalref=self.rvalref, + T=copy.deepcopy(self.T, memo), + inner=copy.deepcopy(self.inner, memo), + ) + + +Type.BOOL = Type("bool") +Type.INT = Type("int") +Type.INT32 = Type("int32_t") +Type.INTPTR = Type("intptr_t") +Type.NSRESULT = Type("nsresult") +Type.UINT32 = Type("uint32_t") +Type.UINT32PTR = Type("uint32_t", ptr=True) +Type.SIZE = Type("size_t") +Type.VOID = Type("void") +Type.VOIDPTR = Type("void", ptr=True) +Type.AUTO = Type("auto") +Type.AUTORVAL = Type("auto", rvalref=True) class TypeArray(Node): def __init__(self, basetype, nmemb): - '''the type |basetype DECLNAME[nmemb]|. |nmemb| is an Expr''' + """the type |basetype DECLNAME[nmemb]|. |nmemb| is an Expr""" self.basetype = basetype self.nmemb = nmemb class TypeEnum(Node): def __init__(self, name=None): - '''name can be None''' + """name can be None""" Node.__init__(self) self.name = name - self.idnums = [] # pairs of ('Foo', [num]) or ('Foo', None) + self.idnums = [] # pairs of ('Foo', [num]) or ('Foo', None) def addId(self, id, num=None): self.idnums.append((id, num)) @@ -433,15 +448,15 @@ class TypeUnion(Node): def __init__(self, name=None): Node.__init__(self) self.name = name - self.components = [] # [ Decl ] + self.components = [] # [ Decl ] def addComponent(self, type, name): self.components.append(Decl(type, name)) class TypeFunction(Node): - def __init__(self, params=[], ret=Type('void')): - '''Anonymous function type std::function<>''' + def __init__(self, params=[], ret=Type("void")): + """Anonymous function type std::function<>""" self.params = params self.ret = ret @@ -460,8 +475,7 @@ def __lt__(self, other): return self.totypename < other.totypename def __eq__(self, other): - return (self.__class__ == other.__class__ - and self.totypename == other.totypename) + return self.__class__ == other.__class__ and self.totypename == other.totypename def __hash__(self): return hash_str(self.totypename) @@ -484,7 +498,7 @@ def __init__(self, pqname, cls=False, struct=False): class Decl(Node): - '''represents |Foo bar|, e.g. in a function signature''' + """represents |Foo bar|, e.g. 
in a function signature""" def __init__(self, type, name): assert type is not None @@ -505,17 +519,26 @@ def __init__(self, type, name, default=None): self.default = default def __deepcopy__(self, memo): - return Param(copy.deepcopy(self.type, memo), self.name, - copy.deepcopy(self.default, memo)) + return Param( + copy.deepcopy(self.type, memo), self.name, copy.deepcopy(self.default, memo) + ) + # ------------------------------ # class stuff class Class(Block): - def __init__(self, name, inherits=[], - interface=False, abstract=False, final=False, - specializes=None, struct=False): + def __init__( + self, + name, + inherits=[], + interface=False, + abstract=False, + final=False, + specializes=None, + struct=False, + ): assert not (interface and abstract) assert not (abstract and final) assert not (interface and final) @@ -523,16 +546,16 @@ def __init__(self, name, inherits=[], Block.__init__(self) self.name = name - self.inherits = inherits # [ Type ] - self.interface = interface # bool - self.abstract = abstract # bool - self.final = final # bool + self.inherits = inherits # [ Type ] + self.interface = interface # bool + self.abstract = abstract # bool + self.final = final # bool self.specializes = specializes # Type or None - self.struct = struct # bool + self.struct = struct # bool class Inherit(Node): - def __init__(self, type, viz='public'): + def __init__(self, type, viz="public"): assert isinstance(viz, str) Node.__init__(self) self.type = type @@ -544,6 +567,7 @@ def __init__(self, friend): Node.__init__(self) self.friend = friend + # Python2 polyfill for Python3's Enum() functional API. @@ -555,13 +579,23 @@ def make_enum(name, members_str): return type(name, (), members_dict) -MethodSpec = make_enum('MethodSpec', 'NONE VIRTUAL PURE OVERRIDE STATIC') +MethodSpec = make_enum("MethodSpec", "NONE VIRTUAL PURE OVERRIDE STATIC") class MethodDecl(Node): - def __init__(self, name, params=[], ret=Type('void'), - methodspec=MethodSpec.NONE, const=False, warn_unused=False, - force_inline=False, typeop=None, T=None, cls=None): + def __init__( + self, + name, + params=[], + ret=Type("void"), + methodspec=MethodSpec.NONE, + const=False, + warn_unused=False, + force_inline=False, + typeop=None, + T=None, + cls=None, + ): assert not (name and typeop) assert name is None or isinstance(name, str) assert not isinstance(ret, list) @@ -578,15 +612,15 @@ def __init__(self, name, params=[], ret=Type('void'), Node.__init__(self) self.name = name - self.params = params # [ Param ] - self.ret = ret # Type or None - self.methodspec = methodspec # enum - self.const = const # bool + self.params = params # [ Param ] + self.ret = ret # Type or None + self.methodspec = methodspec # enum + self.const = const # bool self.warn_unused = warn_unused # bool - self.force_inline = (force_inline or bool(T)) # bool - self.typeop = typeop # Type or None - self.T = T # Type or None - self.cls = cls # Class or None + self.force_inline = force_inline or bool(T) # bool + self.typeop = typeop # Type or None + self.T = T # Type or None + self.cls = cls # Class or None self.only_for_definition = False def __deepcopy__(self, memo): @@ -599,7 +633,8 @@ def __deepcopy__(self, memo): warn_unused=self.warn_unused, force_inline=self.force_inline, typeop=copy.deepcopy(self.typeop, memo), - T=copy.deepcopy(self.T, memo)) + T=copy.deepcopy(self.T, memo), + ) class MethodDefn(Block): @@ -609,13 +644,27 @@ def __init__(self, decl): class FunctionDecl(MethodDecl): - def __init__(self, name, params=[], ret=Type('void'), - 
methodspec=MethodSpec.NONE, warn_unused=False, - force_inline=False, T=None): + def __init__( + self, + name, + params=[], + ret=Type("void"), + methodspec=MethodSpec.NONE, + warn_unused=False, + force_inline=False, + T=None, + ): assert methodspec == MethodSpec.NONE or methodspec == MethodSpec.STATIC - MethodDecl.__init__(self, name, params=params, ret=ret, - methodspec=methodspec, warn_unused=warn_unused, - force_inline=force_inline, T=T) + MethodDecl.__init__( + self, + name, + params=params, + ret=ret, + methodspec=methodspec, + warn_unused=warn_unused, + force_inline=force_inline, + T=T, + ) class FunctionDefn(MethodDefn): @@ -625,14 +674,15 @@ def __init__(self, decl): class ConstructorDecl(MethodDecl): def __init__(self, name, params=[], explicit=False, force_inline=False): - MethodDecl.__init__(self, name, params=params, ret=None, - force_inline=force_inline) + MethodDecl.__init__( + self, name, params=params, ret=None, force_inline=force_inline + ) self.explicit = explicit def __deepcopy__(self, memo): - return ConstructorDecl(self.name, - copy.deepcopy(self.params, memo), - self.explicit) + return ConstructorDecl( + self.name, copy.deepcopy(self.params, memo), self.explicit + ) class ConstructorDefn(MethodDefn): @@ -645,17 +695,25 @@ class DestructorDecl(MethodDecl): def __init__(self, name, methodspec=MethodSpec.NONE, force_inline=False): # C++ allows pure or override destructors, but ipdl cgen does not. assert methodspec == MethodSpec.NONE or methodspec == MethodSpec.VIRTUAL - MethodDecl.__init__(self, name, params=[], ret=None, - methodspec=methodspec, force_inline=force_inline) + MethodDecl.__init__( + self, + name, + params=[], + ret=None, + methodspec=methodspec, + force_inline=force_inline, + ) def __deepcopy__(self, memo): - return DestructorDecl(self.name, - methodspec=self.methodspec, - force_inline=self.force_inline) + return DestructorDecl( + self.name, methodspec=self.methodspec, force_inline=self.force_inline + ) class DestructorDefn(MethodDefn): - def __init__(self, decl): MethodDefn.__init__(self, decl) + def __init__(self, decl): + MethodDefn.__init__(self, decl) + # ------------------------------ # expressions @@ -669,31 +727,33 @@ def __init__(self, name): self.name = name -ExprVar.THIS = ExprVar('this') +ExprVar.THIS = ExprVar("this") class ExprLiteral(Node): def __init__(self, value, type): - '''|type| is a Python format specifier; 'd' for example''' + """|type| is a Python format specifier; 'd' for example""" Node.__init__(self) self.value = value self.type = type @staticmethod - def Int(i): return ExprLiteral(i, 'd') + def Int(i): + return ExprLiteral(i, "d") @staticmethod - def String(s): return ExprLiteral('"' + s + '"', 's') + def String(s): + return ExprLiteral('"' + s + '"', "s") def __str__(self): - return ('%' + self.type) % (self.value) + return ("%" + self.type) % (self.value) ExprLiteral.ZERO = ExprLiteral.Int(0) ExprLiteral.ONE = ExprLiteral.Int(1) -ExprLiteral.NULL = ExprVar('nullptr') -ExprLiteral.TRUE = ExprVar('true') -ExprLiteral.FALSE = ExprVar('false') +ExprLiteral.NULL = ExprVar("nullptr") +ExprLiteral.TRUE = ExprVar("true") +ExprLiteral.FALSE = ExprVar("false") class ExprPrefixUnop(Node): @@ -705,22 +765,21 @@ def __init__(self, expr, op): class ExprNot(ExprPrefixUnop): def __init__(self, expr): - ExprPrefixUnop.__init__(self, expr, '!') + ExprPrefixUnop.__init__(self, expr, "!") class ExprAddrOf(ExprPrefixUnop): def __init__(self, expr): - ExprPrefixUnop.__init__(self, expr, '&') + ExprPrefixUnop.__init__(self, expr, "&") class 
ExprDeref(ExprPrefixUnop): def __init__(self, expr): - ExprPrefixUnop.__init__(self, expr, '*') + ExprPrefixUnop.__init__(self, expr, "*") class ExprCast(Node): - def __init__(self, expr, type, - static=False, const=False): + def __init__(self, expr, type, static=False, const=False): # Exactly one of these should be set assert static ^ const @@ -763,7 +822,7 @@ def __init__(self, obj, op, field): class ExprAssn(Node): - def __init__(self, lhs, rhs, op='='): + def __init__(self, lhs, rhs, op="="): Node.__init__(self) self.lhs = lhs self.op = op @@ -772,7 +831,7 @@ def __init__(self, lhs, rhs, op='='): class ExprCall(Node): def __init__(self, func, args=[]): - assert hasattr(func, 'accept') + assert hasattr(func, "accept") assert isinstance(args, list) for arg in args: assert arg and not isinstance(arg, str) @@ -818,6 +877,7 @@ def __init__(self, captures=[], params=[], ret=None): self.params = params self.ret = ret + # ------------------------------ # statements etc. @@ -847,9 +907,9 @@ def __init__(self, name): self.name = name -Label.PUBLIC = Label('public') -Label.PROTECTED = Label('protected') -Label.PRIVATE = Label('private') +Label.PUBLIC = Label("public") +Label.PROTECTED = Label("protected") +Label.PRIVATE = Label("private") class CaseLabel(Node): @@ -912,18 +972,24 @@ def __init__(self, expr): self.nr_cases = 0 def addcase(self, case, block): - '''NOTE: |case| is not checked for uniqueness''' + """NOTE: |case| is not checked for uniqueness""" assert not isinstance(case, str) - assert (isinstance(block, StmtBreak) - or isinstance(block, StmtReturn) - or isinstance(block, StmtSwitch) - or isinstance(block, GroupNode) - or isinstance(block, VerbatimNode) - or (hasattr(block, 'stmts') - and (isinstance(block.stmts[-1], StmtBreak) - or isinstance(block.stmts[-1], StmtReturn) - or isinstance(block.stmts[-1], GroupNode) - or isinstance(block.stmts[-1], VerbatimNode)))) + assert ( + isinstance(block, StmtBreak) + or isinstance(block, StmtReturn) + or isinstance(block, StmtSwitch) + or isinstance(block, GroupNode) + or isinstance(block, VerbatimNode) + or ( + hasattr(block, "stmts") + and ( + isinstance(block.stmts[-1], StmtBreak) + or isinstance(block.stmts[-1], StmtReturn) + or isinstance(block.stmts[-1], GroupNode) + or isinstance(block.stmts[-1], VerbatimNode) + ) + ) + ) self.addstmt(case) self.addstmt(block) self.nr_cases += 1 diff --git a/ipc/ipdl/ipdl/cxx/cgen.py b/ipc/ipdl/ipdl/cxx/cgen.py index 311c4d1b26272d..2fa74e8d353cc1 100644 --- a/ipc/ipdl/ipdl/cxx/cgen.py +++ b/ipc/ipdl/ipdl/cxx/cgen.py @@ -17,12 +17,12 @@ def cgen(self, cxxfile): def visitWhitespace(self, ws): if ws.indent: - self.printdent('') + self.printdent("") self.write(ws.ws) def visitVerbatimNode(self, verb): if verb.indent: - self.printdent('') + self.printdent("") self.write(verb.text) def visitGroupNode(self, group): @@ -34,115 +34,115 @@ def visitGroupNode(self, group): def visitCppDirective(self, cd): if cd.rest: - self.println('#%s %s' % (cd.directive, cd.rest)) + self.println("#%s %s" % (cd.directive, cd.rest)) else: - self.println('#%s' % (cd.directive)) + self.println("#%s" % (cd.directive)) def visitNamespace(self, ns): - self.println('namespace ' + ns.name + ' {') + self.println("namespace " + ns.name + " {") self.visitBlock(ns) - self.println('} // namespace ' + ns.name) + self.println("} // namespace " + ns.name) def visitType(self, t): if t.const: - self.write('const ') + self.write("const ") self.write(t.name) if t.T is not None: - self.write('<') + self.write("<") if type(t.T) is list: 
t.T[0].accept(self) for tt in t.T[1:]: - self.write(', ') + self.write(", ") tt.accept(self) else: t.T.accept(self) - self.write('>') + self.write(">") if t.inner is not None: - self.write('::') + self.write("::") t.inner.accept(self) - ts = '' + ts = "" if t.ptr: - ts += '*' + ts += "*" elif t.ptrptr: - ts += '**' + ts += "**" elif t.ptrconstptr: - ts += '* const*' + ts += "* const*" if t.ref: - ts += '&' + ts += "&" elif t.rvalref: - ts += '&&' + ts += "&&" self.write(ts) def visitTypeEnum(self, te): - self.write('enum') + self.write("enum") if te.name: - self.write(' ' + te.name) - self.println(' {') + self.write(" " + te.name) + self.println(" {") self.indent() nids = len(te.idnums) for i, (id, num) in enumerate(te.idnums): self.printdent(id) if num: - self.write(' = ' + str(num)) - if i != (nids-1): - self.write(',') + self.write(" = " + str(num)) + if i != (nids - 1): + self.write(",") self.println() self.dedent() - self.printdent('}') + self.printdent("}") def visitTypeUnion(self, u): - self.write('union') + self.write("union") if u.name: - self.write(' ' + u.name) - self.println(' {') + self.write(" " + u.name) + self.println(" {") self.indent() for decl in u.components: self.printdent() decl.accept(self) - self.println(';') + self.println(";") self.dedent() - self.printdent('}') + self.printdent("}") def visitTypeFunction(self, fn): - self.write('std::function<') + self.write("std::function<") fn.ret.accept(self) - self.write('(') + self.write("(") self.writeDeclList(fn.params) - self.write(')>') + self.write(")>") def visitTypedef(self, td): if td.templateargs: - formals = ', '.join(['class ' + T for T in td.templateargs]) - args = ', '.join(td.templateargs) - self.printdent('template<' + formals + '> using ' + td.totypename + ' = ') + formals = ", ".join(["class " + T for T in td.templateargs]) + args = ", ".join(td.templateargs) + self.printdent("template<" + formals + "> using " + td.totypename + " = ") td.fromtype.accept(self) - self.println('<' + args + '>;') + self.println("<" + args + ">;") else: - self.printdent('typedef ') + self.printdent("typedef ") td.fromtype.accept(self) - self.println(' ' + td.totypename + ';') + self.println(" " + td.totypename + ";") def visitUsing(self, us): - self.printdent('using ') + self.printdent("using ") us.type.accept(self) - self.println(';') + self.println(";") def visitForwardDecl(self, fd): if fd.cls: - self.printdent('class ') + self.printdent("class ") elif fd.struct: - self.printdent('struct ') + self.printdent("struct ") self.write(str(fd.pqname)) - self.println(';') + self.println(";") def visitDecl(self, d): # C-syntax arrays make code generation much more annoying @@ -152,83 +152,82 @@ def visitDecl(self, d): d.type.accept(self) if d.name: - self.write(' ' + d.name) + self.write(" " + d.name) if isinstance(d.type, TypeArray): - self.write('[') + self.write("[") d.type.nmemb.accept(self) - self.write(']') + self.write("]") def visitParam(self, p): self.visitDecl(p) if p.default is not None: - self.write(' = ') + self.write(" = ") p.default.accept(self) def visitClass(self, c): if c.specializes is not None: - self.printdentln('template<>') + self.printdentln("template<>") if c.struct: - self.printdent('struct') + self.printdent("struct") else: - self.printdent('class') - self.write(' ' + c.name) + self.printdent("class") + self.write(" " + c.name) if c.final: - self.write(' final') + self.write(" final") if c.specializes is not None: - self.write(' <') + self.write(" <") c.specializes.accept(self) - self.write('>') + 
self.write(">") ninh = len(c.inherits) if 0 < ninh: - self.println(' :') + self.println(" :") self.indent() for i, inherit in enumerate(c.inherits): self.printdent() inherit.accept(self) if i != (ninh - 1): - self.println(',') + self.println(",") self.dedent() self.println() - self.printdentln('{') + self.printdentln("{") self.indent() self.visitBlock(c) self.dedent() - self.printdentln('};') + self.printdentln("};") def visitInherit(self, inh): - self.write(inh.viz + ' ') + self.write(inh.viz + " ") inh.type.accept(self) def visitFriendClassDecl(self, fcd): - self.printdentln('friend class ' + fcd.friend + ';') + self.printdentln("friend class " + fcd.friend + ";") def visitMethodDecl(self, md): if md.T: - self.write('template<') - self.write('typename ') + self.write("template<") + self.write("typename ") md.T.accept(self) - self.println('>') + self.println(">") self.printdent() if md.warn_unused: - self.write('[[nodiscard]] ') + self.write("[[nodiscard]] ") if md.methodspec == MethodSpec.STATIC: - self.write('static ') - elif md.methodspec == MethodSpec.VIRTUAL or \ - md.methodspec == MethodSpec.PURE: - self.write('virtual ') + self.write("static ") + elif md.methodspec == MethodSpec.VIRTUAL or md.methodspec == MethodSpec.PURE: + self.write("virtual ") if md.ret: if md.only_for_definition: - self.write('auto ') + self.write("auto ") else: md.ret.accept(self) self.println() @@ -239,33 +238,33 @@ def visitMethodDecl(self, md): self.write(md.cls.name) if md.cls.specializes is not None: - self.write('<') + self.write("<") md.cls.specializes.accept(self) - self.write('>') - self.write('::') + self.write(">") + self.write("::") if md.typeop is not None: - self.write('operator ') + self.write("operator ") md.typeop.accept(self) else: if isinstance(md, DestructorDecl): - self.write('~') + self.write("~") self.write(md.name) - self.write('(') + self.write("(") self.writeDeclList(md.params) - self.write(')') + self.write(")") if md.const: - self.write(' const') + self.write(" const") if md.ret and md.only_for_definition: - self.write(' -> ') + self.write(" -> ") md.ret.accept(self) if md.methodspec == MethodSpec.OVERRIDE: - self.write(' override') + self.write(" override") elif md.methodspec == MethodSpec.PURE: - self.write(' = 0') + self.write(" = 0") def visitMethodDefn(self, md): # Method specifiers are for decls, not defns. 
@@ -275,41 +274,41 @@ def visitMethodDefn(self, md): md.decl.accept(self) self.println() - self.printdentln('{') + self.printdentln("{") self.indent() self.visitBlock(md) self.dedent() - self.printdentln('}') + self.printdentln("}") def visitConstructorDecl(self, cd): if cd.explicit: - self.write('explicit ') + self.write("explicit ") else: - self.write('MOZ_IMPLICIT ') + self.write("MOZ_IMPLICIT ") self.visitMethodDecl(cd) def visitConstructorDefn(self, cd): self.printdent() cd.decl.accept(self) if len(cd.memberinits): - self.println(' :') + self.println(" :") self.indent() ninits = len(cd.memberinits) for i, init in enumerate(cd.memberinits): self.printdent() init.accept(self) - if i != (ninits-1): - self.println(',') + if i != (ninits - 1): + self.println(",") self.dedent() self.println() - self.printdentln('{') + self.printdentln("{") self.indent() self.visitBlock(cd) self.dedent() - self.printdentln('}') + self.printdentln("}") def visitDestructorDecl(self, dd): self.visitMethodDecl(dd) @@ -319,13 +318,13 @@ def visitDestructorDefn(self, dd): dd.decl.accept(self) self.println() - self.printdentln('{') + self.printdentln("{") self.indent() self.visitBlock(dd) self.dedent() - self.printdentln('}') + self.printdentln("}") def visitExprLiteral(self, el): self.write(str(el)) @@ -334,199 +333,199 @@ def visitExprVar(self, ev): self.write(ev.name) def visitExprPrefixUnop(self, e): - self.write('(') + self.write("(") self.write(e.op) - self.write('(') + self.write("(") e.expr.accept(self) - self.write(')') - self.write(')') + self.write(")") + self.write(")") def visitExprCast(self, c): if c.static: - pfx, sfx = 'static_cast<', '>' + pfx, sfx = "static_cast<", ">" else: assert c.const - pfx, sfx = 'const_cast<', '>' + pfx, sfx = "const_cast<", ">" self.write(pfx) c.type.accept(self) - self.write(sfx + '(') + self.write(sfx + "(") c.expr.accept(self) - self.write(')') + self.write(")") def visitExprBinary(self, e): - self.write('(') + self.write("(") e.left.accept(self) - self.write(') ' + e.op + ' (') + self.write(") " + e.op + " (") e.right.accept(self) - self.write(')') + self.write(")") def visitExprConditional(self, c): - self.write('(') + self.write("(") c.cond.accept(self) - self.write(' ? ') + self.write(" ? 
") c.ife.accept(self) - self.write(' : ') + self.write(" : ") c.elsee.accept(self) - self.write(')') + self.write(")") def visitExprSelect(self, es): - self.write('(') + self.write("(") es.obj.accept(self) - self.write(')') + self.write(")") self.write(es.op) es.field.accept(self) def visitExprAssn(self, ea): ea.lhs.accept(self) - self.write(' ' + ea.op + ' ') + self.write(" " + ea.op + " ") ea.rhs.accept(self) def visitExprCall(self, ec): ec.func.accept(self) - self.write('(') + self.write("(") self.writeExprList(ec.args) - self.write(')') + self.write(")") def visitExprMove(self, em): self.visitExprCall(em) def visitExprNew(self, en): - self.write('new ') + self.write("new ") if en.newargs is not None: - self.write('(') + self.write("(") self.writeExprList(en.newargs) - self.write(') ') + self.write(") ") en.ctype.accept(self) if en.args is not None: - self.write('(') + self.write("(") self.writeExprList(en.args) - self.write(')') + self.write(")") def visitExprDelete(self, ed): - self.write('delete ') + self.write("delete ") ed.obj.accept(self) def visitExprLambda(self, l): - self.write('[') + self.write("[") ncaptures = len(l.captures) for i, c in enumerate(l.captures): c.accept(self) - if i != (ncaptures-1): - self.write(', ') - self.write('](') + if i != (ncaptures - 1): + self.write(", ") + self.write("](") self.writeDeclList(l.params) - self.write(')') + self.write(")") if l.ret: - self.write(' -> ') + self.write(" -> ") l.ret.accept(self) - self.println(' {') + self.println(" {") self.indent() self.visitBlock(l) self.dedent() - self.printdent('}') + self.printdent("}") def visitStmtBlock(self, b): - self.printdentln('{') + self.printdentln("{") self.indent() self.visitBlock(b) self.dedent() - self.printdentln('}') + self.printdentln("}") def visitLabel(self, label): - self.dedent() # better not be at global scope ... - self.printdentln(label.name + ':') + self.dedent() # better not be at global scope ... 
+ self.printdentln(label.name + ":") self.indent() def visitCaseLabel(self, cl): self.dedent() - self.printdentln('case ' + cl.name + ':') + self.printdentln("case " + cl.name + ":") self.indent() def visitDefaultLabel(self, dl): self.dedent() - self.printdentln('default:') + self.printdentln("default:") self.indent() def visitStmtIf(self, si): - self.printdent('if (') + self.printdent("if (") si.cond.accept(self) - self.println(') {') + self.println(") {") self.indent() si.ifb.accept(self) self.dedent() - self.printdentln('}') + self.printdentln("}") if si.elseb is not None: - self.printdentln('else {') + self.printdentln("else {") self.indent() si.elseb.accept(self) self.dedent() - self.printdentln('}') + self.printdentln("}") def visitStmtFor(self, sf): - self.printdent('for (') + self.printdent("for (") if sf.init is not None: sf.init.accept(self) - self.write('; ') + self.write("; ") if sf.cond is not None: sf.cond.accept(self) - self.write('; ') + self.write("; ") if sf.update is not None: sf.update.accept(self) - self.println(') {') + self.println(") {") self.indent() self.visitBlock(sf) self.dedent() - self.printdentln('}') + self.printdentln("}") def visitStmtRangedFor(self, rf): - self.printdent('for (auto& ') + self.printdent("for (auto& ") rf.var.accept(self) - self.write(' : ') + self.write(" : ") rf.iteree.accept(self) - self.println(') {') + self.println(") {") self.indent() self.visitBlock(rf) self.dedent() - self.printdentln('}') + self.printdentln("}") def visitStmtSwitch(self, sw): - self.printdent('switch (') + self.printdent("switch (") sw.expr.accept(self) - self.println(') {') + self.println(") {") self.indent() self.visitBlock(sw) self.dedent() - self.printdentln('}') + self.printdentln("}") def visitStmtBreak(self, sb): - self.printdentln('break;') + self.printdentln("break;") def visitStmtDecl(self, sd): self.printdent() sd.decl.accept(self) if sd.initargs is not None: - self.write('{') + self.write("{") self.writeDeclList(sd.initargs) - self.write('}') + self.write("}") if sd.init is not None: - self.write(' = ') + self.write(" = ") sd.init.accept(self) - self.println(';') + self.println(";") def visitStmtExpr(self, se): self.printdent() se.expr.accept(self) - self.println(';') + self.println(";") def visitStmtReturn(self, sr): - self.printdent('return') + self.printdent("return") if sr.expr: - self.write(' ') + self.write(" ") sr.expr.accept(self) - self.println(';') + self.println(";") def writeDeclList(self, decls): # FIXME/cjones: try to do nice formatting of these guys @@ -544,8 +543,8 @@ def writeDeclList(self, decls): self.println() self.printdent() decl.accept(self) - if i != (ndecls-1): - self.write(',') + if i != (ndecls - 1): + self.write(",") self.dedent() self.dedent() @@ -555,5 +554,5 @@ def writeExprList(self, exprs): nexprs = len(exprs) for i, expr in enumerate(exprs): expr.accept(self) - if i != (nexprs-1): - self.write(', ') + if i != (nexprs - 1): + self.write(", ") diff --git a/ipc/ipdl/ipdl/cxx/code.py b/ipc/ipdl/ipdl/cxx/code.py index c8ecf6480da57f..0b5019b6238f9a 100644 --- a/ipc/ipdl/ipdl/cxx/code.py +++ b/ipc/ipdl/ipdl/cxx/code.py @@ -20,6 +20,7 @@ # ----------------------------------------------------------------------------- # Public API. + def StmtCode(tmpl, **kwargs): """Perform template substitution to build opaque C++ AST nodes. See the module documentation for more information on the templating syntax. 
@@ -57,10 +58,11 @@ def ExprVerbatim(text): # ----------------------------------------------------------------------------- # Implementation + def _code(tmpl, inline, context): # Remove common indentation, and strip the preceding newline from # '''-quoting, because we usually don't want it. - if tmpl.startswith('\n'): + if tmpl.startswith("\n"): tmpl = tmpl[1:] tmpl = textwrap.dedent(tmpl) @@ -85,53 +87,54 @@ def _verbatim(text, inline): # For simplicitly, _verbatim is implemented using the same logic as _code, # but with '$' characters escaped. This ensures we only need to worry about # a single, albeit complex, codepath. - return _code(text.replace('$', '$$'), inline, {}) + return _code(text.replace("$", "$$"), inline, {}) # Pattern used to identify substitutions. _substPat = re.compile( - r''' + r""" \$(?: (?P<escaped>\$) | # '$$' is an escaped '$' (?P<list>[*,])?{(?P<expr>[^}]+)} | # ${expr}, $*{expr}, or $,{expr} (?P<invalid>) # For error reporting ) - ''', - re.IGNORECASE | re.VERBOSE) + """, + re.IGNORECASE | re.VERBOSE, +) def _line(raw, skip_indent, lineno, context): - assert '\n' not in raw + assert "\n" not in raw # Determine the level of indentation used for this line line = raw.lstrip() offset = int(math.ceil((len(raw) - len(line)) / 4)) # If line starts with a directive, don't indent it. - if line.startswith('#'): + if line.startswith("#"): skip_indent = True column = 0 children = [] for match in _substPat.finditer(line): - if match.group('invalid') is not None: + if match.group("invalid") is not None: raise ValueError("Invalid substitution on line %d" % lineno) # Any text from before the current entry should be written, and column # advanced. if match.start() > column: - before = line[column:match.start()] + before = line[column : match.start()] children.append(VerbatimNode(before)) column = match.end() # If we have an escaped group, emit a '$' node. - if match.group('escaped') is not None: - children.append(VerbatimNode('$')) + if match.group("escaped") is not None: + children.append(VerbatimNode("$")) continue # At this point we should have an expression. - list_chr = match.group('list') - expr = match.group('expr') + list_chr = match.group("list") + expr = match.group("expr") assert expr is not None # Evaluate our expression in the context to get the values. @@ -147,12 +150,12 @@ def _line(raw, skip_indent, lineno, context): values = [values] # Check if this substitution is inline, or the entire line. - inline = (match.span() != (0, len(line))) + inline = match.span() != (0, len(line)) for idx, value in enumerate(values): # If we're using ',' as list mode, put a comma between each node. - if idx > 0 and list_chr == ',': - children.append(VerbatimNode(', ')) + if idx > 0 and list_chr == ",": + children.append(VerbatimNode(", ")) # If our value isn't a node, turn it into one. Verbatim should be # inline unless indent isn't being skipped, and the match isn't @@ -174,11 +177,11 @@ def _line(raw, skip_indent, lineno, context): # If we have no children, just emit the empty string. This will become a # blank line. if len(children) == 0: - return VerbatimNode('') + return VerbatimNode("") # Add the initial indent if we aren't skipping it. 
if not skip_indent: - children.insert(0, VerbatimNode('', indent=True)) + children.insert(0, VerbatimNode("", indent=True)) # Wrap ourselves into a group node with the correct indent offset return GroupNode(children, offset=offset) diff --git a/ipc/ipdl/ipdl/direct_call.py b/ipc/ipdl/ipdl/direct_call.py index 94279c78137181..42d89df1b093fe 100644 --- a/ipc/ipdl/ipdl/direct_call.py +++ b/ipc/ipdl/ipdl/direct_call.py @@ -10,29 +10,27 @@ # {(Protocol, side): (Class, HeaderFile)} DIRECT_CALL_OVERRIDES = { ("PAPZ", "parent"): ( - "RemoteContentController", "mozilla/layers/RemoteContentController.h" + "RemoteContentController", + "mozilla/layers/RemoteContentController.h", ), - ("PBackgroundMutableFile", "parent"): ( - "BackgroundMutableFileParentBase", "mozilla/dom/filehandle/ActorsParent.h" + "BackgroundMutableFileParentBase", + "mozilla/dom/filehandle/ActorsParent.h", ), - ("PCanvas", "parent"): ("CanvasTranslator", "mozilla/layers/CanvasTranslator.h"), - ("PChromiumCDM", "parent"): ("ChromiumCDMParent", "ChromiumCDMParent.h"), - ("PCompositorBridge", "parent"): ( - "CompositorBridgeParentBase", "mozilla/layers/CompositorBridgeParent.h" + "CompositorBridgeParentBase", + "mozilla/layers/CompositorBridgeParent.h", ), - ("PContentPermissionRequest", "child"): ( - "RemotePermissionRequest", "nsContentPermissionHelper.h" + "RemotePermissionRequest", + "nsContentPermissionHelper.h", ), - ("PFileSystemRequest", "child"): ( - "FileSystemTaskChildBase", "mozilla/dom/FileSystemTaskBase.h" + "FileSystemTaskChildBase", + "mozilla/dom/FileSystemTaskBase.h", ), - ("PGMP", "child"): ("GMPChild", "GMPChild.h"), ("PGMP", "parent"): ("GMPParent", "GMPParent.h"), ("PGMPContent", "child"): ("GMPContentChild", "GMPContentChild.h"), @@ -41,64 +39,79 @@ ("PGMPTimer", "parent"): ("GMPTimerParent", "GMPTimerParent.h"), ("PGMPVideoEncoder", "child"): ("GMPVideoEncoderChild", "GMPVideoEncoderChild.h"), ("PGMPVideoDecoder", "child"): ("GMPVideoDecoderChild", "GMPVideoDecoderChild.h"), - - ("PLoginReputation", "parent"): ("LoginReputationParent", "mozilla/LoginReputationIPC.h"), - + ("PLoginReputation", "parent"): ( + "LoginReputationParent", + "mozilla/LoginReputationIPC.h", + ), ("PMedia", "child"): ("Child", "mozilla/media/MediaChild.h"), - ("PPresentationRequest", "child"): ( - "PresentationRequestChild", "mozilla/dom/PresentationChild.h" + "PresentationRequestChild", + "mozilla/dom/PresentationChild.h", ), ("PPresentationRequest", "parent"): ( - "PresentationRequestParent", "mozilla/dom/PresentationParent.h" + "PresentationRequestParent", + "mozilla/dom/PresentationParent.h", ), - ("PRemoteSpellcheckEngine", "child"): ( - "RemoteSpellcheckEngineChild", "mozilla/RemoteSpellCheckEngineChild.h" + "RemoteSpellcheckEngineChild", + "mozilla/RemoteSpellCheckEngineChild.h", ), ("PRemoteSpellcheckEngine", "parent"): ( - "RemoteSpellcheckEngineParent", "mozilla/RemoteSpellCheckEngineParent.h" + "RemoteSpellcheckEngineParent", + "mozilla/RemoteSpellCheckEngineParent.h", + ), + ("PScriptCache", "child"): ( + "ScriptCacheChild", + "mozilla/loader/ScriptCacheActors.h", + ), + ("PScriptCache", "parent"): ( + "ScriptCacheParent", + "mozilla/loader/ScriptCacheActors.h", ), - - ("PScriptCache", "child"): ("ScriptCacheChild", "mozilla/loader/ScriptCacheActors.h"), - ("PScriptCache", "parent"): ("ScriptCacheParent", "mozilla/loader/ScriptCacheActors.h"), - ("PTCPServerSocket", "child"): ( - "mozilla::dom::TCPServerSocketChild", "mozilla/dom/network/TCPServerSocketChild.h" + "mozilla::dom::TCPServerSocketChild", + 
"mozilla/dom/network/TCPServerSocketChild.h", ), ("PTCPServerSocket", "parent"): ( - "mozilla::dom::TCPServerSocketParent", "mozilla/dom/network/TCPServerSocketParent.h" + "mozilla::dom::TCPServerSocketParent", + "mozilla/dom/network/TCPServerSocketParent.h", ), ("PTCPSocket", "child"): ( - "mozilla::dom::TCPSocketChild", "mozilla/dom/network/TCPSocketChild.h" + "mozilla::dom::TCPSocketChild", + "mozilla/dom/network/TCPSocketChild.h", ), ("PTCPSocket", "parent"): ( - "mozilla::dom::TCPSocketParent", "mozilla/dom/network/TCPSocketParent.h" + "mozilla::dom::TCPSocketParent", + "mozilla/dom/network/TCPSocketParent.h", + ), + ("PTestShellCommand", "parent"): ( + "TestShellCommandParent", + "mozilla/ipc/TestShellParent.h", ), - - ("PTestShellCommand", "parent"): ("TestShellCommandParent", "mozilla/ipc/TestShellParent.h"), - ("PTransportProvider", "child"): ( - "TransportProviderChild", "mozilla/net/IPCTransportProvider.h" + "TransportProviderChild", + "mozilla/net/IPCTransportProvider.h", ), ("PTransportProvider", "parent"): ( - "TransportProviderParent", "mozilla/net/IPCTransportProvider.h" + "TransportProviderParent", + "mozilla/net/IPCTransportProvider.h", ), - ("PUDPSocket", "child"): ( - "mozilla::dom::UDPSocketChild", "mozilla/dom/network/UDPSocketChild.h" + "mozilla::dom::UDPSocketChild", + "mozilla/dom/network/UDPSocketChild.h", ), ("PUDPSocket", "parent"): ( - "mozilla::dom::UDPSocketParent", "mozilla/dom/network/UDPSocketParent.h" + "mozilla::dom::UDPSocketParent", + "mozilla/dom/network/UDPSocketParent.h", ), - ("PURLClassifierLocal", "child"): ( - "URLClassifierLocalChild", "mozilla/dom/URLClassifierChild.h" + "URLClassifierLocalChild", + "mozilla/dom/URLClassifierChild.h", ), ("PURLClassifierLocal", "parent"): ( - "URLClassifierLocalParent", "mozilla/dom/URLClassifierParent.h" + "URLClassifierLocalParent", + "mozilla/dom/URLClassifierParent.h", ), - ("PVR", "child"): ("VRChild", "VRChild.h"), ("PVR", "parent"): ("VRParent", "VRParent.h"), ("PVRGPU", "child"): ("VRGPUChild", "VRGPUChild.h"), @@ -106,329 +119,469 @@ ("PVRLayer", "child"): ("VRLayerChild", "VRLayerChild.h"), ("PVRManager", "child"): ("VRManagerChild", "VRManagerChild.h"), ("PVRManager", "parent"): ("VRManagerParent", "VRManagerParent.h"), - - ("PWebSocket", "child"): ("WebSocketChannelChild", "mozilla/net/WebSocketChannelChild.h"), - ("PWebSocket", "parent"): ("WebSocketChannelParent", "mozilla/net/WebSocketChannelParent.h"), - + ("PWebSocket", "child"): ( + "WebSocketChannelChild", + "mozilla/net/WebSocketChannelChild.h", + ), + ("PWebSocket", "parent"): ( + "WebSocketChannelParent", + "mozilla/net/WebSocketChannelParent.h", + ), # _ipdltest ("PTestActorPunning", "child"): ( - "TestActorPunningChild", "mozilla/_ipdltest/TestActorPunning.h" + "TestActorPunningChild", + "mozilla/_ipdltest/TestActorPunning.h", ), ("PTestActorPunning", "parent"): ( - "TestActorPunningParent", "mozilla/_ipdltest/TestActorPunning.h" + "TestActorPunningParent", + "mozilla/_ipdltest/TestActorPunning.h", ), ("PTestActorPunningPunned", "child"): ( - "TestActorPunningPunnedChild", "mozilla/_ipdltest/TestActorPunning.h" + "TestActorPunningPunnedChild", + "mozilla/_ipdltest/TestActorPunning.h", ), ("PTestActorPunningPunned", "parent"): ( - "TestActorPunningPunnedParent", "mozilla/_ipdltest/TestActorPunning.h" + "TestActorPunningPunnedParent", + "mozilla/_ipdltest/TestActorPunning.h", ), ("PTestActorPunningSub", "child"): ( - "TestActorPunningSubChild", "mozilla/_ipdltest/TestActorPunning.h" + "TestActorPunningSubChild", + 
"mozilla/_ipdltest/TestActorPunning.h", ), ("PTestActorPunningSub", "parent"): ( - "TestActorPunningSubParent", "mozilla/_ipdltest/TestActorPunning.h" + "TestActorPunningSubParent", + "mozilla/_ipdltest/TestActorPunning.h", ), - ("PTestAsyncReturns", "child"): ( - "TestAsyncReturnsChild", "mozilla/_ipdltest/TestAsyncReturns.h" + "TestAsyncReturnsChild", + "mozilla/_ipdltest/TestAsyncReturns.h", ), ("PTestAsyncReturns", "parent"): ( - "TestAsyncReturnsParent", "mozilla/_ipdltest/TestAsyncReturns.h" + "TestAsyncReturnsParent", + "mozilla/_ipdltest/TestAsyncReturns.h", + ), + ("PTestBadActor", "parent"): ( + "TestBadActorParent", + "mozilla/_ipdltest/TestBadActor.h", + ), + ("PTestBadActor", "child"): ( + "TestBadActorChild", + "mozilla/_ipdltest/TestBadActor.h", + ), + ("PTestBadActorSub", "child"): ( + "TestBadActorSubChild", + "mozilla/_ipdltest/TestBadActor.h", + ), + ("PTestBadActorSub", "parent"): ( + "TestBadActorSubParent", + "mozilla/_ipdltest/TestBadActor.h", ), - - ("PTestBadActor", "parent"): ("TestBadActorParent", "mozilla/_ipdltest/TestBadActor.h"), - ("PTestBadActor", "child"): ("TestBadActorChild", "mozilla/_ipdltest/TestBadActor.h"), - ("PTestBadActorSub", "child"): ("TestBadActorSubChild", "mozilla/_ipdltest/TestBadActor.h"), - ("PTestBadActorSub", "parent"): ("TestBadActorSubParent", "mozilla/_ipdltest/TestBadActor.h"), - ("PTestCancel", "child"): ("TestCancelChild", "mozilla/_ipdltest/TestCancel.h"), ("PTestCancel", "parent"): ("TestCancelParent", "mozilla/_ipdltest/TestCancel.h"), - ("PTestCrashCleanup", "child"): ( - "TestCrashCleanupChild", "mozilla/_ipdltest/TestCrashCleanup.h" + "TestCrashCleanupChild", + "mozilla/_ipdltest/TestCrashCleanup.h", ), ("PTestCrashCleanup", "parent"): ( - "TestCrashCleanupParent", "mozilla/_ipdltest/TestCrashCleanup.h" + "TestCrashCleanupParent", + "mozilla/_ipdltest/TestCrashCleanup.h", ), - ("PTestDataStructures", "child"): ( - "TestDataStructuresChild", "mozilla/_ipdltest/TestDataStructures.h" + "TestDataStructuresChild", + "mozilla/_ipdltest/TestDataStructures.h", ), ("PTestDataStructures", "parent"): ( - "TestDataStructuresParent", "mozilla/_ipdltest/TestDataStructures.h" + "TestDataStructuresParent", + "mozilla/_ipdltest/TestDataStructures.h", ), ("PTestDataStructuresSub", "child"): ( - "TestDataStructuresSub", "mozilla/_ipdltest/TestDataStructures.h" + "TestDataStructuresSub", + "mozilla/_ipdltest/TestDataStructures.h", ), ("PTestDataStructuresSub", "parent"): ( - "TestDataStructuresSub", "mozilla/_ipdltest/TestDataStructures.h" + "TestDataStructuresSub", + "mozilla/_ipdltest/TestDataStructures.h", ), - ("PTestDemon", "child"): ("TestDemonChild", "mozilla/_ipdltest/TestDemon.h"), ("PTestDemon", "parent"): ("TestDemonParent", "mozilla/_ipdltest/TestDemon.h"), - ("PTestDesc", "child"): ("TestDescChild", "mozilla/_ipdltest/TestDesc.h"), ("PTestDesc", "parent"): ("TestDescParent", "mozilla/_ipdltest/TestDesc.h"), ("PTestDescSub", "child"): ("TestDescSubChild", "mozilla/_ipdltest/TestDesc.h"), ("PTestDescSub", "parent"): ("TestDescSubParent", "mozilla/_ipdltest/TestDesc.h"), - ("PTestDescSubsub", "child"): ("TestDescSubsubChild", "mozilla/_ipdltest/TestDesc.h"), - ("PTestDescSubsub", "parent"): ("TestDescSubsubParent", "mozilla/_ipdltest/TestDesc.h"), - + ("PTestDescSubsub", "child"): ( + "TestDescSubsubChild", + "mozilla/_ipdltest/TestDesc.h", + ), + ("PTestDescSubsub", "parent"): ( + "TestDescSubsubParent", + "mozilla/_ipdltest/TestDesc.h", + ), ("PTestEndpointBridgeMain", "child"): ( - "TestEndpointBridgeMainChild", 
"mozilla/_ipdltest/TestEndpointBridgeMain.h" + "TestEndpointBridgeMainChild", + "mozilla/_ipdltest/TestEndpointBridgeMain.h", ), ("PTestEndpointBridgeMain", "parent"): ( - "TestEndpointBridgeMainParent", "mozilla/_ipdltest/TestEndpointBridgeMain.h" + "TestEndpointBridgeMainParent", + "mozilla/_ipdltest/TestEndpointBridgeMain.h", ), ("PTestEndpointBridgeMainSub", "child"): ( - "TestEndpointBridgeMainSubChild", "mozilla/_ipdltest/TestEndpointBridgeMain.h" + "TestEndpointBridgeMainSubChild", + "mozilla/_ipdltest/TestEndpointBridgeMain.h", ), ("PTestEndpointBridgeMainSub", "parent"): ( - "TestEndpointBridgeMainSubParent", "mozilla/_ipdltest/TestEndpointBridgeMain.h" + "TestEndpointBridgeMainSubParent", + "mozilla/_ipdltest/TestEndpointBridgeMain.h", ), ("PTestEndpointBridgeSub", "child"): ( - "TestEndpointBridgeSubChild", "mozilla/_ipdltest/TestEndpointBridgeMain.h" + "TestEndpointBridgeSubChild", + "mozilla/_ipdltest/TestEndpointBridgeMain.h", ), ("PTestEndpointBridgeSub", "parent"): ( - "TestEndpointBridgeSubParent", "mozilla/_ipdltest/TestEndpointBridgeMain.h" + "TestEndpointBridgeSubParent", + "mozilla/_ipdltest/TestEndpointBridgeMain.h", ), - ("PTestEndpointOpens", "child"): ( - "TestEndpointOpensChild", "mozilla/_ipdltest/TestEndpointOpens.h" + "TestEndpointOpensChild", + "mozilla/_ipdltest/TestEndpointOpens.h", ), ("PTestEndpointOpens", "parent"): ( - "TestEndpointOpensParent", "mozilla/_ipdltest/TestEndpointOpens.h" + "TestEndpointOpensParent", + "mozilla/_ipdltest/TestEndpointOpens.h", ), ("PTestEndpointOpensOpened", "child"): ( - "TestEndpointOpensOpenedChild", "mozilla/_ipdltest/TestEndpointOpens.h" + "TestEndpointOpensOpenedChild", + "mozilla/_ipdltest/TestEndpointOpens.h", ), ("PTestEndpointOpensOpened", "parent"): ( - "TestEndpointOpensOpenedParent", "mozilla/_ipdltest/TestEndpointOpens.h" + "TestEndpointOpensOpenedParent", + "mozilla/_ipdltest/TestEndpointOpens.h", + ), + ("PTestFailedCtor", "child"): ( + "TestFailedCtorChild", + "mozilla/_ipdltest/TestFailedCtor.h", + ), + ("PTestFailedCtor", "parent"): ( + "TestFailedCtorParent", + "mozilla/_ipdltest/TestFailedCtor.h", ), - - ("PTestFailedCtor", "child"): ("TestFailedCtorChild", "mozilla/_ipdltest/TestFailedCtor.h"), - ("PTestFailedCtor", "parent"): ("TestFailedCtorParent", "mozilla/_ipdltest/TestFailedCtor.h"), ("PTestFailedCtorSub", "child"): ( - "TestFailedCtorSubChild", "mozilla/_ipdltest/TestFailedCtor.h" + "TestFailedCtorSubChild", + "mozilla/_ipdltest/TestFailedCtor.h", ), ("PTestFailedCtorSub", "parent"): ( - "TestFailedCtorSubParent", "mozilla/_ipdltest/TestFailedCtor.h" + "TestFailedCtorSubParent", + "mozilla/_ipdltest/TestFailedCtor.h", ), ("PTestFailedCtorSubsub", "child"): ( - "TestFailedCtorSubsub", "mozilla/_ipdltest/TestFailedCtor.h" + "TestFailedCtorSubsub", + "mozilla/_ipdltest/TestFailedCtor.h", ), ("PTestFailedCtorSubsub", "parent"): ( - "TestFailedCtorSubsub", "mozilla/_ipdltest/TestFailedCtor.h" + "TestFailedCtorSubsub", + "mozilla/_ipdltest/TestFailedCtor.h", ), - ("PTestHandle", "child"): ("TestHandleChild", "mozilla/_ipdltest/TestJSON.h"), ("PTestHandle", "parent"): ("TestHandleParent", "mozilla/_ipdltest/TestJSON.h"), ("PTestJSON", "child"): ("TestJSONChild", "mozilla/_ipdltest/TestJSON.h"), ("PTestJSON", "parent"): ("TestJSONParent", "mozilla/_ipdltest/TestJSON.h"), - ("PTestHangs", "child"): ("TestHangsChild", "mozilla/_ipdltest/TestHangs.h"), ("PTestHangs", "parent"): ("TestHangsParent", "mozilla/_ipdltest/TestHangs.h"), - - ("PTestHighestPrio", "child"): ("TestHighestPrioChild", 
"mozilla/_ipdltest/TestHighestPrio.h"), + ("PTestHighestPrio", "child"): ( + "TestHighestPrioChild", + "mozilla/_ipdltest/TestHighestPrio.h", + ), ("PTestHighestPrio", "parent"): ( - "TestHighestPrioParent", "mozilla/_ipdltest/TestHighestPrio.h" + "TestHighestPrioParent", + "mozilla/_ipdltest/TestHighestPrio.h", ), - ("PTestInterruptErrorCleanup", "child"): ( - "TestInterruptErrorCleanupChild", "mozilla/_ipdltest/TestInterruptErrorCleanup.h" + "TestInterruptErrorCleanupChild", + "mozilla/_ipdltest/TestInterruptErrorCleanup.h", ), ("PTestInterruptErrorCleanup", "parent"): ( - "TestInterruptErrorCleanupParent", "mozilla/_ipdltest/TestInterruptErrorCleanup.h" + "TestInterruptErrorCleanupParent", + "mozilla/_ipdltest/TestInterruptErrorCleanup.h", ), - ("PTestInterruptRaces", "child"): ( - "TestInterruptRacesChild", "mozilla/_ipdltest/TestInterruptRaces.h" + "TestInterruptRacesChild", + "mozilla/_ipdltest/TestInterruptRaces.h", ), ("PTestInterruptRaces", "parent"): ( - "TestInterruptRacesParent", "mozilla/_ipdltest/TestInterruptRaces.h" + "TestInterruptRacesParent", + "mozilla/_ipdltest/TestInterruptRaces.h", ), - ("PTestInterruptShutdownRace", "child"): ( - "TestInterruptShutdownRaceChild", "mozilla/_ipdltest/TestInterruptShutdownRace.h" + "TestInterruptShutdownRaceChild", + "mozilla/_ipdltest/TestInterruptShutdownRace.h", ), ("PTestInterruptShutdownRace", "parent"): ( - "TestInterruptShutdownRaceParent", "mozilla/_ipdltest/TestInterruptShutdownRace.h" + "TestInterruptShutdownRaceParent", + "mozilla/_ipdltest/TestInterruptShutdownRace.h", ), - ("PTestLatency", "child"): ("TestLatencyChild", "mozilla/_ipdltest/TestLatency.h"), - ("PTestLatency", "parent"): ("TestLatencyParent", "mozilla/_ipdltest/TestLatency.h"), - + ("PTestLatency", "parent"): ( + "TestLatencyParent", + "mozilla/_ipdltest/TestLatency.h", + ), ("PTestLayoutThread", "child"): ( - "TestOffMainThreadPaintingChild", "mozilla/_ipdltest/TestOffMainThreadPainting.h" + "TestOffMainThreadPaintingChild", + "mozilla/_ipdltest/TestOffMainThreadPainting.h", ), ("PTestLayoutThread", "parent"): ( - "TestOffMainThreadPaintingParent", "mozilla/_ipdltest/TestOffMainThreadPainting.h" + "TestOffMainThreadPaintingParent", + "mozilla/_ipdltest/TestOffMainThreadPainting.h", ), ("PTestPaintThread", "child"): ( - "TestPaintThreadChild", "mozilla/_ipdltest/TestOffMainThreadPainting.h" + "TestPaintThreadChild", + "mozilla/_ipdltest/TestOffMainThreadPainting.h", ), ("PTestPaintThread", "parent"): ( - "TestPaintThreadParent", "mozilla/_ipdltest/TestOffMainThreadPainting.h" + "TestPaintThreadParent", + "mozilla/_ipdltest/TestOffMainThreadPainting.h", ), - ("PTestManyChildAllocs", "child"): ( - "TestManyChildAllocsChild", "mozilla/_ipdltest/TestManyChildAllocs.h" + "TestManyChildAllocsChild", + "mozilla/_ipdltest/TestManyChildAllocs.h", ), ("PTestManyChildAllocs", "parent"): ( - "TestManyChildAllocsParent", "mozilla/_ipdltest/TestManyChildAllocs.h" + "TestManyChildAllocsParent", + "mozilla/_ipdltest/TestManyChildAllocs.h", ), ("PTestManyChildAllocsSub", "child"): ( - "TestManyChildAllocsSubChild", "mozilla/_ipdltest/TestManyChildAllocs.h" + "TestManyChildAllocsSubChild", + "mozilla/_ipdltest/TestManyChildAllocs.h", ), ("PTestManyChildAllocsSub", "parent"): ( - "TestManyChildAllocsSubParent", "mozilla/_ipdltest/TestManyChildAllocs.h" + "TestManyChildAllocsSubParent", + "mozilla/_ipdltest/TestManyChildAllocs.h", + ), + ("PTestMultiMgrs", "child"): ( + "TestMultiMgrsChild", + "mozilla/_ipdltest/TestMultiMgrs.h", + ), + ("PTestMultiMgrs", "parent"): ( + 
"TestMultiMgrsParent", + "mozilla/_ipdltest/TestMultiMgrs.h", ), - - ("PTestMultiMgrs", "child"): ("TestMultiMgrsChild", "mozilla/_ipdltest/TestMultiMgrs.h"), - ("PTestMultiMgrs", "parent"): ("TestMultiMgrsParent", "mozilla/_ipdltest/TestMultiMgrs.h"), ("PTestMultiMgrsBottom", "child"): ( - "TestMultiMgrsBottomChild", "mozilla/_ipdltest/TestMultiMgrs.h" + "TestMultiMgrsBottomChild", + "mozilla/_ipdltest/TestMultiMgrs.h", ), ("PTestMultiMgrsBottom", "parent"): ( - "TestMultiMgrsBottomParent", "mozilla/_ipdltest/TestMultiMgrs.h" + "TestMultiMgrsBottomParent", + "mozilla/_ipdltest/TestMultiMgrs.h", ), ("PTestMultiMgrsLeft", "child"): ( - "TestMultiMgrsLeftChild", "mozilla/_ipdltest/TestMultiMgrs.h" + "TestMultiMgrsLeftChild", + "mozilla/_ipdltest/TestMultiMgrs.h", ), ("PTestMultiMgrsLeft", "parent"): ( - "TestMultiMgrsLeftParent", "mozilla/_ipdltest/TestMultiMgrs.h" + "TestMultiMgrsLeftParent", + "mozilla/_ipdltest/TestMultiMgrs.h", ), ("PTestMultiMgrsRight", "child"): ( - "TestMultiMgrsRightChild", "mozilla/_ipdltest/TestMultiMgrs.h" + "TestMultiMgrsRightChild", + "mozilla/_ipdltest/TestMultiMgrs.h", ), ("PTestMultiMgrsRight", "parent"): ( - "TestMultiMgrsRightParent", "mozilla/_ipdltest/TestMultiMgrs.h" + "TestMultiMgrsRightParent", + "mozilla/_ipdltest/TestMultiMgrs.h", + ), + ("PTestNestedLoops", "child"): ( + "TestNestedLoopsChild", + "mozilla/_ipdltest/TestNestedLoops.h", ), - - ("PTestNestedLoops", "child"): ("TestNestedLoopsChild", "mozilla/_ipdltest/TestNestedLoops.h"), ("PTestNestedLoops", "parent"): ( - "TestNestedLoopsParent", "mozilla/_ipdltest/TestNestedLoops.h" + "TestNestedLoopsParent", + "mozilla/_ipdltest/TestNestedLoops.h", ), - ("PTestRaceDeadlock", "child"): ( - "TestRaceDeadlockChild", "mozilla/_ipdltest/TestRaceDeadlock.h" + "TestRaceDeadlockChild", + "mozilla/_ipdltest/TestRaceDeadlock.h", ), ("PTestRaceDeadlock", "parent"): ( - "TestRaceDeadlockParent", "mozilla/_ipdltest/TestRaceDeadlock.h" + "TestRaceDeadlockParent", + "mozilla/_ipdltest/TestRaceDeadlock.h", ), - ("PTestRaceDeferral", "child"): ( - "TestRaceDeferralChild", "mozilla/_ipdltest/TestRaceDeferral.h" + "TestRaceDeferralChild", + "mozilla/_ipdltest/TestRaceDeferral.h", ), ("PTestRaceDeferral", "parent"): ( - "TestRaceDeferralParent", "mozilla/_ipdltest/TestRaceDeferral.h" + "TestRaceDeferralParent", + "mozilla/_ipdltest/TestRaceDeferral.h", ), - ("PTestRacyInterruptReplies", "child"): ( - "TestRacyInterruptRepliesChild", "mozilla/_ipdltest/TestRacyInterruptReplies.h" + "TestRacyInterruptRepliesChild", + "mozilla/_ipdltest/TestRacyInterruptReplies.h", ), ("PTestRacyInterruptReplies", "parent"): ( - "TestRacyInterruptRepliesParent", "mozilla/_ipdltest/TestRacyInterruptReplies.h" + "TestRacyInterruptRepliesParent", + "mozilla/_ipdltest/TestRacyInterruptReplies.h", + ), + ("PTestRacyReentry", "child"): ( + "TestRacyReentryChild", + "mozilla/_ipdltest/TestRacyReentry.h", ), - - ("PTestRacyReentry", "child"): ("TestRacyReentryChild", "mozilla/_ipdltest/TestRacyReentry.h"), ("PTestRacyReentry", "parent"): ( - "TestRacyReentryParent", "mozilla/_ipdltest/TestRacyReentry.h" + "TestRacyReentryParent", + "mozilla/_ipdltest/TestRacyReentry.h", + ), + ("PTestRacyUndefer", "child"): ( + "TestRacyUndeferChild", + "mozilla/_ipdltest/TestRacyUndefer.h", ), - - ("PTestRacyUndefer", "child"): ("TestRacyUndeferChild", "mozilla/_ipdltest/TestRacyUndefer.h"), ("PTestRacyUndefer", "parent"): ( - "TestRacyUndeferParent", "mozilla/_ipdltest/TestRacyUndefer.h" + "TestRacyUndeferParent", + "mozilla/_ipdltest/TestRacyUndefer.h", ), - 
("PTestRPC", "child"): ("TestRPCChild", "mozilla/_ipdltest/TestRPC.h"), ("PTestRPC", "parent"): ("TestRPCParent", "mozilla/_ipdltest/TestRPC.h"), - ("PTestSanity", "child"): ("TestSanityChild", "mozilla/_ipdltest/TestSanity.h"), ("PTestSanity", "parent"): ("TestSanityParent", "mozilla/_ipdltest/TestSanity.h"), - ("PTestSelfManage", "child"): ( - "TestSelfManageChild", "mozilla/_ipdltest/TestSelfManageRoot.h" + "TestSelfManageChild", + "mozilla/_ipdltest/TestSelfManageRoot.h", ), ("PTestSelfManage", "parent"): ( - "TestSelfManageParent", "mozilla/_ipdltest/TestSelfManageRoot.h" + "TestSelfManageParent", + "mozilla/_ipdltest/TestSelfManageRoot.h", ), ("PTestSelfManageRoot", "child"): ( - "TestSelfManageRootChild", "mozilla/_ipdltest/TestSelfManageRoot.h" + "TestSelfManageRootChild", + "mozilla/_ipdltest/TestSelfManageRoot.h", ), ("PTestSelfManageRoot", "parent"): ( - "TestSelfManageRootParent", "mozilla/_ipdltest/TestSelfManageRoot.h" + "TestSelfManageRootParent", + "mozilla/_ipdltest/TestSelfManageRoot.h", ), - ("PTestShmem", "child"): ("TestShmemChild", "mozilla/_ipdltest/TestShmem.h"), ("PTestShmem", "parent"): ("TestShmemParent", "mozilla/_ipdltest/TestShmem.h"), - - ("PTestShutdown", "child"): ("TestShutdownChild", "mozilla/_ipdltest/TestShutdown.h"), - ("PTestShutdown", "parent"): ("TestShutdownParent", "mozilla/_ipdltest/TestShutdown.h"), - ("PTestShutdownSub", "child"): ("TestShutdownSubChild", "mozilla/_ipdltest/TestShutdown.h"), - ("PTestShutdownSub", "parent"): ("TestShutdownSubParent", "mozilla/_ipdltest/TestShutdown.h"), + ("PTestShutdown", "child"): ( + "TestShutdownChild", + "mozilla/_ipdltest/TestShutdown.h", + ), + ("PTestShutdown", "parent"): ( + "TestShutdownParent", + "mozilla/_ipdltest/TestShutdown.h", + ), + ("PTestShutdownSub", "child"): ( + "TestShutdownSubChild", + "mozilla/_ipdltest/TestShutdown.h", + ), + ("PTestShutdownSub", "parent"): ( + "TestShutdownSubParent", + "mozilla/_ipdltest/TestShutdown.h", + ), ("PTestShutdownSubsub", "child"): ( - "TestShutdownSubsubChild", "mozilla/_ipdltest/TestShutdown.h" + "TestShutdownSubsubChild", + "mozilla/_ipdltest/TestShutdown.h", ), ("PTestShutdownSubsub", "parent"): ( - "TestShutdownSubsubParent", "mozilla/_ipdltest/TestShutdown.h" + "TestShutdownSubsubParent", + "mozilla/_ipdltest/TestShutdown.h", + ), + ("PTestStackHooks", "child"): ( + "TestStackHooksChild", + "mozilla/_ipdltest/TestStackHooks.h", + ), + ("PTestStackHooks", "parent"): ( + "TestStackHooksParent", + "mozilla/_ipdltest/TestStackHooks.h", + ), + ("PTestSyncError", "child"): ( + "TestSyncErrorChild", + "mozilla/_ipdltest/TestSyncError.h", + ), + ("PTestSyncError", "parent"): ( + "TestSyncErrorParent", + "mozilla/_ipdltest/TestSyncError.h", + ), + ("PTestSyncHang", "child"): ( + "TestSyncHangChild", + "mozilla/_ipdltest/TestSyncHang.h", + ), + ("PTestSyncHang", "parent"): ( + "TestSyncHangParent", + "mozilla/_ipdltest/TestSyncHang.h", + ), + ("PTestSyncWakeup", "child"): ( + "TestSyncWakeupChild", + "mozilla/_ipdltest/TestSyncWakeup.h", + ), + ("PTestSyncWakeup", "parent"): ( + "TestSyncWakeupParent", + "mozilla/_ipdltest/TestSyncWakeup.h", ), - - ("PTestStackHooks", "child"): ("TestStackHooksChild", "mozilla/_ipdltest/TestStackHooks.h"), - ("PTestStackHooks", "parent"): ("TestStackHooksParent", "mozilla/_ipdltest/TestStackHooks.h"), - - ("PTestSyncError", "child"): ("TestSyncErrorChild", "mozilla/_ipdltest/TestSyncError.h"), - ("PTestSyncError", "parent"): ("TestSyncErrorParent", "mozilla/_ipdltest/TestSyncError.h"), - - ("PTestSyncHang", "child"): 
("TestSyncHangChild", "mozilla/_ipdltest/TestSyncHang.h"), - ("PTestSyncHang", "parent"): ("TestSyncHangParent", "mozilla/_ipdltest/TestSyncHang.h"), - - ("PTestSyncWakeup", "child"): ("TestSyncWakeupChild", "mozilla/_ipdltest/TestSyncWakeup.h"), - ("PTestSyncWakeup", "parent"): ("TestSyncWakeupParent", "mozilla/_ipdltest/TestSyncWakeup.h"), - ("PTestUniquePtrIPC", "child"): ( - "TestUniquePtrIPCChild", "mozilla/_ipdltest/TestUniquePtrIPC.h" + "TestUniquePtrIPCChild", + "mozilla/_ipdltest/TestUniquePtrIPC.h", ), ("PTestUniquePtrIPC", "parent"): ( - "TestUniquePtrIPCParent", "mozilla/_ipdltest/TestUniquePtrIPC.h" + "TestUniquePtrIPCParent", + "mozilla/_ipdltest/TestUniquePtrIPC.h", ), - ("PTestUrgency", "child"): ("TestUrgencyChild", "mozilla/_ipdltest/TestUrgency.h"), - ("PTestUrgency", "parent"): ("TestUrgencyParent", "mozilla/_ipdltest/TestUrgency.h"), - - ("PTestUrgentHangs", "child"): ("TestUrgentHangsChild", "mozilla/_ipdltest/TestUrgentHangs.h"), + ("PTestUrgency", "parent"): ( + "TestUrgencyParent", + "mozilla/_ipdltest/TestUrgency.h", + ), + ("PTestUrgentHangs", "child"): ( + "TestUrgentHangsChild", + "mozilla/_ipdltest/TestUrgentHangs.h", + ), ("PTestUrgentHangs", "parent"): ( - "TestUrgentHangsParent", "mozilla/_ipdltest/TestUrgentHangs.h" + "TestUrgentHangsParent", + "mozilla/_ipdltest/TestUrgentHangs.h", + ), + ("PBackgroundFileHandle", "child"): ( + "indexedDB::BackgroundFileHandleChild", + "mozilla/dom/indexedDB/ActorsChild.h", + ), + ("PBackgroundFileRequest", "child"): ( + "indexedDB::BackgroundFileRequestChild", + "mozilla/dom/indexedDB/ActorsChild.h", + ), + ("PBackgroundIDBCursor", "child"): ( + "indexedDB::BackgroundCursorChildBase", + "mozilla/dom/indexedDB/ActorsChild.h", + ), + ("PBackgroundIDBDatabase", "child"): ( + "indexedDB::BackgroundDatabaseChild", + "mozilla/dom/indexedDB/ActorsChild.h", + ), + ("PBackgroundIDBDatabaseRequest", "child"): ( + "indexedDB::BackgroundDatabaseRequestChild", + "mozilla/dom/indexedDB/ActorsChild.h", + ), + ("PBackgroundIDBFactory", "child"): ( + "indexedDB::BackgroundFactoryChild", + "mozilla/dom/indexedDB/ActorsChild.h", + ), + ("PBackgroundIDBFactoryRequest", "child"): ( + "indexedDB::BackgroundFactoryRequestChild", + "mozilla/dom/indexedDB/ActorsChild.h", + ), + ("PBackgroundIDBRequest", "child"): ( + "indexedDB::BackgroundRequestChild", + "mozilla/dom/indexedDB/ActorsChild.h", + ), + ("PBackgroundIDBTransaction", "child"): ( + "indexedDB::BackgroundTransactionChild", + "mozilla/dom/indexedDB/ActorsChild.h", + ), + ("PBackgroundIDBVersionChangeTransaction", "child"): ( + "indexedDB::BackgroundVersionChangeTransactionChild", + "mozilla/dom/indexedDB/ActorsChild.h", + ), + ("PBackgroundIndexedDBUtils", "child"): ( + "indexedDB::BackgroundUtilsChild", + "mozilla/dom/indexedDB/ActorsChild.h", + ), + ("PBackgroundMutableFile", "child"): ( + "indexedDB::BackgroundMutableFileChild", + "mozilla/dom/indexedDB/ActorsChild.h", ), - - ("PBackgroundFileHandle", "child"): - ("indexedDB::BackgroundFileHandleChild", "mozilla/dom/indexedDB/ActorsChild.h"), - ("PBackgroundFileRequest", "child"): - ("indexedDB::BackgroundFileRequestChild", "mozilla/dom/indexedDB/ActorsChild.h"), - ("PBackgroundIDBCursor", "child"): - ("indexedDB::BackgroundCursorChildBase", "mozilla/dom/indexedDB/ActorsChild.h"), - ("PBackgroundIDBDatabase", "child"): - ("indexedDB::BackgroundDatabaseChild", "mozilla/dom/indexedDB/ActorsChild.h"), - ("PBackgroundIDBDatabaseRequest", "child"): - ("indexedDB::BackgroundDatabaseRequestChild", - "mozilla/dom/indexedDB/ActorsChild.h"), 
- ("PBackgroundIDBFactory", "child"): - ("indexedDB::BackgroundFactoryChild", "mozilla/dom/indexedDB/ActorsChild.h"), - ("PBackgroundIDBFactoryRequest", "child"): - ("indexedDB::BackgroundFactoryRequestChild", - "mozilla/dom/indexedDB/ActorsChild.h"), - ("PBackgroundIDBRequest", "child"): - ("indexedDB::BackgroundRequestChild", "mozilla/dom/indexedDB/ActorsChild.h"), - ("PBackgroundIDBTransaction", "child"): - ("indexedDB::BackgroundTransactionChild", "mozilla/dom/indexedDB/ActorsChild.h"), - ("PBackgroundIDBVersionChangeTransaction", "child"): - ("indexedDB::BackgroundVersionChangeTransactionChild", - "mozilla/dom/indexedDB/ActorsChild.h"), - ("PBackgroundIndexedDBUtils", "child"): - ("indexedDB::BackgroundUtilsChild", "mozilla/dom/indexedDB/ActorsChild.h"), - ("PBackgroundMutableFile", "child"): - ("indexedDB::BackgroundMutableFileChild", "mozilla/dom/indexedDB/ActorsChild.h"), } # Our long term goal is to burn this list down, so new entries should be added @@ -436,180 +589,175 @@ # peer's r+ to add something new! # set() of (Protocol, side) -VIRTUAL_CALL_CLASSES = set([ - # Defined as a strange template - ("PMedia", "parent"), - ("PTexture", "parent"), - - # Defined in a .cpp - ("PBackgroundFileHandle", "parent"), - ("PBackgroundFileRequest", "parent"), - ("PBackgroundIDBCursor", "parent"), - ("PBackgroundIDBDatabase", "parent"), - ("PBackgroundIDBDatabaseFile", "child"), - ("PBackgroundIDBDatabaseFile", "parent"), - ("PBackgroundIDBDatabaseRequest", "parent"), - ("PBackgroundIDBFactory", "parent"), - ("PBackgroundIDBFactoryRequest", "parent"), - ("PBackgroundIDBRequest", "parent"), - ("PBackgroundIDBTransaction", "parent"), - ("PBackgroundIDBVersionChangeTransaction", "parent"), - ("PBackgroundIndexedDBUtils", "parent"), - ("PBackgroundLSDatabase", "parent"), - ("PBackgroundLSObserver", "parent"), - ("PBackgroundLSRequest", "parent"), - ("PBackgroundLSSimpleRequest", "parent"), - ("PBackgroundLSSnapshot", "parent"), - ("PBackgroundSDBConnection", "parent"), - ("PBackgroundSDBRequest", "parent"), - ("PBackgroundTest", "child"), - ("PBackgroundTest", "parent"), - ("PChildToParentStream", "child"), - ("PChildToParentStream", "parent"), - ("PContentPermissionRequest", "parent"), - ("PCycleCollectWithLogs", "child"), - ("PCycleCollectWithLogs", "parent"), - ("PHal", "child"), - ("PHal", "parent"), - ("PIndexedDBPermissionRequest", "parent"), - ("PParentToChildStream", "child"), - ("PParentToChildStream", "parent"), - ("PProcessHangMonitor", "child"), - ("PProcessHangMonitor", "parent"), - ("PQuota", "parent"), - ("PQuotaRequest", "parent"), - ("PQuotaUsageRequest", "parent"), - ("PSimpleChannel", "child"), - ("PTexture", "child"), - - # .h is not exported - ("PBackground", "child"), - ("PBackground", "parent"), - ("PBackgroundLSDatabase", "child"), - ("PBackgroundLSObserver", "child"), - ("PBackgroundLSRequest", "child"), - ("PBackgroundLSSimpleRequest", "child"), - ("PBackgroundLSSnapshot", "child"), - ("PBackgroundSDBConnection", "child"), - ("PBackgroundSDBRequest", "child"), - ("PBroadcastChannel", "child"), - ("PBroadcastChannel", "parent"), - ("PChromiumCDM", "child"), - ("PClientHandle", "child"), - ("PClientHandle", "parent"), - ("PClientHandleOp", "child"), - ("PClientHandleOp", "parent"), - ("PClientManager", "child"), - ("PClientManager", "parent"), - ("PClientManagerOp", "child"), - ("PClientManagerOp", "parent"), - ("PClientNavigateOp", "child"), - ("PClientNavigateOp", "parent"), - ("PClientOpenWindowOp", "child"), - ("PClientOpenWindowOp", "parent"), - ("PClientSource", 
"child"), - ("PClientSource", "parent"), - ("PClientSourceOp", "child"), - ("PClientSourceOp", "parent"), - ("PColorPicker", "child"), - ("PColorPicker", "parent"), - ("PDataChannel", "child"), - ("PFileChannel", "child"), - ("PFilePicker", "child"), - ("PFunctionBroker", "child"), - ("PFunctionBroker", "parent"), - ("PHandlerService", "child"), - ("PHandlerService", "parent"), - ("PPluginBackgroundDestroyer", "child"), - ("PPluginBackgroundDestroyer", "parent"), - ("PRemotePrintJob", "child"), - ("PRemotePrintJob", "parent"), - # PRemotePrintJob, PPrinting, PPrintProgressDialog and PPrintSettingsDialog - # need to be virtual for --disable-printing builds. - ("PPrinting", "child"), - ("PPrinting", "parent"), - ("PPrintProgressDialog", "child"), - ("PPrintProgressDialog", "parent"), - ("PPrintSettingsDialog", "child"), - ("PPrintSettingsDialog", "parent"), - ("PQuota", "child"), - ("PQuotaRequest", "child"), - ("PQuotaUsageRequest", "child"), - ("PServiceWorker", "child"), - ("PServiceWorker", "parent"), - ("PServiceWorkerContainer", "child"), - ("PServiceWorkerContainer", "parent"), - ("PServiceWorkerRegistration", "child"), - ("PServiceWorkerRegistration", "parent"), - ("PServiceWorkerUpdater", "child"), - ("PServiceWorkerUpdater", "parent"), - ("PVRLayer", "parent"), - ("PWebBrowserPersistResources", "child"), - ("PWebBrowserPersistResources", "parent"), - ("PWebBrowserPersistSerialize", "child"), - ("PWebBrowserPersistSerialize", "parent"), - ("PWebrtcGlobal", "child"), - ("PWebrtcGlobal", "parent"), - - # .h is only exported on some platforms/configs - ("PCameras", "child"), - ("PCameras", "parent"), - ("PCompositorWidget", "child"), - ("PCompositorWidget", "parent"), - ("PDocAccessible", "child"), - ("PDocAccessible", "parent"), - ("PPluginSurface", "parent"), - ("PPluginWidget", "child"), - ("PPluginWidget", "parent"), - ("PProfiler", "child"), - ("PProfiler", "parent"), - ("PSpeechSynthesisRequest", "child"), - ("PSpeechSynthesisRequest", "parent"), - ("PStunAddrsRequest", "child"), - ("PStunAddrsRequest", "parent"), - ("PWebrtcTCPSocket", "child"), - ("PWebrtcTCPSocket", "parent"), - - # .h includes something that's a LOCAL_INCLUDE - ("PBackgroundLocalStorageCache", "child"), - ("PBackgroundLocalStorageCache", "parent"), - ("PBackgroundSessionStorageCache", "child"), - ("PBackgroundSessionStorageCache", "parent"), - ("PBackgroundSessionStorageManager", "child"), - ("PBackgroundSessionStorageManager", "parent"), - ("PBackgroundStorage", "child"), - ("PBackgroundStorage", "parent"), - ("PBrowserStream", "parent"), - ("PExternalHelperApp", "parent"), - ("PFTPChannel", "child"), - ("PFTPChannel", "parent"), - ("PHttpChannel", "child"), - ("PHttpChannel", "parent"), - ("PSessionStorageObserver", "child"), - ("PSessionStorageObserver", "parent"), - - # can't be included safely for compilation error reasons - ("PGMPContent", "parent"), - ("PGMPService", "child"), - ("PGMPService", "parent"), - ("PGMPStorage", "parent"), - ("PGMPVideoDecoder", "parent"), - ("PGMPVideoEncoder", "parent"), - ("PWebRenderBridge", "parent"), - - # Not actually subclassed - ("PLoginReputation", "child"), - ("PPluginSurface", "child"), - ("PTestShellCommand", "child"), - - # _ipdltest - # Not actually subclassed - ("PTestIndirectProtocolParamFirst", "child"), - ("PTestIndirectProtocolParamFirst", "parent"), - ("PTestIndirectProtocolParamManage", "child"), - ("PTestIndirectProtocolParamManage", "parent"), - ("PTestIndirectProtocolParamSecond", "child"), - ("PTestIndirectProtocolParamSecond", "parent"), - 
("PTestPriority", "child"), - ("PTestPriority", "parent"), -]) +VIRTUAL_CALL_CLASSES = set( + [ + # Defined as a strange template + ("PMedia", "parent"), + ("PTexture", "parent"), + # Defined in a .cpp + ("PBackgroundFileHandle", "parent"), + ("PBackgroundFileRequest", "parent"), + ("PBackgroundIDBCursor", "parent"), + ("PBackgroundIDBDatabase", "parent"), + ("PBackgroundIDBDatabaseFile", "child"), + ("PBackgroundIDBDatabaseFile", "parent"), + ("PBackgroundIDBDatabaseRequest", "parent"), + ("PBackgroundIDBFactory", "parent"), + ("PBackgroundIDBFactoryRequest", "parent"), + ("PBackgroundIDBRequest", "parent"), + ("PBackgroundIDBTransaction", "parent"), + ("PBackgroundIDBVersionChangeTransaction", "parent"), + ("PBackgroundIndexedDBUtils", "parent"), + ("PBackgroundLSDatabase", "parent"), + ("PBackgroundLSObserver", "parent"), + ("PBackgroundLSRequest", "parent"), + ("PBackgroundLSSimpleRequest", "parent"), + ("PBackgroundLSSnapshot", "parent"), + ("PBackgroundSDBConnection", "parent"), + ("PBackgroundSDBRequest", "parent"), + ("PBackgroundTest", "child"), + ("PBackgroundTest", "parent"), + ("PChildToParentStream", "child"), + ("PChildToParentStream", "parent"), + ("PContentPermissionRequest", "parent"), + ("PCycleCollectWithLogs", "child"), + ("PCycleCollectWithLogs", "parent"), + ("PHal", "child"), + ("PHal", "parent"), + ("PIndexedDBPermissionRequest", "parent"), + ("PParentToChildStream", "child"), + ("PParentToChildStream", "parent"), + ("PProcessHangMonitor", "child"), + ("PProcessHangMonitor", "parent"), + ("PQuota", "parent"), + ("PQuotaRequest", "parent"), + ("PQuotaUsageRequest", "parent"), + ("PSimpleChannel", "child"), + ("PTexture", "child"), + # .h is not exported + ("PBackground", "child"), + ("PBackground", "parent"), + ("PBackgroundLSDatabase", "child"), + ("PBackgroundLSObserver", "child"), + ("PBackgroundLSRequest", "child"), + ("PBackgroundLSSimpleRequest", "child"), + ("PBackgroundLSSnapshot", "child"), + ("PBackgroundSDBConnection", "child"), + ("PBackgroundSDBRequest", "child"), + ("PBroadcastChannel", "child"), + ("PBroadcastChannel", "parent"), + ("PChromiumCDM", "child"), + ("PClientHandle", "child"), + ("PClientHandle", "parent"), + ("PClientHandleOp", "child"), + ("PClientHandleOp", "parent"), + ("PClientManager", "child"), + ("PClientManager", "parent"), + ("PClientManagerOp", "child"), + ("PClientManagerOp", "parent"), + ("PClientNavigateOp", "child"), + ("PClientNavigateOp", "parent"), + ("PClientOpenWindowOp", "child"), + ("PClientOpenWindowOp", "parent"), + ("PClientSource", "child"), + ("PClientSource", "parent"), + ("PClientSourceOp", "child"), + ("PClientSourceOp", "parent"), + ("PColorPicker", "child"), + ("PColorPicker", "parent"), + ("PDataChannel", "child"), + ("PFileChannel", "child"), + ("PFilePicker", "child"), + ("PFunctionBroker", "child"), + ("PFunctionBroker", "parent"), + ("PHandlerService", "child"), + ("PHandlerService", "parent"), + ("PPluginBackgroundDestroyer", "child"), + ("PPluginBackgroundDestroyer", "parent"), + ("PRemotePrintJob", "child"), + ("PRemotePrintJob", "parent"), + # PRemotePrintJob, PPrinting, PPrintProgressDialog and PPrintSettingsDialog + # need to be virtual for --disable-printing builds. 
+ ("PPrinting", "child"), + ("PPrinting", "parent"), + ("PPrintProgressDialog", "child"), + ("PPrintProgressDialog", "parent"), + ("PPrintSettingsDialog", "child"), + ("PPrintSettingsDialog", "parent"), + ("PQuota", "child"), + ("PQuotaRequest", "child"), + ("PQuotaUsageRequest", "child"), + ("PServiceWorker", "child"), + ("PServiceWorker", "parent"), + ("PServiceWorkerContainer", "child"), + ("PServiceWorkerContainer", "parent"), + ("PServiceWorkerRegistration", "child"), + ("PServiceWorkerRegistration", "parent"), + ("PServiceWorkerUpdater", "child"), + ("PServiceWorkerUpdater", "parent"), + ("PVRLayer", "parent"), + ("PWebBrowserPersistResources", "child"), + ("PWebBrowserPersistResources", "parent"), + ("PWebBrowserPersistSerialize", "child"), + ("PWebBrowserPersistSerialize", "parent"), + ("PWebrtcGlobal", "child"), + ("PWebrtcGlobal", "parent"), + # .h is only exported on some platforms/configs + ("PCameras", "child"), + ("PCameras", "parent"), + ("PCompositorWidget", "child"), + ("PCompositorWidget", "parent"), + ("PDocAccessible", "child"), + ("PDocAccessible", "parent"), + ("PPluginSurface", "parent"), + ("PPluginWidget", "child"), + ("PPluginWidget", "parent"), + ("PProfiler", "child"), + ("PProfiler", "parent"), + ("PSpeechSynthesisRequest", "child"), + ("PSpeechSynthesisRequest", "parent"), + ("PStunAddrsRequest", "child"), + ("PStunAddrsRequest", "parent"), + ("PWebrtcTCPSocket", "child"), + ("PWebrtcTCPSocket", "parent"), + # .h includes something that's a LOCAL_INCLUDE + ("PBackgroundLocalStorageCache", "child"), + ("PBackgroundLocalStorageCache", "parent"), + ("PBackgroundSessionStorageCache", "child"), + ("PBackgroundSessionStorageCache", "parent"), + ("PBackgroundSessionStorageManager", "child"), + ("PBackgroundSessionStorageManager", "parent"), + ("PBackgroundStorage", "child"), + ("PBackgroundStorage", "parent"), + ("PBrowserStream", "parent"), + ("PExternalHelperApp", "parent"), + ("PFTPChannel", "child"), + ("PFTPChannel", "parent"), + ("PHttpChannel", "child"), + ("PHttpChannel", "parent"), + ("PSessionStorageObserver", "child"), + ("PSessionStorageObserver", "parent"), + # can't be included safely for compilation error reasons + ("PGMPContent", "parent"), + ("PGMPService", "child"), + ("PGMPService", "parent"), + ("PGMPStorage", "parent"), + ("PGMPVideoDecoder", "parent"), + ("PGMPVideoEncoder", "parent"), + ("PWebRenderBridge", "parent"), + # Not actually subclassed + ("PLoginReputation", "child"), + ("PPluginSurface", "child"), + ("PTestShellCommand", "child"), + # _ipdltest + # Not actually subclassed + ("PTestIndirectProtocolParamFirst", "child"), + ("PTestIndirectProtocolParamFirst", "parent"), + ("PTestIndirectProtocolParamManage", "child"), + ("PTestIndirectProtocolParamManage", "parent"), + ("PTestIndirectProtocolParamSecond", "child"), + ("PTestIndirectProtocolParamSecond", "parent"), + ("PTestPriority", "child"), + ("PTestPriority", "parent"), + ] +) diff --git a/ipc/ipdl/ipdl/lower.py b/ipc/ipdl/ipdl/lower.py index e1a54553d534e9..3e0a8a75188af7 100644 --- a/ipc/ipdl/ipdl/lower.py +++ b/ipc/ipdl/ipdl/lower.py @@ -23,15 +23,15 @@ class LowerToCxx: def lower(self, tu, segmentcapacitydict): - '''returns |[ header: File ], [ cpp : File ]| representing the -lowered form of |tu|''' + """returns |[ header: File ], [ cpp : File ]| representing the + lowered form of |tu|""" # annotate the AST with IPDL/C++ IR-type stuff used later tu.accept(_DecorateWithCxxStuff()) # Any modifications to the filename scheme here need corresponding # modifications in the ipdl.py 
driver script. name = tu.name - pheader, pcpp = File(name + '.h'), File(name + '.cpp') + pheader, pcpp = File(name + ".h"), File(name + ".cpp") _GenerateProtocolCode().lower(tu, pheader, pcpp, segmentcapacitydict) headers = [pheader] @@ -40,13 +40,18 @@ def lower(self, tu, segmentcapacitydict): if tu.protocol: pname = tu.protocol.name - parentheader, parentcpp = File(pname + 'Parent.h'), File(pname + 'Parent.cpp') + parentheader, parentcpp = ( + File(pname + "Parent.h"), + File(pname + "Parent.cpp"), + ) _GenerateProtocolParentCode().lower( - tu, pname+'Parent', parentheader, parentcpp) + tu, pname + "Parent", parentheader, parentcpp + ) - childheader, childcpp = File(pname + 'Child.h'), File(pname + 'Child.cpp') + childheader, childcpp = File(pname + "Child.h"), File(pname + "Child.cpp") _GenerateProtocolChildCode().lower( - tu, pname+'Child', childheader, childcpp) + tu, pname + "Child", childheader, childcpp + ) headers += [parentheader, childheader] cpps += [parentcpp, childcpp] @@ -58,22 +63,25 @@ def lower(self, tu, segmentcapacitydict): # Helper code ## + def hashfunc(value): - h = hash_str(value) % 2**32 + h = hash_str(value) % 2 ** 32 if h < 0: - h += 2**32 + h += 2 ** 32 return h _NULL_ACTOR_ID = ExprLiteral.ZERO _FREED_ACTOR_ID = ExprLiteral.ONE -_DISCLAIMER = Whitespace('''// +_DISCLAIMER = Whitespace( + """// // Automatically generated by ipdlc. // Edit at your own risk // -''') +""" +) class _struct: @@ -81,19 +89,19 @@ class _struct: def _namespacedHeaderName(name, namespaces): - pfx = '/'.join([ns.name for ns in namespaces]) + pfx = "/".join([ns.name for ns in namespaces]) if pfx: - return pfx + '/' + name + return pfx + "/" + name else: return name def _ipdlhHeaderName(tu): - assert tu.filetype == 'header' + assert tu.filetype == "header" return _namespacedHeaderName(tu.name, tu.namespaces) -def _protocolHeaderName(p, side=''): +def _protocolHeaderName(p, side=""): if side: side = side.title() base = p.name + side @@ -101,22 +109,21 @@ def _protocolHeaderName(p, side=''): def _includeGuardMacroName(headerfile): - return re.sub(r'[./]', '_', headerfile.name) + return re.sub(r"[./]", "_", headerfile.name) def _includeGuardStart(headerfile): guard = _includeGuardMacroName(headerfile) - return [CppDirective('ifndef', guard), - CppDirective('define', guard)] + return [CppDirective("ifndef", guard), CppDirective("define", guard)] def _includeGuardEnd(headerfile): guard = _includeGuardMacroName(headerfile) - return [CppDirective('endif', '// ifndef ' + guard)] + return [CppDirective("endif", "// ifndef " + guard)] def _messageStartName(ptype): - return ptype.name() + 'MsgStart' + return ptype.name() + "MsgStart" def _protocolId(ptype): @@ -145,32 +152,32 @@ def _actorTypeTagType(): def _actorId(actor=None): if actor is not None: - return ExprCall(ExprSelect(actor, '->', 'Id')) - return ExprCall(ExprVar('Id')) + return ExprCall(ExprSelect(actor, "->", "Id")) + return ExprCall(ExprVar("Id")) def _actorHId(actorhandle): - return ExprSelect(actorhandle, '.', 'mId') + return ExprSelect(actorhandle, ".", "mId") def _backstagePass(): - return ExprCall(ExprVar('mozilla::ipc::PrivateIPDLInterface')) + return ExprCall(ExprVar("mozilla::ipc::PrivateIPDLInterface")) def _iterType(ptr): - return Type('PickleIterator', ptr=ptr) + return Type("PickleIterator", ptr=ptr) def _deleteId(): - return ExprVar('Msg___delete____ID') + return ExprVar("Msg___delete____ID") def _deleteReplyId(): - return ExprVar('Reply___delete____ID') + return ExprVar("Reply___delete____ID") def _lookupListener(idexpr): - 
return ExprCall(ExprVar('Lookup'), args=[idexpr]) + return ExprCall(ExprVar("Lookup"), args=[idexpr]) def _makeForwardDeclForQClass(clsname, quals, cls=True, struct=False): @@ -190,8 +197,9 @@ def _makeForwardDeclForQClass(clsname, quals, cls=True, struct=False): def _makeForwardDeclForActor(ptype, side): - return _makeForwardDeclForQClass(_actorName(ptype.qname.baseid, side), - ptype.qname.quals) + return _makeForwardDeclForQClass( + _actorName(ptype.qname.baseid, side), ptype.qname.quals + ) def _makeForwardDecl(type): @@ -216,66 +224,66 @@ def _putInNamespaces(cxxthing, namespaces): def _sendPrefix(msgtype): """Prefix of the name of the C++ method that sends |msgtype|.""" if msgtype.isInterrupt(): - return 'Call' - return 'Send' + return "Call" + return "Send" def _recvPrefix(msgtype): """Prefix of the name of the C++ method that handles |msgtype|.""" if msgtype.isInterrupt(): - return 'Answer' - return 'Recv' + return "Answer" + return "Recv" def _flatTypeName(ipdltype): """Return a 'flattened' IPDL type name that can be used as an -identifier. -E.g., |Foo[]| --> |ArrayOfFoo|.""" + identifier. + E.g., |Foo[]| --> |ArrayOfFoo|.""" # NB: this logic depends heavily on what IPDL types are allowed to # be constructed; e.g., Foo[][] is disallowed. needs to be kept in # sync with grammar. if ipdltype.isIPDL() and ipdltype.isArray(): - return 'ArrayOf' + ipdltype.basetype.name() + return "ArrayOf" + ipdltype.basetype.name() if ipdltype.isIPDL() and ipdltype.isMaybe(): - return 'Maybe' + ipdltype.basetype.name() + return "Maybe" + ipdltype.basetype.name() return ipdltype.name() def _hasVisibleActor(ipdltype): """Return true iff a C++ decl of |ipdltype| would have an Actor* type. -For example: |Actor[]| would turn into |Array|, so this -function would return true for |Actor[]|.""" - return (ipdltype.isIPDL() - and (ipdltype.isActor() - or (ipdltype.hasBaseType() - and _hasVisibleActor(ipdltype.basetype)))) + For example: |Actor[]| would turn into |Array|, so this + function would return true for |Actor[]|.""" + return ipdltype.isIPDL() and ( + ipdltype.isActor() + or (ipdltype.hasBaseType() and _hasVisibleActor(ipdltype.basetype)) + ) def _abortIfFalse(cond, msg): - return StmtExpr(ExprCall( - ExprVar('MOZ_RELEASE_ASSERT'), - [cond, ExprLiteral.String(msg)])) + return StmtExpr( + ExprCall(ExprVar("MOZ_RELEASE_ASSERT"), [cond, ExprLiteral.String(msg)]) + ) def _refptr(T): - return Type('RefPtr', T=T) + return Type("RefPtr", T=T) def _uniqueptr(T): - return Type('UniquePtr', T=T) + return Type("UniquePtr", T=T) def _alreadyaddrefed(T): - return Type('already_AddRefed', T=T) + return Type("already_AddRefed", T=T) def _tuple(types, const=False, ref=False): - return Type('Tuple', T=types, const=const, ref=ref) + return Type("Tuple", T=types, const=const, ref=ref) def _promise(resolvetype, rejecttype, tail, resolver=False): - inner = Type('Private') if resolver else None - return Type('MozPromise', T=[resolvetype, rejecttype, tail], inner=inner) + inner = Type("Private") if resolver else None + return Type("MozPromise", T=[resolvetype, rejecttype, tail], inner=inner) def _makePromise(returns, side, resolver=False): @@ -285,10 +293,9 @@ def _makePromise(returns, side, resolver=False): resolvetype = returns[0].bareType(side) # MozPromise is purposefully made to be exclusive only. Really, we mean it. 
- return _promise(resolvetype, - _ResponseRejectReason.Type(), - ExprLiteral.TRUE, - resolver=resolver) + return _promise( + resolvetype, _ResponseRejectReason.Type(), ExprLiteral.TRUE, resolver=resolver + ) def _makeResolver(returns, side): @@ -296,76 +303,83 @@ def _makeResolver(returns, side): resolvetype = _tuple([d.moveType(side) for d in returns]) else: resolvetype = returns[0].moveType(side) - return TypeFunction([Decl(resolvetype, '')]) + return TypeFunction([Decl(resolvetype, "")]) def _cxxArrayType(basetype, const=False, ref=False): - return Type('nsTArray', T=basetype, const=const, ref=ref, hasimplicitcopyctor=False) + return Type("nsTArray", T=basetype, const=const, ref=ref, hasimplicitcopyctor=False) def _cxxMaybeType(basetype, const=False, ref=False): - return Type('mozilla::Maybe', T=basetype, const=const, ref=ref, - hasimplicitcopyctor=basetype.hasimplicitcopyctor) + return Type( + "mozilla::Maybe", + T=basetype, + const=const, + ref=ref, + hasimplicitcopyctor=basetype.hasimplicitcopyctor, + ) def _cxxManagedContainerType(basetype, const=False, ref=False): - return Type('ManagedContainer', T=basetype, - const=const, ref=ref, hasimplicitcopyctor=False) + return Type( + "ManagedContainer", T=basetype, const=const, ref=ref, hasimplicitcopyctor=False + ) def _cxxLifecycleProxyType(ptr=False): - return Type('mozilla::ipc::ActorLifecycleProxy', ptr=ptr) + return Type("mozilla::ipc::ActorLifecycleProxy", ptr=ptr) def _callInsertManagedActor(managees, actor): - return ExprCall(ExprSelect(managees, '.', 'PutEntry'), - args=[actor]) + return ExprCall(ExprSelect(managees, ".", "PutEntry"), args=[actor]) def _callRemoveManagedActor(managees, actor): - return ExprCall(ExprSelect(managees, '.', 'RemoveEntry'), - args=[actor]) + return ExprCall(ExprSelect(managees, ".", "RemoveEntry"), args=[actor]) def _callClearManagedActors(managees): - return ExprCall(ExprSelect(managees, '.', 'Clear')) + return ExprCall(ExprSelect(managees, ".", "Clear")) def _callHasManagedActor(managees, actor): - return ExprCall(ExprSelect(managees, '.', 'Contains'), args=[actor]) + return ExprCall(ExprSelect(managees, ".", "Contains"), args=[actor]) def _callGetLifecycleProxy(actor=ExprVar.THIS): - return ExprCall(ExprSelect(actor, '->', 'GetLifecycleProxy')) + return ExprCall(ExprSelect(actor, "->", "GetLifecycleProxy")) def _releaseLifecycleProxy(actor=None): return StmtCode( - ''' + """ mozilla::ipc::ActorLifecycleProxy* proxy = (${actor})->GetLifecycleProxy(); NS_IF_RELEASE(proxy); - ''', - actor=actor or ExprVar.THIS) + """, + actor=actor or ExprVar.THIS, + ) def _otherSide(side): - if side == 'child': - return 'parent' - if side == 'parent': - return 'child' + if side == "child": + return "parent" + if side == "parent": + return "child" assert 0 def _ifLogging(topLevelProtocol, stmts): return StmtCode( - ''' + """ if (mozilla::ipc::LoggingEnabledFor(${proto})) { $*{stmts} } - ''', + """, proto=topLevelProtocol, - stmts=stmts) + stmts=stmts, + ) + # XXX we need to remove these and install proper error handling @@ -373,38 +387,40 @@ def _ifLogging(topLevelProtocol, stmts): def _printErrorMessage(msg): if isinstance(msg, str): msg = ExprLiteral.String(msg) - return StmtExpr( - ExprCall(ExprVar('NS_ERROR'), args=[msg])) + return StmtExpr(ExprCall(ExprVar("NS_ERROR"), args=[msg])) def _protocolErrorBreakpoint(msg): if isinstance(msg, str): msg = ExprLiteral.String(msg) - return StmtExpr(ExprCall(ExprVar('mozilla::ipc::ProtocolErrorBreakpoint'), - args=[msg])) + return StmtExpr( + 
ExprCall(ExprVar("mozilla::ipc::ProtocolErrorBreakpoint"), args=[msg]) + ) def _printWarningMessage(msg): if isinstance(msg, str): msg = ExprLiteral.String(msg) - return StmtExpr( - ExprCall(ExprVar('NS_WARNING'), args=[msg])) + return StmtExpr(ExprCall(ExprVar("NS_WARNING"), args=[msg])) def _fatalError(msg): - return StmtExpr( - ExprCall(ExprVar('FatalError'), args=[ExprLiteral.String(msg)])) + return StmtExpr(ExprCall(ExprVar("FatalError"), args=[ExprLiteral.String(msg)])) def _logicError(msg): return StmtExpr( - ExprCall(ExprVar('mozilla::ipc::LogicError'), args=[ExprLiteral.String(msg)])) + ExprCall(ExprVar("mozilla::ipc::LogicError"), args=[ExprLiteral.String(msg)]) + ) def _sentinelReadError(classname): return StmtExpr( - ExprCall(ExprVar('mozilla::ipc::SentinelReadError'), - args=[ExprLiteral.String(classname)])) + ExprCall( + ExprVar("mozilla::ipc::SentinelReadError"), + args=[ExprLiteral.String(classname)], + ) + ) # Results that IPDL-generated code returns back to *Channel code. @@ -414,15 +430,16 @@ def _sentinelReadError(classname): class _Result: @staticmethod def Type(): - return Type('Result') + return Type("Result") + + Processed = ExprVar("MsgProcessed") + NotKnown = ExprVar("MsgNotKnown") + NotAllowed = ExprVar("MsgNotAllowed") + PayloadError = ExprVar("MsgPayloadError") + ProcessingError = ExprVar("MsgProcessingError") + RouteError = ExprVar("MsgRouteError") + ValuError = ExprVar("MsgValueError") # [sic] - Processed = ExprVar('MsgProcessed') - NotKnown = ExprVar('MsgNotKnown') - NotAllowed = ExprVar('MsgNotAllowed') - PayloadError = ExprVar('MsgPayloadError') - ProcessingError = ExprVar('MsgProcessingError') - RouteError = ExprVar('MsgRouteError') - ValuError = ExprVar('MsgValueError') # [sic] # these |errfn*| are functions that generate code to be executed on an # error, such as "bad actor ID". each is given a Python string @@ -432,75 +449,70 @@ def Type(): def errfnSend(msg, errcode=ExprLiteral.FALSE): - return [ - _fatalError(msg), - StmtReturn(errcode) - ] + return [_fatalError(msg), StmtReturn(errcode)] + +def errfnSendCtor(msg): + return errfnSend(msg, errcode=ExprLiteral.NULL) -def errfnSendCtor(msg): return errfnSend(msg, errcode=ExprLiteral.NULL) # TODO should this error handling be strengthened for dtors? 
def errfnSendDtor(msg): - return [ - _printErrorMessage(msg), - StmtReturn.FALSE - ] + return [_printErrorMessage(msg), StmtReturn.FALSE] + # used in |OnMessage*()| handlers that hand in-messages off to Recv*() # interface methods def errfnRecv(msg, errcode=_Result.ValuError): - return [ - _fatalError(msg), - StmtReturn(errcode) - ] + return [_fatalError(msg), StmtReturn(errcode)] def errfnSentinel(rvalue=ExprLiteral.FALSE): def inner(msg): return [_sentinelReadError(msg), StmtReturn(rvalue)] + return inner def _destroyMethod(): - return ExprVar('ActorDestroy') + return ExprVar("ActorDestroy") def errfnUnreachable(msg): - return [ - _logicError(msg) - ] + return [_logicError(msg)] class _DestroyReason: @staticmethod - def Type(): return Type('ActorDestroyReason') + def Type(): + return Type("ActorDestroyReason") - Deletion = ExprVar('Deletion') - AncestorDeletion = ExprVar('AncestorDeletion') - NormalShutdown = ExprVar('NormalShutdown') - AbnormalShutdown = ExprVar('AbnormalShutdown') - FailedConstructor = ExprVar('FailedConstructor') + Deletion = ExprVar("Deletion") + AncestorDeletion = ExprVar("AncestorDeletion") + NormalShutdown = ExprVar("NormalShutdown") + AbnormalShutdown = ExprVar("AbnormalShutdown") + FailedConstructor = ExprVar("FailedConstructor") class _ResponseRejectReason: @staticmethod def Type(): - return Type('ResponseRejectReason') + return Type("ResponseRejectReason") - SendError = ExprVar('ResponseRejectReason::SendError') - ChannelClosed = ExprVar('ResponseRejectReason::ChannelClosed') - HandlerRejected = ExprVar('ResponseRejectReason::HandlerRejected') - ActorDestroyed = ExprVar('ResponseRejectReason::ActorDestroyed') + SendError = ExprVar("ResponseRejectReason::SendError") + ChannelClosed = ExprVar("ResponseRejectReason::ChannelClosed") + HandlerRejected = ExprVar("ResponseRejectReason::HandlerRejected") + ActorDestroyed = ExprVar("ResponseRejectReason::ActorDestroyed") # ----------------------------------------------------------------------------- # Intermediate representation (IR) nodes used during lowering + class _ConvertToCxxType(TypeVisitor): def __init__(self, side, fq): self.side = side @@ -552,11 +564,14 @@ def visitManagedEndpointType(self, s): def visitUniquePtrType(self, s): return Type(self.typename(s)) - def visitProtocolType(self, p): assert 0 + def visitProtocolType(self, p): + assert 0 - def visitMessageType(self, m): assert 0 + def visitMessageType(self, m): + assert 0 - def visitVoidType(self, v): assert 0 + def visitVoidType(self, v): + assert 0 def _cxxBareType(ipdltype, side, fq=False): @@ -626,10 +641,12 @@ def _cxxTypeNeedsMoveForSend(ipdltype): if ipdltype.isIPDL(): if ipdltype.hasBaseType(): return _cxxTypeNeedsMove(ipdltype.basetype) - return (ipdltype.isShmem() or - ipdltype.isByteBuf() or - ipdltype.isEndpoint() or - ipdltype.isManagedEndpoint()) + return ( + ipdltype.isShmem() + or ipdltype.isByteBuf() + or ipdltype.isEndpoint() + or ipdltype.isManagedEndpoint() + ) return False @@ -650,8 +667,11 @@ def _cxxTypeCanOnlyMove(ipdltype, visited=None): if ipdltype.isMaybe() or ipdltype.isArray(): return _cxxTypeCanOnlyMove(ipdltype.basetype, visited) if ipdltype.isStruct() or ipdltype.isUnion(): - return any(_cxxTypeCanOnlyMove(t, visited) - for t in ipdltype.itercomponents() if t not in visited) + return any( + _cxxTypeCanOnlyMove(t, visited) + for t in ipdltype.itercomponents() + if t not in visited + ) return ipdltype.isManagedEndpoint() return False @@ -698,11 +718,12 @@ def _cxxConstPtrToType(ipdltype, side): def _allocMethod(ptype, 
side): - return 'Alloc' + ptype.name() + side.title() + return "Alloc" + ptype.name() + side.title() def _deallocMethod(ptype, side): - return 'Dealloc' + ptype.name() + side.title() + return "Dealloc" + ptype.name() + side.title() + ## # A _HybridDecl straddles IPDL and C++ decls. It knows which C++ @@ -713,7 +734,7 @@ def _deallocMethod(ptype, side): class _HybridDecl: """A hybrid decl stores both an IPDL type and all the C++ type -info needed by later passes, along with a basic name for the decl.""" + info needed by later passes, along with a basic name for the decl.""" def __init__(self, ipdltype, name): self.ipdltype = ipdltype @@ -728,7 +749,7 @@ def bareType(self, side, fq=False): def refType(self, side): """Return this decl's C++ type as a 'reference' type, which is not -necessarily a C++ reference.""" + necessarily a C++ reference.""" return _cxxRefType(self.ipdltype, side) def constRefType(self, side): @@ -774,6 +795,7 @@ def forceMoveType(self, side): assert _cxxTypeCanMove(self.ipdltype) return _cxxForceMoveRefType(self.ipdltype, side) + # -------------------------------------------------- @@ -840,7 +862,7 @@ def __init__(self, ipdltype, name, sd, side=None): _CompoundTypeComponent.__init__(self, ipdltype, fname, side, sd) - def getMethod(self, thisexpr=None, sel='.'): + def getMethod(self, thisexpr=None, sel="."): meth = self.var() if thisexpr is not None: return ExprSelect(thisexpr, sel, meth.name) @@ -849,32 +871,32 @@ def getMethod(self, thisexpr=None, sel='.'): def refExpr(self, thisexpr=None): ref = self.memberVar() if thisexpr is not None: - ref = ExprSelect(thisexpr, '.', ref.name) + ref = ExprSelect(thisexpr, ".", ref.name) return ref def constRefExpr(self, thisexpr=None): # sigh, gross hack refexpr = self.refExpr(thisexpr) - if 'Shmem' == self.ipdltype.name(): - refexpr = ExprCast(refexpr, Type('Shmem', ref=True), const=True) - if 'ByteBuf' == self.ipdltype.name(): - refexpr = ExprCast(refexpr, Type('ByteBuf', ref=True), const=True) - if 'FileDescriptor' == self.ipdltype.name(): - refexpr = ExprCast(refexpr, Type('FileDescriptor', ref=True), const=True) + if "Shmem" == self.ipdltype.name(): + refexpr = ExprCast(refexpr, Type("Shmem", ref=True), const=True) + if "ByteBuf" == self.ipdltype.name(): + refexpr = ExprCast(refexpr, Type("ByteBuf", ref=True), const=True) + if "FileDescriptor" == self.ipdltype.name(): + refexpr = ExprCast(refexpr, Type("FileDescriptor", ref=True), const=True) return refexpr def argVar(self): - return ExprVar('_' + self.name) + return ExprVar("_" + self.name) def memberVar(self): - return ExprVar(self.name + '_') + return ExprVar(self.name + "_") class UnionDecl(ipdl.ast.UnionDecl, HasFQName): def callType(self, var=None): - func = ExprVar('type') + func = ExprVar("type") if var is not None: - func = ExprSelect(var, '.', func.name) + func = ExprSelect(var, ".", func.name) return ExprCall(func) @staticmethod @@ -885,7 +907,7 @@ def upgrade(unionDecl): class _UnionMember(_CompoundTypeComponent): """Not in the AFL sense, but rather a member (e.g. 
|int;|) of an -IPDL union type.""" + IPDL union type.""" def __init__(self, ipdltype, ud, side=None, other=None): flatname = _flatTypeName(ipdltype) @@ -893,7 +915,7 @@ def __init__(self, ipdltype, ud, side=None, other=None): if special: flatname += side.title() - _CompoundTypeComponent.__init__(self, ipdltype, 'V' + flatname, side, ud) + _CompoundTypeComponent.__init__(self, ipdltype, "V" + flatname, side, ud) self.flattypename = flatname if special: if other is not None: @@ -907,7 +929,7 @@ def __init__(self, ipdltype, ud, side=None, other=None): self.recursive = ud.decl.type.mutuallyRecursiveWith(ipdltype) def enum(self): - return 'T' + self.flattypename + return "T" + self.flattypename def enumvar(self): return ExprVar(self.enum()) @@ -923,14 +945,14 @@ def unionType(self): if self.recursive: return self.ptrToType() else: - return Type('mozilla::AlignedStorage2', T=self.internalType()) + return Type("mozilla::AlignedStorage2", T=self.internalType()) def unionValue(self): # NB: knows that Union's storage C union is named |mValue| - return ExprSelect(ExprVar('mValue'), '.', self.name) + return ExprSelect(ExprVar("mValue"), ".", self.name) def typedef(self): - return self.flattypename + '__tdef' + return self.flattypename + "__tdef" def callGetConstPtr(self): """Return an expression of type self.constptrToSelfType()""" @@ -943,8 +965,12 @@ def callGetPtr(self): def callOperatorEq(self, rhs): if self.ipdltype.isIPDL() and self.ipdltype.isActor(): rhs = ExprCast(rhs, self.bareType(), const=True) - elif self.ipdltype.isIPDL() and self.ipdltype.isArray() and not isinstance(rhs, ExprMove): - rhs = ExprCall(ExprSelect(rhs, '.', 'Clone'), args=[]) + elif ( + self.ipdltype.isIPDL() + and self.ipdltype.isArray() + and not isinstance(rhs, ExprMove) + ): + rhs = ExprCall(ExprSelect(rhs, ".", "Clone"), args=[]) return ExprAssn(ExprDeref(self.callGetPtr()), rhs) def callCtor(self, expr=None): @@ -954,36 +980,46 @@ def callCtor(self, expr=None): args = None elif self.ipdltype.isIPDL() and self.ipdltype.isActor(): args = [ExprCast(expr, self.bareType(), const=True)] - elif self.ipdltype.isIPDL() and self.ipdltype.isArray() and not isinstance(expr, ExprMove): - args = [ExprCall(ExprSelect(expr, '.', 'Clone'), args=[])] + elif ( + self.ipdltype.isIPDL() + and self.ipdltype.isArray() + and not isinstance(expr, ExprMove) + ): + args = [ExprCall(ExprSelect(expr, ".", "Clone"), args=[])] else: args = [expr] if self.recursive: - return ExprAssn(self.callGetPtr(), - ExprNew(self.bareType(self.side), - args=args)) + return ExprAssn( + self.callGetPtr(), ExprNew(self.bareType(self.side), args=args) + ) else: - return ExprNew(self.bareType(self.side), - args=args, - newargs=[ExprVar('mozilla::KnownNotNull'), self.callGetPtr()]) + return ExprNew( + self.bareType(self.side), + args=args, + newargs=[ExprVar("mozilla::KnownNotNull"), self.callGetPtr()], + ) def callDtor(self): if self.recursive: return ExprDelete(self.callGetPtr()) else: - return ExprCall( - ExprSelect(self.callGetPtr(), '->', '~' + self.typedef())) + return ExprCall(ExprSelect(self.callGetPtr(), "->", "~" + self.typedef())) - def getTypeName(self): return 'get_' + self.flattypename + def getTypeName(self): + return "get_" + self.flattypename - def getConstTypeName(self): return 'get_' + self.flattypename + def getConstTypeName(self): + return "get_" + self.flattypename - def getOtherTypeName(self): return 'get_' + self.otherflattypename + def getOtherTypeName(self): + return "get_" + self.otherflattypename - def getPtrName(self): return 'ptr_' + 
self.flattypename + def getPtrName(self): + return "ptr_" + self.flattypename - def getConstPtrName(self): return 'constptr_' + self.flattypename + def getConstPtrName(self): + return "constptr_" + self.flattypename def ptrToSelfExpr(self): """|*ptrToSelfExpr()| has type |self.bareType()|""" @@ -991,14 +1027,14 @@ def ptrToSelfExpr(self): if self.recursive: return v else: - return ExprCall(ExprSelect(v, '.', 'addr')) + return ExprCall(ExprSelect(v, ".", "addr")) def constptrToSelfExpr(self): """|*constptrToSelfExpr()| has type |self.constType()|""" v = self.unionValue() if self.recursive: return v - return ExprCall(ExprSelect(v, '.', 'addr')) + return ExprCall(ExprSelect(v, ".", "addr")) def ptrToInternalType(self): t = self.ptrToType() @@ -1020,14 +1056,15 @@ def defaultValue(self, fq=False): def getConstValue(self): v = ExprDeref(self.callGetConstPtr()) # sigh - if 'ByteBuf' == self.ipdltype.name(): - v = ExprCast(v, Type('ByteBuf', ref=True), const=True) - if 'Shmem' == self.ipdltype.name(): - v = ExprCast(v, Type('Shmem', ref=True), const=True) - if 'FileDescriptor' == self.ipdltype.name(): - v = ExprCast(v, Type('FileDescriptor', ref=True), const=True) + if "ByteBuf" == self.ipdltype.name(): + v = ExprCast(v, Type("ByteBuf", ref=True), const=True) + if "Shmem" == self.ipdltype.name(): + v = ExprCast(v, Type("Shmem", ref=True), const=True) + if "FileDescriptor" == self.ipdltype.name(): + v = ExprCast(v, Type("FileDescriptor", ref=True), const=True) return v + # -------------------------------------------------- @@ -1038,87 +1075,91 @@ def baseName(self): def recvMethod(self): name = _recvPrefix(self.decl.type) + self.baseName() if self.decl.type.isCtor(): - name += 'Constructor' + name += "Constructor" return name def sendMethod(self): name = _sendPrefix(self.decl.type) + self.baseName() if self.decl.type.isCtor(): - name += 'Constructor' + name += "Constructor" return name def hasReply(self): - return (self.decl.type.hasReply() - or self.decl.type.isCtor() - or self.decl.type.isDtor()) + return ( + self.decl.type.hasReply() + or self.decl.type.isCtor() + or self.decl.type.isDtor() + ) def hasAsyncReturns(self): - return (self.decl.type.isAsync() and - self.returns) + return self.decl.type.isAsync() and self.returns def msgCtorFunc(self): - return 'Msg_%s' % (self.decl.progname) + return "Msg_%s" % (self.decl.progname) - def prettyMsgName(self, pfx=''): + def prettyMsgName(self, pfx=""): return pfx + self.msgCtorFunc() def pqMsgCtorFunc(self): - return '%s::%s' % (self.namespace, self.msgCtorFunc()) + return "%s::%s" % (self.namespace, self.msgCtorFunc()) - def msgId(self): return self.msgCtorFunc() + '__ID' + def msgId(self): + return self.msgCtorFunc() + "__ID" def pqMsgId(self): - return '%s::%s' % (self.namespace, self.msgId()) + return "%s::%s" % (self.namespace, self.msgId()) def replyCtorFunc(self): - return 'Reply_%s' % (self.decl.progname) + return "Reply_%s" % (self.decl.progname) def pqReplyCtorFunc(self): - return '%s::%s' % (self.namespace, self.replyCtorFunc()) + return "%s::%s" % (self.namespace, self.replyCtorFunc()) - def replyId(self): return self.replyCtorFunc() + '__ID' + def replyId(self): + return self.replyCtorFunc() + "__ID" def pqReplyId(self): - return '%s::%s' % (self.namespace, self.replyId()) + return "%s::%s" % (self.namespace, self.replyId()) - def prettyReplyName(self, pfx=''): + def prettyReplyName(self, pfx=""): return pfx + self.replyCtorFunc() def promiseName(self): name = self.baseName() if self.decl.type.isCtor(): - name += 'Constructor' - name += 
'Promise' + name += "Constructor" + name += "Promise" return name def resolverName(self): - return self.baseName() + 'Resolver' + return self.baseName() + "Resolver" def actorDecl(self): return self.params[0] - def makeCxxParams(self, paramsems='in', returnsems='out', - side=None, implicit=True, direction=None): + def makeCxxParams( + self, paramsems="in", returnsems="out", side=None, implicit=True, direction=None + ): """Return a list of C++ decls per the spec'd configuration. -|params| and |returns| is the C++ semantics of those: 'in', 'out', or None.""" + |params| and |returns| is the C++ semantics of those: 'in', 'out', or None.""" def makeDecl(d, sems): - if self.decl.type.tainted and direction == 'recv': + if self.decl.type.tainted and direction == "recv": # Tainted types are passed by-value, allowing the receiver to move them if desired. - assert sems != 'out' - return Decl(Type('Tainted', T=d.bareType(side)), d.name) + assert sems != "out" + return Decl(Type("Tainted", T=d.bareType(side)), d.name) - if sems == 'in': + if sems == "in": return Decl(d.inType(side), d.name) - elif sems == 'move': + elif sems == "move": return Decl(d.moveType(side), d.name) - elif sems == 'out': + elif sems == "out": return Decl(d.outType(side), d.name) else: assert 0 def makeResolverDecl(returns): - return Decl(Type(self.resolverName(), rvalref=True), 'aResolve') + return Decl(Type(self.resolverName(), rvalref=True), "aResolve") def makeCallbackResolveDecl(returns): if len(returns) > 1: @@ -1126,22 +1167,28 @@ def makeCallbackResolveDecl(returns): else: resolvetype = returns[0].bareType(side) - return Decl(Type("mozilla::ipc::ResolveCallback", T=resolvetype, rvalref=True), - 'aResolve') + return Decl( + Type("mozilla::ipc::ResolveCallback", T=resolvetype, rvalref=True), + "aResolve", + ) def makeCallbackRejectDecl(returns): - return Decl(Type("mozilla::ipc::RejectCallback", rvalref=True), 'aReject') + return Decl(Type("mozilla::ipc::RejectCallback", rvalref=True), "aReject") cxxparams = [] if paramsems is not None: cxxparams.extend([makeDecl(d, paramsems) for d in self.params]) - if returnsems == 'promise' and self.returns: + if returnsems == "promise" and self.returns: pass - elif returnsems == 'callback' and self.returns: - cxxparams.extend([makeCallbackResolveDecl(self.returns), - makeCallbackRejectDecl(self.returns)]) - elif returnsems == 'resolver' and self.returns: + elif returnsems == "callback" and self.returns: + cxxparams.extend( + [ + makeCallbackResolveDecl(self.returns), + makeCallbackRejectDecl(self.returns), + ] + ) + elif returnsems == "resolver" and self.returns: cxxparams.extend([makeResolverDecl(self.returns)]) elif returnsems is not None: cxxparams.extend([makeDecl(r, returnsems) for r in self.returns]) @@ -1151,43 +1198,46 @@ def makeCallbackRejectDecl(returns): return cxxparams - def makeCxxArgs(self, paramsems='in', retsems='out', retcallsems='out', - implicit=True): + def makeCxxArgs( + self, paramsems="in", retsems="out", retcallsems="out", implicit=True + ): assert not retcallsems or retsems # retcallsems => returnsems cxxargs = [] - if paramsems == 'move': + if paramsems == "move": # We don't std::move() RefPtr types because current Recv*() # implementors take these parameters as T*, and # std::move(RefPtr) doesn't coerce to T*. 
- cxxargs.extend([ - p.var() if p.ipdltype.isRefcounted() else ExprMove(p.var()) - for p in self.params - ]) - elif paramsems == 'in': + cxxargs.extend( + [ + p.var() if p.ipdltype.isRefcounted() else ExprMove(p.var()) + for p in self.params + ] + ) + elif paramsems == "in": cxxargs.extend([p.var() for p in self.params]) else: assert False for ret in self.returns: - if retsems == 'in': - if retcallsems == 'in': + if retsems == "in": + if retcallsems == "in": cxxargs.append(ret.var()) - elif retcallsems == 'out': + elif retcallsems == "out": cxxargs.append(ExprAddrOf(ret.var())) else: assert 0 - elif retsems == 'out': - if retcallsems == 'in': + elif retsems == "out": + if retcallsems == "in": cxxargs.append(ExprDeref(ret.var())) - elif retcallsems == 'out': + elif retcallsems == "out": cxxargs.append(ret.var()) else: assert 0 - elif retsems == 'resolver': + elif retsems == "resolver": pass - if retsems == 'resolver': - cxxargs.append(ExprMove(ExprVar('resolver'))) + if retsems == "resolver": + cxxargs.append(ExprMove(ExprVar("resolver"))) if not implicit: assert self.decl.type.hasImplicitActorParam() @@ -1202,9 +1252,10 @@ def upgrade(messageDecl): messageDecl.params.insert( 0, _HybridDecl( - ipdl.type.ActorType( - messageDecl.decl.type.constructedType()), - 'actor')) + ipdl.type.ActorType(messageDecl.decl.type.constructedType()), + "actor", + ), + ) messageDecl.__class__ = MessageDecl @@ -1237,11 +1288,10 @@ def cxxTypedefs(self): return self.decl.cxxtypedefs def managerInterfaceType(self, ptr=False): - return Type('mozilla::ipc::IProtocol', ptr=ptr) + return Type("mozilla::ipc::IProtocol", ptr=ptr) def openedProtocolInterfaceType(self, ptr=False): - return Type('mozilla::ipc::IToplevelProtocol', - ptr=ptr) + return Type("mozilla::ipc::IToplevelProtocol", ptr=ptr) def _ipdlmgrtype(self): assert 1 == len(self.decl.type.managers) @@ -1249,74 +1299,73 @@ def _ipdlmgrtype(self): return mgr def managerActorType(self, side, ptr=False): - return Type(_actorName(self._ipdlmgrtype().name(), side), - ptr=ptr) + return Type(_actorName(self._ipdlmgrtype().name(), side), ptr=ptr) def unregisterMethod(self, actorThis=None): if actorThis is not None: - return ExprSelect(actorThis, '->', 'Unregister') - return ExprVar('Unregister') + return ExprSelect(actorThis, "->", "Unregister") + return ExprVar("Unregister") def removeManageeMethod(self): - return ExprVar('RemoveManagee') + return ExprVar("RemoveManagee") def deallocManageeMethod(self): - return ExprVar('DeallocManagee') + return ExprVar("DeallocManagee") def otherPidMethod(self): - return ExprVar('OtherPid') + return ExprVar("OtherPid") def callOtherPid(self, actorThis=None): fn = self.otherPidMethod() if actorThis is not None: - fn = ExprSelect(actorThis, '->', fn.name) + fn = ExprSelect(actorThis, "->", fn.name) return ExprCall(fn) def getChannelMethod(self): - return ExprVar('GetIPCChannel') + return ExprVar("GetIPCChannel") def callGetChannel(self, actorThis=None): fn = self.getChannelMethod() if actorThis is not None: - fn = ExprSelect(actorThis, '->', fn.name) + fn = ExprSelect(actorThis, "->", fn.name) return ExprCall(fn) def processingErrorVar(self): assert self.decl.type.isToplevel() - return ExprVar('ProcessingError') + return ExprVar("ProcessingError") def shouldContinueFromTimeoutVar(self): assert self.decl.type.isToplevel() - return ExprVar('ShouldContinueFromReplyTimeout') + return ExprVar("ShouldContinueFromReplyTimeout") def enteredCxxStackVar(self): assert self.decl.type.isToplevel() - return ExprVar('EnteredCxxStack') + return 
ExprVar("EnteredCxxStack") def exitedCxxStackVar(self): assert self.decl.type.isToplevel() - return ExprVar('ExitedCxxStack') + return ExprVar("ExitedCxxStack") def enteredCallVar(self): assert self.decl.type.isToplevel() - return ExprVar('EnteredCall') + return ExprVar("EnteredCall") def exitedCallVar(self): assert self.decl.type.isToplevel() - return ExprVar('ExitedCall') + return ExprVar("ExitedCall") def routingId(self, actorThis=None): if self.decl.type.isToplevel(): - return ExprVar('MSG_ROUTING_CONTROL') + return ExprVar("MSG_ROUTING_CONTROL") if actorThis is not None: - return ExprCall(ExprSelect(actorThis, '->', 'Id')) - return ExprCall(ExprVar('Id')) + return ExprCall(ExprSelect(actorThis, "->", "Id")) + return ExprCall(ExprVar("Id")) def managerVar(self, thisexpr=None): assert thisexpr is not None or not self.decl.type.isToplevel() - mvar = ExprCall(ExprVar('Manager'), args=[]) + mvar = ExprCall(ExprVar("Manager"), args=[]) if thisexpr is not None: - mvar = ExprCall(ExprSelect(thisexpr, '->', 'Manager'), args=[]) + mvar = ExprCall(ExprSelect(thisexpr, "->", "Manager"), args=[]) return mvar def managedCxxType(self, actortype, side): @@ -1325,16 +1374,17 @@ def managedCxxType(self, actortype, side): def managedMethod(self, actortype, side): assert self.decl.type.isManagerOf(actortype) - return ExprVar('Managed' + _actorName(actortype.name(), side)) + return ExprVar("Managed" + _actorName(actortype.name(), side)) def managedVar(self, actortype, side): assert self.decl.type.isManagerOf(actortype) - return ExprVar('mManaged' + _actorName(actortype.name(), side)) + return ExprVar("mManaged" + _actorName(actortype.name(), side)) def managedVarType(self, actortype, side, const=False, ref=False): assert self.decl.type.isManagerOf(actortype) - return _cxxManagedContainerType(Type(_actorName(actortype.name(), side)), - const=const, ref=ref) + return _cxxManagedContainerType( + Type(_actorName(actortype.name(), side)), const=const, ref=ref + ) def subtreeUsesShmem(self): return _subtreeUsesShmem(self) @@ -1355,16 +1405,16 @@ def upgrade(tu): # ----------------------------------------------------------------------------- pod_types = { - 'int8_t': 1, - 'uint8_t': 1, - 'int16_t': 2, - 'uint16_t': 2, - 'int32_t': 4, - 'uint32_t': 4, - 'int64_t': 8, - 'uint64_t': 8, - 'float': 4, - 'double': 8, + "int8_t": 1, + "uint8_t": 1, + "int16_t": 2, + "uint16_t": 2, + "int32_t": 4, + "uint32_t": 4, + "int64_t": 8, + "uint64_t": 8, + "float": 4, + "double": 8, } max_pod_size = max(pod_types.values()) # We claim that all types we don't recognize are automatically "bigger" @@ -1381,11 +1431,11 @@ def pod_size(ipdltype): class _DecorateWithCxxStuff(ipdl.ast.Visitor): """Phase 1 of lowering: decorate the IPDL AST with information -relevant to C++ code generation. + relevant to C++ code generation. -This pass results in an AST that is a poor man's "IR"; in reality, a -"hybrid" AST mainly consisting of IPDL nodes with new C++ info along -with some new IPDL/C++ nodes that are tuned for C++ codegen.""" + This pass results in an AST that is a poor man's "IR"; in reality, a + "hybrid" AST mainly consisting of IPDL nodes with new C++ info along + with some new IPDL/C++ nodes that are tuned for C++ codegen.""" def __init__(self): self.visitedTus = set() @@ -1393,24 +1443,27 @@ def __init__(self): # reference known C++ types by their "short name" rather than # fully-qualified name. e.g. |Foo| rather than |a::b::Foo|. 
self.typedefs = [] - self.typedefSet = set([Typedef(Type('mozilla::ipc::ActorHandle'), - 'ActorHandle'), - Typedef(Type('base::ProcessId'), - 'ProcessId'), - Typedef(Type('mozilla::ipc::ProtocolId'), - 'ProtocolId'), - Typedef(Type('mozilla::ipc::Transport'), - 'Transport'), - Typedef(Type('mozilla::ipc::Endpoint'), - 'Endpoint', ['FooSide']), - Typedef(Type('mozilla::ipc::ManagedEndpoint'), - 'ManagedEndpoint', ['FooSide']), - Typedef(Type('mozilla::ipc::TransportDescriptor'), - 'TransportDescriptor'), - Typedef(Type('mozilla::UniquePtr'), - 'UniquePtr', ['T']), - Typedef(Type('mozilla::ipc::ResponseRejectReason'), - 'ResponseRejectReason')]) + self.typedefSet = set( + [ + Typedef(Type("mozilla::ipc::ActorHandle"), "ActorHandle"), + Typedef(Type("base::ProcessId"), "ProcessId"), + Typedef(Type("mozilla::ipc::ProtocolId"), "ProtocolId"), + Typedef(Type("mozilla::ipc::Transport"), "Transport"), + Typedef(Type("mozilla::ipc::Endpoint"), "Endpoint", ["FooSide"]), + Typedef( + Type("mozilla::ipc::ManagedEndpoint"), + "ManagedEndpoint", + ["FooSide"], + ), + Typedef( + Type("mozilla::ipc::TransportDescriptor"), "TransportDescriptor" + ), + Typedef(Type("mozilla::UniquePtr"), "UniquePtr", ["T"]), + Typedef( + Type("mozilla::ipc::ResponseRejectReason"), "ResponseRejectReason" + ), + ] + ) self.protocolName = None def visitTranslationUnit(self, tu): @@ -1422,7 +1475,7 @@ def visitTranslationUnit(self, tu): self.typedefs[:] = sorted(list(self.typedefSet)) def visitInclude(self, inc): - if inc.tu.filetype == 'header': + if inc.tu.filetype == "header": inc.tu.accept(self) def visitProtocol(self, pro): @@ -1433,8 +1486,9 @@ def visitProtocol(self, pro): def visitUsingStmt(self, using): if using.decl.fullname is not None: - self.typedefSet.add(Typedef(Type(using.decl.fullname), - using.decl.shortname)) + self.typedefSet.add( + Typedef(Type(using.decl.fullname), using.decl.shortname) + ) def visitStructDecl(self, sd): if not isinstance(sd, StructDecl): @@ -1446,10 +1500,8 @@ def visitStructDecl(self, sd): sd.decl.special = True # if ftype has a visible actor, we need both # |ActorParent| and |ActorChild| fields - newfields.append(_StructField(ftype, f.name, sd, - side='parent')) - newfields.append(_StructField(ftype, f.name, sd, - side='child')) + newfields.append(_StructField(ftype, f.name, sd, side="parent")) + newfields.append(_StructField(ftype, f.name, sd, side="child")) else: newfields.append(_StructField(ftype, f.name, sd)) @@ -1479,8 +1531,8 @@ def visitUnionDecl(self, ud): ud.decl.special = True # if ctype has a visible actor, we need both # |ActorParent| and |ActorChild| union members - newcomponents.append(_UnionMember(ctype, ud, side='parent')) - newcomponents.append(_UnionMember(ctype, ud, side='child')) + newcomponents.append(_UnionMember(ctype, ud, side="parent")) + newcomponents.append(_UnionMember(ctype, ud, side="child")) else: newcomponents.append(_UnionMember(ctype, ud)) ud.components = newcomponents @@ -1501,27 +1553,28 @@ def visitMessageDecl(self, md): # ----------------------------------------------------------------------------- + def msgenums(protocol, pretty=False): - msgenum = TypeEnum('MessageType') - msgstart = _messageStartName(protocol.decl.type) + ' << 16' - msgenum.addId(protocol.name + 'Start', msgstart) + msgenum = TypeEnum("MessageType") + msgstart = _messageStartName(protocol.decl.type) + " << 16" + msgenum.addId(protocol.name + "Start", msgstart) for md in protocol.messageDecls: msgenum.addId(md.prettyMsgName() if pretty else md.msgId()) if md.hasReply(): 
msgenum.addId(md.prettyReplyName() if pretty else md.replyId()) - msgenum.addId(protocol.name + 'End') + msgenum.addId(protocol.name + "End") return msgenum class _GenerateProtocolCode(ipdl.ast.Visitor): - '''Creates code common to both the parent and child actors.''' + """Creates code common to both the parent and child actors.""" def __init__(self): - self.protocol = None # protocol we're generating a class for - self.hdrfile = None # what will become Protocol.h - self.cppfile = None # what will become Protocol.cpp + self.protocol = None # protocol we're generating a class for + self.hdrfile = None # what will become Protocol.h + self.cppfile = None # what will become Protocol.cpp self.cppIncludeHeaders = [] self.structUnionDefns = [] self.funcDefns = [] @@ -1572,7 +1625,7 @@ def visitTranslationUnit(self, tu): hf.addthings([Whitespace("// Headers for typedefs"), Whitespace.NL]) for headername in sorted(iter(aggregateTypeIncludes)): - hf.addthing(CppDirective('include', '"' + headername + '"')) + hf.addthing(CppDirective("include", '"' + headername + '"')) # Manually run Visitor.visitTranslationUnit. For dependency resolution # we need to handle structs and unions separately. @@ -1588,59 +1641,62 @@ def visitTranslationUnit(self, tu): if tu.protocol: tu.protocol.accept(self) - if tu.filetype == 'header': - self.cppIncludeHeaders.append(_ipdlhHeaderName(tu) + '.h') + if tu.filetype == "header": + self.cppIncludeHeaders.append(_ipdlhHeaderName(tu) + ".h") hf.addthing(Whitespace.NL) hf.addthings(_includeGuardEnd(hf)) cf = self.cppfile - cf.addthings(( - [_DISCLAIMER, Whitespace.NL] - + [CppDirective('include', '"'+h+'"') - for h in self.cppIncludeHeaders] - + [Whitespace.NL] - )) + cf.addthings( + ( + [_DISCLAIMER, Whitespace.NL] + + [ + CppDirective("include", '"' + h + '"') + for h in self.cppIncludeHeaders + ] + + [Whitespace.NL] + ) + ) if self.protocol: # construct the namespace into which we'll stick all our defns ns = Namespace(self.protocol.name) cf.addthing(_putInNamespaces(ns, self.protocol.namespaces)) - ns.addstmts(([Whitespace.NL] - + self.funcDefns - + [Whitespace.NL])) + ns.addstmts(([Whitespace.NL] + self.funcDefns + [Whitespace.NL])) cf.addthings(self.structUnionDefns) def visitBuiltinCxxInclude(self, inc): - self.hdrfile.addthing(CppDirective('include', '"' + inc.file + '"')) + self.hdrfile.addthing(CppDirective("include", '"' + inc.file + '"')) def visitCxxInclude(self, inc): self.cppIncludeHeaders.append(inc.file) def visitInclude(self, inc): - if inc.tu.filetype == 'header': - self.hdrfile.addthing(CppDirective( - 'include', '"' + _ipdlhHeaderName(inc.tu) + '.h"')) + if inc.tu.filetype == "header": + self.hdrfile.addthing( + CppDirective("include", '"' + _ipdlhHeaderName(inc.tu) + '.h"') + ) else: self.cppIncludeHeaders += [ - _protocolHeaderName(inc.tu.protocol, 'parent') + '.h', - _protocolHeaderName(inc.tu.protocol, 'child') + '.h', + _protocolHeaderName(inc.tu.protocol, "parent") + ".h", + _protocolHeaderName(inc.tu.protocol, "child") + ".h", ] def generateStructsAndUnions(self, tu): - '''Generate the definitions for all structs and unions. This will + """Generate the definitions for all structs and unions. 
This will re-order the declarations if needed in the C++ code such that - dependencies have already been defined.''' + dependencies have already been defined.""" decls = OrderedDict() for su in tu.structsAndUnions: if isinstance(su, StructDecl): - which = 'struct' + which = "struct" forwarddecls, fulldecltypes, cls = _generateCxxStruct(su) traitsdecl, traitsdefns = _ParamTraits.structPickling(su.decl.type) else: assert isinstance(su, UnionDecl) - which = 'union' + which = "union" forwarddecls, fulldecltypes, cls = _generateCxxUnion(su) traitsdecl, traitsdefns = _ParamTraits.unionPickling(su.decl.type) @@ -1652,26 +1708,35 @@ def generateStructsAndUnions(self, tu): fulldecltypes, [Whitespace.NL] + forwarddecls - + [Whitespace(""" + + [ + Whitespace( + """ //----------------------------------------------------------------------------- // Declaration of the IPDL type |%s %s| // -""" % (which, su.name)), +""" + % (which, su.name) + ), _putInNamespaces(clsdecl, su.namespaces), ] - + [Whitespace.NL, - traitsdecl]) + + [Whitespace.NL, traitsdecl], + ) - self.structUnionDefns.extend([ - Whitespace(""" + self.structUnionDefns.extend( + [ + Whitespace( + """ //----------------------------------------------------------------------------- // Method definitions for the IPDL type |%s %s| // -""" % (which, su.name)), - _putInNamespaces(methoddefns, su.namespaces), - Whitespace.NL, - traitsdefns, - ]) +""" + % (which, su.name) + ), + _putInNamespaces(methoddefns, su.namespaces), + Whitespace.NL, + traitsdefns, + ] + ) # Generate the declarations structs in dependency order. def gen_struct(deps, defn): @@ -1681,27 +1746,39 @@ def gen_struct(deps, defn): del decls[dep] gen_struct(d, t) self.hdrfile.addthings(defn) + while len(decls) > 0: _, (d, t) = decls.popitem(False) gen_struct(d, t) def visitProtocol(self, p): - self.cppIncludeHeaders.append(_protocolHeaderName(self.protocol, '') + '.h') - self.cppIncludeHeaders.append(_protocolHeaderName(self.protocol, 'Parent') + '.h') - self.cppIncludeHeaders.append(_protocolHeaderName(self.protocol, 'Child') + '.h') + self.cppIncludeHeaders.append(_protocolHeaderName(self.protocol, "") + ".h") + self.cppIncludeHeaders.append( + _protocolHeaderName(self.protocol, "Parent") + ".h" + ) + self.cppIncludeHeaders.append( + _protocolHeaderName(self.protocol, "Child") + ".h" + ) # Forward declare our own actors. - self.hdrfile.addthings([ - Whitespace.NL, - _makeForwardDeclForActor(p.decl.type, 'Parent'), - _makeForwardDeclForActor(p.decl.type, 'Child') - ]) + self.hdrfile.addthings( + [ + Whitespace.NL, + _makeForwardDeclForActor(p.decl.type, "Parent"), + _makeForwardDeclForActor(p.decl.type, "Child"), + ] + ) - self.hdrfile.addthing(Whitespace(""" + self.hdrfile.addthing( + Whitespace( + """ //----------------------------------------------------------------------------- // Code common to %sChild and %sParent // -""" % (p.name, p.name))) +""" + % (p.name, p.name) + ) + ) # construct the namespace into which we'll stick all our decls ns = Namespace(self.protocol.name) @@ -1714,7 +1791,7 @@ def visitProtocol(self, p): # spit out message type enum and classes msgenum = msgenums(self.protocol) - ns.addstmts([StmtDecl(Decl(msgenum, '')), Whitespace.NL]) + ns.addstmts([StmtDecl(Decl(msgenum, "")), Whitespace.NL]) for md in p.messageDecls: decls = [] @@ -1722,18 +1799,19 @@ def visitProtocol(self, p): # Look up the segment capacity used for serializing this # message. 
If the capacity is not specified, use '0' for # the default capacity (defined in ipc_message.cc) - name = '%s::%s' % (md.namespace, md.decl.progname) + name = "%s::%s" % (md.namespace, md.decl.progname) segmentcapacity = self.segmentcapacitydict.get(name, 0) mfDecl, mfDefn = _splitFuncDeclDefn( - _generateMessageConstructor(md, segmentcapacity, p, - forReply=False)) + _generateMessageConstructor(md, segmentcapacity, p, forReply=False) + ) decls.append(mfDecl) self.funcDefns.append(mfDefn) if md.hasReply(): rfDecl, rfDefn = _splitFuncDeclDefn( - _generateMessageConstructor(md, 0, p, forReply=True)) + _generateMessageConstructor(md, 0, p, forReply=True) + ) decls.append(rfDecl) self.funcDefns.append(rfDefn) @@ -1745,139 +1823,169 @@ def visitProtocol(self, p): # Generate code for PFoo::CreateEndpoints. def genEndpointFunc(self): p = self.protocol.decl.type - tparent = _cxxBareType(ActorType(p), 'Parent', fq=True) - tchild = _cxxBareType(ActorType(p), 'Child', fq=True) - - openfunc = MethodDefn(MethodDecl( - 'CreateEndpoints', - params=[Decl(Type('base::ProcessId'), 'aParentDestPid'), - Decl(Type('base::ProcessId'), 'aChildDestPid'), - Decl(Type('mozilla::ipc::Endpoint<' + tparent.name + '>', ptr=True), - 'aParent'), - Decl(Type('mozilla::ipc::Endpoint<' + tchild.name + '>', ptr=True), - 'aChild')], - ret=Type.NSRESULT)) + tparent = _cxxBareType(ActorType(p), "Parent", fq=True) + tchild = _cxxBareType(ActorType(p), "Child", fq=True) + + openfunc = MethodDefn( + MethodDecl( + "CreateEndpoints", + params=[ + Decl(Type("base::ProcessId"), "aParentDestPid"), + Decl(Type("base::ProcessId"), "aChildDestPid"), + Decl( + Type("mozilla::ipc::Endpoint<" + tparent.name + ">", ptr=True), + "aParent", + ), + Decl( + Type("mozilla::ipc::Endpoint<" + tchild.name + ">", ptr=True), + "aChild", + ), + ], + ret=Type.NSRESULT, + ) + ) openfunc.addcode( - ''' + """ return mozilla::ipc::CreateEndpoints( mozilla::ipc::PrivateIPDLInterface(), aParentDestPid, aChildDestPid, aParent, aChild); - ''') + """ + ) return openfunc # -------------------------------------------------- + def _generateMessageConstructor(md, segmentSize, protocol, forReply=False): if forReply: clsname = md.replyCtorFunc() msgid = md.replyId() - replyEnum = 'REPLY' + replyEnum = "REPLY" else: clsname = md.msgCtorFunc() msgid = md.msgId() - replyEnum = 'NOT_REPLY' + replyEnum = "NOT_REPLY" nested = md.decl.type.nested prio = md.decl.type.prio compress = md.decl.type.compress - routingId = ExprVar('routingId') + routingId = ExprVar("routingId") - func = FunctionDefn(FunctionDecl( - clsname, - params=[Decl(Type('int32_t'), routingId.name)], - ret=Type('IPC::Message', ptr=True))) + func = FunctionDefn( + FunctionDecl( + clsname, + params=[Decl(Type("int32_t"), routingId.name)], + ret=Type("IPC::Message", ptr=True), + ) + ) - if compress == 'compress': - compression = 'COMPRESSION_ENABLED' + if compress == "compress": + compression = "COMPRESSION_ENABLED" elif compress: - assert compress == 'compressall' - compression = 'COMPRESSION_ALL' + assert compress == "compressall" + compression = "COMPRESSION_ALL" else: - compression = 'COMPRESSION_NONE' + compression = "COMPRESSION_NONE" if nested == ipdl.ast.NOT_NESTED: - nestedEnum = 'NOT_NESTED' + nestedEnum = "NOT_NESTED" elif nested == ipdl.ast.INSIDE_SYNC_NESTED: - nestedEnum = 'NESTED_INSIDE_SYNC' + nestedEnum = "NESTED_INSIDE_SYNC" else: assert nested == ipdl.ast.INSIDE_CPOW_NESTED - nestedEnum = 'NESTED_INSIDE_CPOW' + nestedEnum = "NESTED_INSIDE_CPOW" if prio == ipdl.ast.NORMAL_PRIORITY: - prioEnum = 
'NORMAL_PRIORITY' + prioEnum = "NORMAL_PRIORITY" elif prio == ipdl.ast.INPUT_PRIORITY: - prioEnum = 'INPUT_PRIORITY' + prioEnum = "INPUT_PRIORITY" elif prio == ipdl.ast.HIGH_PRIORITY: - prioEnum = 'HIGH_PRIORITY' + prioEnum = "HIGH_PRIORITY" else: - prioEnum = 'MEDIUMHIGH_PRIORITY' + prioEnum = "MEDIUMHIGH_PRIORITY" if md.decl.type.isSync(): - syncEnum = 'SYNC' + syncEnum = "SYNC" else: - syncEnum = 'ASYNC' + syncEnum = "ASYNC" if md.decl.type.isInterrupt(): - interruptEnum = 'INTERRUPT' + interruptEnum = "INTERRUPT" else: - interruptEnum = 'NOT_INTERRUPT' + interruptEnum = "NOT_INTERRUPT" if md.decl.type.isCtor(): - ctorEnum = 'CONSTRUCTOR' + ctorEnum = "CONSTRUCTOR" else: - ctorEnum = 'NOT_CONSTRUCTOR' + ctorEnum = "NOT_CONSTRUCTOR" def messageEnum(valname): - return ExprVar('IPC::Message::' + valname) - - flags = ExprCall(ExprVar('IPC::Message::HeaderFlags'), - args=[messageEnum(nestedEnum), - messageEnum(prioEnum), - messageEnum(compression), - messageEnum(ctorEnum), - messageEnum(syncEnum), - messageEnum(interruptEnum), - messageEnum(replyEnum)]) + return ExprVar("IPC::Message::" + valname) + + flags = ExprCall( + ExprVar("IPC::Message::HeaderFlags"), + args=[ + messageEnum(nestedEnum), + messageEnum(prioEnum), + messageEnum(compression), + messageEnum(ctorEnum), + messageEnum(syncEnum), + messageEnum(interruptEnum), + messageEnum(replyEnum), + ], + ) segmentSize = int(segmentSize) if segmentSize: func.addstmt( - StmtReturn(ExprNew(Type('IPC::Message'), - args=[routingId, - ExprVar(msgid), - ExprLiteral.Int(int(segmentSize)), - flags, - # Pass `true` to recordWriteLatency to collect telemetry - ExprLiteral.TRUE]))) + StmtReturn( + ExprNew( + Type("IPC::Message"), + args=[ + routingId, + ExprVar(msgid), + ExprLiteral.Int(int(segmentSize)), + flags, + # Pass `true` to recordWriteLatency to collect telemetry + ExprLiteral.TRUE, + ], + ) + ) + ) else: func.addstmt( - StmtReturn(ExprCall(ExprVar('IPC::Message::IPDLMessage'), - args=[routingId, - ExprVar(msgid), - flags]))) + StmtReturn( + ExprCall( + ExprVar("IPC::Message::IPDLMessage"), + args=[routingId, ExprVar(msgid), flags], + ) + ) + ) return func + # -------------------------------------------------- -class _ParamTraits(): - var = ExprVar('aVar') - msgvar = ExprVar('aMsg') - itervar = ExprVar('aIter') - actor = ExprVar('aActor') +class _ParamTraits: + var = ExprVar("aVar") + msgvar = ExprVar("aMsg") + itervar = ExprVar("aIter") + actor = ExprVar("aActor") @classmethod def ifsideis(cls, side, then, els=None): - cxxside = ExprVar('mozilla::ipc::ChildSide') - if side == 'parent': - cxxside = ExprVar('mozilla::ipc::ParentSide') + cxxside = ExprVar("mozilla::ipc::ChildSide") + if side == "parent": + cxxside = ExprVar("mozilla::ipc::ParentSide") - ifstmt = StmtIf(ExprBinary(cxxside, '==', - ExprCall(ExprSelect(cls.actor, '->', 'GetSide')))) + ifstmt = StmtIf( + ExprBinary(cxxside, "==", ExprCall(ExprSelect(cls.actor, "->", "GetSide"))) + ) ifstmt.addifstmt(then) if els is not None: ifstmt.addelsestmt(els) @@ -1885,27 +1993,34 @@ def ifsideis(cls, side, then, els=None): @classmethod def fatalError(cls, reason): - return StmtCode('aActor->FatalError(${reason});', - reason=ExprLiteral.String(reason)) + return StmtCode( + "aActor->FatalError(${reason});", reason=ExprLiteral.String(reason) + ) @classmethod def writeSentinel(cls, msgvar, sentinelKey): return [ - Whitespace('// Sentinel = ' + repr(sentinelKey) + '\n', indent=True), - StmtExpr(ExprCall(ExprSelect(msgvar, '->', 'WriteSentinel'), - args=[ExprLiteral.Int(hashfunc(sentinelKey))])) + 
Whitespace("// Sentinel = " + repr(sentinelKey) + "\n", indent=True), + StmtExpr( + ExprCall( + ExprSelect(msgvar, "->", "WriteSentinel"), + args=[ExprLiteral.Int(hashfunc(sentinelKey))], + ) + ), ] @classmethod def readSentinel(cls, msgvar, itervar, sentinelKey, sentinelFail): # Read the sentinel - read = ExprCall(ExprSelect(msgvar, '->', 'ReadSentinel'), - args=[itervar, ExprLiteral.Int(hashfunc(sentinelKey))]) + read = ExprCall( + ExprSelect(msgvar, "->", "ReadSentinel"), + args=[itervar, ExprLiteral.Int(hashfunc(sentinelKey))], + ) ifsentinel = StmtIf(ExprNot(read)) ifsentinel.addifstmts(sentinelFail) return [ - Whitespace('// Sentinel = ' + repr(sentinelKey) + '\n', indent=True), + Whitespace("// Sentinel = " + repr(sentinelKey) + "\n", indent=True), ifsentinel, ] @@ -1915,7 +2030,7 @@ def write(cls, var, msgvar, actor, ipdltype=None): # only called when the actor is already correctly set. if ipdltype and _cxxTypeNeedsMoveForSend(ipdltype): var = ExprMove(var) - return ExprCall(ExprVar('WriteIPDLParam'), args=[msgvar, actor, var]) + return ExprCall(ExprVar("WriteIPDLParam"), args=[msgvar, actor, var]) @classmethod def checkedWrite(cls, ipdltype, var, msgvar, sentinelKey, actor): @@ -1923,30 +2038,48 @@ def checkedWrite(cls, ipdltype, var, msgvar, sentinelKey, actor): block = Block() # Assert we aren't serializing a null non-nullable actor - if ipdltype and ipdltype.isIPDL() and ipdltype.isActor() and not ipdltype.nullable: - block.addstmt(_abortIfFalse(var, 'NULL actor value passed to non-nullable param')) + if ( + ipdltype + and ipdltype.isIPDL() + and ipdltype.isActor() + and not ipdltype.nullable + ): + block.addstmt( + _abortIfFalse(var, "NULL actor value passed to non-nullable param") + ) - block.addstmts([ - StmtExpr(cls.write(var, msgvar, actor, ipdltype)), - ]) + block.addstmts( + [ + StmtExpr(cls.write(var, msgvar, actor, ipdltype)), + ] + ) block.addstmts(cls.writeSentinel(msgvar, sentinelKey)) return block @classmethod def bulkSentinelKey(cls, fields): - return ' | '.join(f.basename for f in fields) + return " | ".join(f.basename for f in fields) @classmethod def checkedBulkWrite(cls, size, fields): block = Block() first = fields[0] - block.addstmts([ - StmtExpr(ExprCall(ExprSelect(cls.msgvar, '->', 'WriteBytes'), - args=[ExprAddrOf(ExprCall(first.getMethod(thisexpr=cls.var, - sel='.'))), - ExprLiteral.Int(size * len(fields))])) - ]) + block.addstmts( + [ + StmtExpr( + ExprCall( + ExprSelect(cls.msgvar, "->", "WriteBytes"), + args=[ + ExprAddrOf( + ExprCall(first.getMethod(thisexpr=cls.var, sel=".")) + ), + ExprLiteral.Int(size * len(fields)), + ], + ) + ) + ] + ) block.addstmts(cls.writeSentinel(cls.msgvar, cls.bulkSentinelKey(fields))) return block @@ -1956,46 +2089,69 @@ def checkedBulkRead(cls, size, fields): block = Block() first = fields[0] - readbytes = ExprCall(ExprSelect(cls.msgvar, '->', 'ReadBytesInto'), - args=[cls.itervar, - ExprAddrOf(ExprCall(first.getMethod(thisexpr=cls.var, - sel='->'))), - ExprLiteral.Int(size * len(fields))]) + readbytes = ExprCall( + ExprSelect(cls.msgvar, "->", "ReadBytesInto"), + args=[ + cls.itervar, + ExprAddrOf(ExprCall(first.getMethod(thisexpr=cls.var, sel="->"))), + ExprLiteral.Int(size * len(fields)), + ], + ) ifbad = StmtIf(ExprNot(readbytes)) - errmsg = 'Error bulk reading fields from %s' % first.ipdltype.name() - ifbad.addifstmts([cls.fatalError(errmsg), - StmtReturn.FALSE]) + errmsg = "Error bulk reading fields from %s" % first.ipdltype.name() + ifbad.addifstmts([cls.fatalError(errmsg), StmtReturn.FALSE]) block.addstmt(ifbad) 
- block.addstmts(cls.readSentinel(cls.msgvar, - cls.itervar, - cls.bulkSentinelKey(fields), - errfnSentinel()(errmsg))) + block.addstmts( + cls.readSentinel( + cls.msgvar, + cls.itervar, + cls.bulkSentinelKey(fields), + errfnSentinel()(errmsg), + ) + ) return block @classmethod - def checkedRead(cls, ipdltype, var, - msgvar, itervar, errfn, - paramtype, sentinelKey, - errfnSentinel, actor): + def checkedRead( + cls, + ipdltype, + var, + msgvar, + itervar, + errfn, + paramtype, + sentinelKey, + errfnSentinel, + actor, + ): block = Block() # Read the data - ifbad = StmtIf(ExprNot(ExprCall(ExprVar('ReadIPDLParam'), - args=[msgvar, itervar, actor, var]))) + ifbad = StmtIf( + ExprNot( + ExprCall(ExprVar("ReadIPDLParam"), args=[msgvar, itervar, actor, var]) + ) + ) if not isinstance(paramtype, list): - paramtype = ['Error deserializing ' + paramtype] + paramtype = ["Error deserializing " + paramtype] ifbad.addifstmts(errfn(*paramtype)) block.addstmt(ifbad) # Check if we got a null non-nullable actor - if ipdltype and ipdltype.isIPDL() and ipdltype.isActor() and not ipdltype.nullable: + if ( + ipdltype + and ipdltype.isIPDL() + and ipdltype.isActor() + and not ipdltype.nullable + ): ifnull = StmtIf(ExprNot(ExprDeref(var))) ifnull.addifstmts(errfn(*paramtype)) block.addstmt(ifnull) - block.addstmts(cls.readSentinel(msgvar, itervar, sentinelKey, - errfnSentinel(*paramtype))) + block.addstmts( + cls.readSentinel(msgvar, itervar, sentinelKey, errfnSentinel(*paramtype)) + ) return block @@ -2006,63 +2162,71 @@ def errfn(msg): return [cls.fatalError(msg), StmtReturn.FALSE] return cls.checkedRead( - ipdltype, var, cls.msgvar, cls.itervar, + ipdltype, + var, + cls.msgvar, + cls.itervar, errfn=errfn, paramtype=what, sentinelKey=sentinelKey, errfnSentinel=errfnSentinel(), - actor=cls.actor) + actor=cls.actor, + ) @classmethod def generateDecl(cls, fortype, write, read, constin=True): # IPDLParamTraits impls are selected ignoring constness, and references. 
- pt = Class('IPDLParamTraits', - specializes=Type(fortype.name, - T=fortype.T, - inner=fortype.inner, - ptr=fortype.ptr), - struct=True) + pt = Class( + "IPDLParamTraits", + specializes=Type( + fortype.name, T=fortype.T, inner=fortype.inner, ptr=fortype.ptr + ), + struct=True, + ) # typedef T paramType; - pt.addstmt(Typedef(fortype, 'paramType')) + pt.addstmt(Typedef(fortype, "paramType")) - iprotocoltype = Type('mozilla::ipc::IProtocol', ptr=True) + iprotocoltype = Type("mozilla::ipc::IProtocol", ptr=True) # static void Write(Message*, const T&); - intype = Type('paramType', ref=True, const=constin) + intype = Type("paramType", ref=True, const=constin) writemthd = MethodDefn( - MethodDecl('Write', - params=[Decl(Type('IPC::Message', ptr=True), - cls.msgvar.name), - Decl(iprotocoltype, - cls.actor.name), - Decl(intype, - cls.var.name)], - methodspec=MethodSpec.STATIC)) + MethodDecl( + "Write", + params=[ + Decl(Type("IPC::Message", ptr=True), cls.msgvar.name), + Decl(iprotocoltype, cls.actor.name), + Decl(intype, cls.var.name), + ], + methodspec=MethodSpec.STATIC, + ) + ) writemthd.addstmts(write) pt.addstmt(writemthd) # static bool Read(const Message*, PickleIterator*, T*); - outtype = Type('paramType', ptr=True) + outtype = Type("paramType", ptr=True) readmthd = MethodDefn( - MethodDecl('Read', - params=[Decl(Type('IPC::Message', ptr=True, const=True), - cls.msgvar.name), - Decl(_iterType(ptr=True), - cls.itervar.name), - Decl(iprotocoltype, - cls.actor.name), - Decl(outtype, - cls.var.name)], - ret=Type.BOOL, - methodspec=MethodSpec.STATIC)) + MethodDecl( + "Read", + params=[ + Decl(Type("IPC::Message", ptr=True, const=True), cls.msgvar.name), + Decl(_iterType(ptr=True), cls.itervar.name), + Decl(iprotocoltype, cls.actor.name), + Decl(outtype, cls.var.name), + ], + ret=Type.BOOL, + methodspec=MethodSpec.STATIC, + ) + ) readmthd.addstmts(read) pt.addstmt(readmthd) # Split the class into declaration and definition clsdecl, methoddefns = _splitClassDeclDefn(pt) - namespaces = [Namespace('mozilla'), Namespace('ipc')] + namespaces = [Namespace("mozilla"), Namespace("ipc")] clsns = _putInNamespaces(clsdecl, namespaces) defns = _putInNamespaces(methoddefns, namespaces) return clsns, defns @@ -2075,7 +2239,7 @@ def actorPickling(cls, actortype, side): cxxtype = _cxxBareType(actortype, side, fq=True) write = StmtCode( - ''' + """ int32_t id; if (!${var}) { id = 0; // kNullActorId @@ -2094,14 +2258,15 @@ def actorPickling(cls, actortype, side): } ${write}; - ''', + """, var=cls.var, actor=cls.actor, - write=cls.write(ExprVar('id'), cls.msgvar, cls.actor)) + write=cls.write(ExprVar("id"), cls.msgvar, cls.actor), + ) # bool Read(..) 
impl read = StmtCode( - ''' + """ mozilla::Maybe actor = ${actor}->ReadActor(${msgvar}, ${itervar}, true, ${actortype}, ${protocolid}); if (actor.isNothing()) { @@ -2110,14 +2275,15 @@ def actorPickling(cls, actortype, side): *${var} = static_cast<${cxxtype}>(actor.value()); return true; - ''', + """, actor=cls.actor, msgvar=cls.msgvar, itervar=cls.itervar, actortype=ExprLiteral.String(actortype.name()), protocolid=_protocolId(actortype), var=cls.var, - cxxtype=cxxtype) + cxxtype=cxxtype, + ) return cls.generateDecl(cxxtype, [write], [read]) @@ -2133,22 +2299,34 @@ def get(sel, f): write = [] read = [] - for (size, fields) in itertools.groupby(sd.fields_member_order(), - lambda f: pod_size(f.ipdltype)): + for (size, fields) in itertools.groupby( + sd.fields_member_order(), lambda f: pod_size(f.ipdltype) + ): fields = list(fields) if size == pod_size_sentinel: for f in fields: - writefield = cls.checkedWrite(f.ipdltype, - get('.', f), - cls.msgvar, - sentinelKey=f.basename, - actor=cls.actor) - readfield = cls._checkedRead(f.ipdltype, - ExprAddrOf(get('->', f)), f.basename, - '\'' + f.getMethod().name + '\' ' + - '(' + f.ipdltype.name() + ') member of ' + - '\'' + structtype.name() + '\'') + writefield = cls.checkedWrite( + f.ipdltype, + get(".", f), + cls.msgvar, + sentinelKey=f.basename, + actor=cls.actor, + ) + readfield = cls._checkedRead( + f.ipdltype, + ExprAddrOf(get("->", f)), + f.basename, + "'" + + f.getMethod().name + + "' " + + "(" + + f.ipdltype.name() + + ") member of " + + "'" + + structtype.name() + + "'", + ) # Wrap the read/write in a side check if the field is special. if f.special: @@ -2178,8 +2356,8 @@ def unionPickling(cls, uniontype): ud = uniontype._ast # Use typedef to set up an alias so it's easier to reference the struct type. 
- alias = 'union__' - typevar = ExprVar('type') + alias = "union__" + typevar = ExprVar("type") prelude = [ Typedef(cxxtype, alias), @@ -2188,11 +2366,9 @@ def unionPickling(cls, uniontype): writeswitch = StmtSwitch(typevar) write = prelude + [ StmtDecl(Decl(Type.INT, typevar.name), init=ud.callType(cls.var)), - cls.checkedWrite(None, - typevar, - cls.msgvar, - sentinelKey=uniontype.name(), - actor=cls.actor), + cls.checkedWrite( + None, typevar, cls.msgvar, sentinelKey=uniontype.name(), actor=cls.actor + ), Whitespace.NL, writeswitch, ] @@ -2200,29 +2376,32 @@ def unionPickling(cls, uniontype): readswitch = StmtSwitch(typevar) read = prelude + [ StmtDecl(Decl(Type.INT, typevar.name), init=ExprLiteral.ZERO), - cls._checkedRead(None, - ExprAddrOf(typevar), - uniontype.name(), - 'type of union ' + uniontype.name()), + cls._checkedRead( + None, + ExprAddrOf(typevar), + uniontype.name(), + "type of union " + uniontype.name(), + ), Whitespace.NL, readswitch, ] for c in ud.components: ct = c.ipdltype - caselabel = CaseLabel(alias + '::' + c.enum()) + caselabel = CaseLabel(alias + "::" + c.enum()) origenum = c.enum() writecase = StmtBlock() - wstmt = cls.checkedWrite(c.ipdltype, - ExprCall(ExprSelect(cls.var, '.', - c.getTypeName())), - cls.msgvar, sentinelKey=c.enum(), - actor=cls.actor) + wstmt = cls.checkedWrite( + c.ipdltype, + ExprCall(ExprSelect(cls.var, ".", c.getTypeName())), + cls.msgvar, + sentinelKey=c.enum(), + actor=cls.actor, + ) if c.special: # Report an error if the type is special and the side is wrong - wstmt = cls.ifsideis(c.side, wstmt, - els=cls.fatalError('wrong side!')) + wstmt = cls.ifsideis(c.side, wstmt, els=cls.fatalError("wrong side!")) writecase.addstmts([wstmt, StmtReturn()]) writeswitch.addcase(caselabel, writecase) @@ -2232,43 +2411,54 @@ def unionPickling(cls, uniontype): # this side; i.e. child->parent messages will have PFooChild # when received on the parent side. Report an error if the sides # match, and handle c.other instead. 
- readcase.addstmt(cls.ifsideis(c.side, - StmtBlock([cls.fatalError('wrong side!'), - StmtReturn.FALSE]))) + readcase.addstmt( + cls.ifsideis( + c.side, + StmtBlock([cls.fatalError("wrong side!"), StmtReturn.FALSE]), + ) + ) c = c.other - tmpvar = ExprVar('tmp') + tmpvar = ExprVar("tmp") ct = c.bareType(fq=True) - readcase.addstmts([ - StmtDecl(Decl(ct, tmpvar.name), init=c.defaultValue(fq=True)), - StmtExpr(ExprAssn(ExprDeref(cls.var), ExprMove(tmpvar))), - cls._checkedRead(c.ipdltype, - ExprAddrOf(ExprCall(ExprSelect(cls.var, '->', - c.getTypeName()))), - origenum, - 'variant ' + origenum + ' of union ' + uniontype.name()), - StmtReturn.TRUE, - ]) + readcase.addstmts( + [ + StmtDecl(Decl(ct, tmpvar.name), init=c.defaultValue(fq=True)), + StmtExpr(ExprAssn(ExprDeref(cls.var), ExprMove(tmpvar))), + cls._checkedRead( + c.ipdltype, + ExprAddrOf( + ExprCall(ExprSelect(cls.var, "->", c.getTypeName())) + ), + origenum, + "variant " + origenum + " of union " + uniontype.name(), + ), + StmtReturn.TRUE, + ] + ) readswitch.addcase(caselabel, readcase) # Add the error default case - writeswitch.addcase(DefaultLabel(), - StmtBlock([cls.fatalError('unknown union type'), - StmtReturn()])) - readswitch.addcase(DefaultLabel(), - StmtBlock([cls.fatalError('unknown union type'), - StmtReturn.FALSE])) + writeswitch.addcase( + DefaultLabel(), + StmtBlock([cls.fatalError("unknown union type"), StmtReturn()]), + ) + readswitch.addcase( + DefaultLabel(), + StmtBlock([cls.fatalError("unknown union type"), StmtReturn.FALSE]), + ) return cls.generateDecl(cxxtype, write, read) + # -------------------------------------------------- class _ComputeTypeDeps(TypeVisitor): - '''Pass that gathers the C++ types that a particular IPDL type -(recursively) depends on. There are three kinds of dependencies: (i) -types that need forward declaration; (ii) types that need a |using| -stmt; (iii) IPDL structs or unions which must be fully declared -before this struct. Some types generate multiple kinds.''' + """Pass that gathers the C++ types that a particular IPDL type + (recursively) depends on. There are three kinds of dependencies: (i) + types that need forward declaration; (ii) types that need a |using| + stmt; (iii) IPDL structs or unions which must be fully declared + before this struct. Some types generate multiple kinds.""" def __init__(self, fortype, unqualifiedTypedefs=False): ipdl.type.TypeVisitor.__init__(self) @@ -2295,15 +2485,17 @@ def visitActorType(self, t): fqname, name = t.fullname(), t.name() - self.maybeTypedef(_actorName(fqname, 'Parent'), - _actorName(name, 'Parent')) - self.maybeTypedef(_actorName(fqname, 'Child'), - _actorName(name, 'Child')) + self.maybeTypedef(_actorName(fqname, "Parent"), _actorName(name, "Parent")) + self.maybeTypedef(_actorName(fqname, "Child"), _actorName(name, "Child")) - self.forwardDeclStmts.extend([ - _makeForwardDeclForActor(t.protocol, 'parent'), Whitespace.NL, - _makeForwardDeclForActor(t.protocol, 'child'), Whitespace.NL - ]) + self.forwardDeclStmts.extend( + [ + _makeForwardDeclForActor(t.protocol, "parent"), + Whitespace.NL, + _makeForwardDeclForActor(t.protocol, "child"), + Whitespace.NL, + ] + ) def visitStructOrUnionType(self, su, defaultVisit): if su in self.visited or su == self.fortype: @@ -2313,7 +2505,9 @@ def visitStructOrUnionType(self, su, defaultVisit): # Mutually recursive fields in unions are behind indirection, so we only # need a forward decl, and don't need a full type declaration. 
- if isinstance(self.fortype, UnionType) and self.fortype.mutuallyRecursiveWith(su): + if isinstance(self.fortype, UnionType) and self.fortype.mutuallyRecursiveWith( + su + ): self.forwardDeclStmts.append(_makeForwardDecl(su)) else: self.fullDeclTypes.append(su) @@ -2336,33 +2530,34 @@ def visitShmemType(self, s): if s in self.visited: return self.visited.add(s) - self.maybeTypedef('mozilla::ipc::Shmem', 'Shmem') + self.maybeTypedef("mozilla::ipc::Shmem", "Shmem") def visitByteBufType(self, s): if s in self.visited: return self.visited.add(s) - self.maybeTypedef('mozilla::ipc::ByteBuf', 'ByteBuf') + self.maybeTypedef("mozilla::ipc::ByteBuf", "ByteBuf") def visitFDType(self, s): if s in self.visited: return self.visited.add(s) - self.maybeTypedef('mozilla::ipc::FileDescriptor', 'FileDescriptor') + self.maybeTypedef("mozilla::ipc::FileDescriptor", "FileDescriptor") def visitEndpointType(self, s): if s in self.visited: return self.visited.add(s) - self.maybeTypedef('mozilla::ipc::Endpoint', 'Endpoint', ['FooSide']) + self.maybeTypedef("mozilla::ipc::Endpoint", "Endpoint", ["FooSide"]) self.visitActorType(s.actor) def visitManagedEndpointType(self, s): if s in self.visited: return self.visited.add(s) - self.maybeTypedef('mozilla::ipc::ManagedEndpoint', 'ManagedEndpoint', - ['FooSide']) + self.maybeTypedef( + "mozilla::ipc::ManagedEndpoint", "ManagedEndpoint", ["FooSide"] + ) self.visitActorType(s.actor) def visitUniquePtrType(self, s): @@ -2370,17 +2565,21 @@ def visitUniquePtrType(self, s): return self.visited.add(s) - def visitVoidType(self, v): assert 0 + def visitVoidType(self, v): + assert 0 - def visitMessageType(self, v): assert 0 + def visitMessageType(self, v): + assert 0 - def visitProtocolType(self, v): assert 0 + def visitProtocolType(self, v): + assert 0 def _fieldStaticAssertions(sd): staticasserts = [] - for (size, fields) in itertools.groupby(sd.fields_member_order(), - lambda f: pod_size(f.ipdltype)): + for (size, fields) in itertools.groupby( + sd.fields_member_order(), lambda f: pod_size(f.ipdltype) + ): if size == pod_size_sentinel: continue @@ -2388,22 +2587,25 @@ def _fieldStaticAssertions(sd): if len(fields) == 1: continue - staticasserts.append(StmtCode( - ''' + staticasserts.append( + StmtCode( + """ static_assert( (offsetof(${struct}, ${last}) - offsetof(${struct}, ${first})) == ${expected}, "Bad assumptions about field layout!"); - ''', - struct=sd.name, - first=fields[0].memberVar(), - last=fields[-1].memberVar(), - expected=ExprLiteral.Int(size * (len(fields) - 1)))) + """, + struct=sd.name, + first=fields[0].memberVar(), + last=fields[-1].memberVar(), + expected=ExprLiteral.Int(size * (len(fields) - 1)), + ) + ) return staticasserts def _generateCxxStruct(sd): - ''' ''' + """ """ # compute all the typedefs and forward decls we need to make gettypedeps = _ComputeTypeDeps(sd.decl.type) for f in sd.fields: @@ -2414,9 +2616,7 @@ def _generateCxxStruct(sd): fulldecltypes = gettypedeps.fullDeclTypes struct = Class(sd.name, final=True) - struct.addstmts([Label.PRIVATE] - + usingTypedefs - + [Whitespace.NL, Label.PUBLIC]) + struct.addstmts([Label.PRIVATE] + usingTypedefs + [Whitespace.NL, Label.PUBLIC]) constreftype = Type(sd.name, const=True, ref=True) @@ -2424,9 +2624,13 @@ def fieldsAsParamList(): # FIXME Bug 1547019 inType() should do the right thing once # _cxxTypeCanOnlyMove is replaced with # _cxxTypeNeedsMoveForSend - return [Decl(f.forceMoveType() if _cxxTypeCanOnlyMove(f.ipdltype) - else f.inType(), f.argVar().name) - for f in sd.fields_ipdl_order()] + return [ 
+ Decl( + f.forceMoveType() if _cxxTypeCanOnlyMove(f.ipdltype) else f.inType(), + f.argVar().name, + ) + for f in sd.fields_ipdl_order() + ] # If this is an empty struct (no fields), then the default ctor # and "create-with-fields" ctors are equivalent. So don't bother @@ -2442,14 +2646,15 @@ def fieldsAsParamList(): # normally to their default values, and will initialize any actor member # pointers to the correct default value of `nullptr`. Other C++ types # with custom constructors must also provide a default constructor. - defctor.memberinits = [ExprMemberInit(f.memberVar()) - for f in sd.fields_member_order()] + defctor.memberinits = [ + ExprMemberInit(f.memberVar()) for f in sd.fields_member_order() + ] struct.addstmts([defctor, Whitespace.NL]) # Struct(const field1& _f1, ...) - valctor = ConstructorDefn(ConstructorDecl(sd.name, - params=fieldsAsParamList(), - force_inline=True)) + valctor = ConstructorDefn( + ConstructorDecl(sd.name, params=fieldsAsParamList(), force_inline=True) + ) valctor.memberinits = [] for f in sd.fields_member_order(): arg = f.argVar() @@ -2464,38 +2669,48 @@ def fieldsAsParamList(): if sd.comparable: # bool operator==(const Struct& _o) - ovar = ExprVar('_o') - opeqeq = MethodDefn(MethodDecl( - 'operator==', - params=[Decl(constreftype, ovar.name)], - ret=Type.BOOL, - const=True)) + ovar = ExprVar("_o") + opeqeq = MethodDefn( + MethodDecl( + "operator==", + params=[Decl(constreftype, ovar.name)], + ret=Type.BOOL, + const=True, + ) + ) for f in sd.fields_ipdl_order(): - ifneq = StmtIf(ExprNot( - ExprBinary(ExprCall(f.getMethod()), '==', - ExprCall(f.getMethod(ovar))))) + ifneq = StmtIf( + ExprNot( + ExprBinary( + ExprCall(f.getMethod()), "==", ExprCall(f.getMethod(ovar)) + ) + ) + ) ifneq.addifstmt(StmtReturn.FALSE) opeqeq.addstmt(ifneq) opeqeq.addstmt(StmtReturn.TRUE) struct.addstmts([opeqeq, Whitespace.NL]) # bool operator!=(const Struct& _o) - opneq = MethodDefn(MethodDecl( - 'operator!=', - params=[Decl(constreftype, ovar.name)], - ret=Type.BOOL, - const=True)) - opneq.addstmt(StmtReturn(ExprNot(ExprCall(ExprVar('operator=='), - args=[ovar])))) + opneq = MethodDefn( + MethodDecl( + "operator!=", + params=[Decl(constreftype, ovar.name)], + ret=Type.BOOL, + const=True, + ) + ) + opneq.addstmt(StmtReturn(ExprNot(ExprCall(ExprVar("operator=="), args=[ovar])))) struct.addstmts([opneq, Whitespace.NL]) # field1& f1() # const field1& f1() const for f in sd.fields_ipdl_order(): - get = MethodDefn(MethodDecl(f.getMethod().name, - params=[], - ret=f.refType(), - force_inline=True)) + get = MethodDefn( + MethodDecl( + f.getMethod().name, params=[], ret=f.refType(), force_inline=True + ) + ) get.addstmt(StmtReturn(f.refExpr())) getconstdecl = deepcopy(get.decl) @@ -2516,16 +2731,19 @@ def fieldsAsParamList(): # time the header file containing the structure is included. 
staticasserts = _fieldStaticAssertions(sd) if staticasserts: - method = MethodDefn(MethodDecl('StaticAssertions', - params=[], - ret=Type.VOID, - const=True)) + method = MethodDefn( + MethodDecl("StaticAssertions", params=[], ret=Type.VOID, const=True) + ) method.addstmts(staticasserts) struct.addstmts([method]) # members - struct.addstmts([StmtDecl(Decl(_effectiveMemberType(f), f.memberVar().name)) - for f in sd.fields_member_order()]) + struct.addstmts( + [ + StmtDecl(Decl(_effectiveMemberType(f), f.memberVar().name)) + for f in sd.fields_member_order() + ] + ) return forwarddeclstmts, fulldecltypes, struct @@ -2540,6 +2758,7 @@ def _effectiveMemberType(f): effective_type.name = "CopyableTArray" return effective_type + # -------------------------------------------------- @@ -2586,20 +2805,20 @@ def _generateCxxUnion(ud): inClsType = Type(ud.name, const=True, ref=True) refClsType = Type(ud.name, ref=True) rvalueRefClsType = Type(ud.name, rvalref=True) - typetype = Type('Type') - valuetype = Type('Value') - mtypevar = ExprVar('mType') - mvaluevar = ExprVar('mValue') - maybedtorvar = ExprVar('MaybeDestroy') - assertsanityvar = ExprVar('AssertSanity') - tnonevar = ExprVar('T__None') - tlastvar = ExprVar('T__Last') + typetype = Type("Type") + valuetype = Type("Value") + mtypevar = ExprVar("mType") + mvaluevar = ExprVar("mValue") + maybedtorvar = ExprVar("MaybeDestroy") + assertsanityvar = ExprVar("AssertSanity") + tnonevar = ExprVar("T__None") + tlastvar = ExprVar("T__Last") def callAssertSanity(uvar=None, expectTypeVar=None): func = assertsanityvar args = [] if uvar is not None: - func = ExprSelect(uvar, '.', assertsanityvar.name) + func = ExprSelect(uvar, ".", assertsanityvar.name) if expectTypeVar is not None: args.append(expectTypeVar) return ExprCall(func, args=args) @@ -2633,35 +2852,42 @@ def voidCast(expr): for c in ud.components[1:]: typeenum.addId(c.enum()) typeenum.addId(tlastvar.name, ud.components[-1].enum()) - cls.addstmts([StmtDecl(Decl(typeenum, '')), - Whitespace.NL]) + cls.addstmts([StmtDecl(Decl(typeenum, "")), Whitespace.NL]) cls.addstmt(Label.PRIVATE) cls.addstmts( usingTypedefs # hacky typedef's that allow placement dtors of builtins - + [Typedef(c.internalType(), c.typedef()) for c in ud.components]) + + [Typedef(c.internalType(), c.typedef()) for c in ud.components] + ) cls.addstmt(Whitespace.NL) # the C++ union the discunion use for storage valueunion = TypeUnion(valuetype.name) for c in ud.components: valueunion.addComponent(c.unionType(), c.name) - cls.addstmts([StmtDecl(Decl(valueunion, '')), - Whitespace.NL]) + cls.addstmts([StmtDecl(Decl(valueunion, "")), Whitespace.NL]) # for each constituent type T, add private accessors that # return a pointer to the Value union storage casted to |T*| # and |const T*| for c in ud.components: - getptr = MethodDefn(MethodDecl( - c.getPtrName(), params=[], ret=c.ptrToInternalType(), - force_inline=True)) + getptr = MethodDefn( + MethodDecl( + c.getPtrName(), params=[], ret=c.ptrToInternalType(), force_inline=True + ) + ) getptr.addstmt(StmtReturn(c.ptrToSelfExpr())) - getptrconst = MethodDefn(MethodDecl( - c.getConstPtrName(), params=[], ret=c.constPtrToType(), - const=True, force_inline=True)) + getptrconst = MethodDefn( + MethodDecl( + c.getConstPtrName(), + params=[], + ret=c.constPtrToType(), + const=True, + force_inline=True, + ) + ) getptrconst.addstmt(StmtReturn(c.constptrToSelfExpr())) cls.addstmts([getptr, getptrconst]) @@ -2671,121 +2897,145 @@ def voidCast(expr): # current underlying value, only if |aNewType| is 
different # than the current type, and returns true if the underlying # value needs to be re-constructed - newtypevar = ExprVar('aNewType') - maybedtor = MethodDefn(MethodDecl( - maybedtorvar.name, - params=[Decl(typetype, newtypevar.name)], - ret=Type.BOOL)) + newtypevar = ExprVar("aNewType") + maybedtor = MethodDefn( + MethodDecl( + maybedtorvar.name, params=[Decl(typetype, newtypevar.name)], ret=Type.BOOL + ) + ) # wasn't /actually/ dtor'd, but it needs to be re-constructed - ifnone = StmtIf(ExprBinary(mtypevar, '==', tnonevar)) + ifnone = StmtIf(ExprBinary(mtypevar, "==", tnonevar)) ifnone.addifstmt(StmtReturn.TRUE) # same type, nothing to see here - ifnochange = StmtIf(ExprBinary(mtypevar, '==', newtypevar)) + ifnochange = StmtIf(ExprBinary(mtypevar, "==", newtypevar)) ifnochange.addifstmt(StmtReturn.FALSE) # need to destroy. switch on underlying type dtorswitch = StmtSwitch(mtypevar) for c in ud.components: dtorswitch.addcase( - CaseLabel(c.enum()), - StmtBlock([StmtExpr(c.callDtor()), - StmtBreak()])) + CaseLabel(c.enum()), StmtBlock([StmtExpr(c.callDtor()), StmtBreak()]) + ) dtorswitch.addcase( - DefaultLabel(), - StmtBlock([_logicError("not reached"), StmtBreak()])) - maybedtor.addstmts([ - ifnone, - ifnochange, - dtorswitch, - StmtReturn.TRUE - ]) + DefaultLabel(), StmtBlock([_logicError("not reached"), StmtBreak()]) + ) + maybedtor.addstmts([ifnone, ifnochange, dtorswitch, StmtReturn.TRUE]) cls.addstmts([maybedtor, Whitespace.NL]) # add helper methods that ensure the discunion has a # valid type - sanity = MethodDefn(MethodDecl( - assertsanityvar.name, ret=Type.VOID, const=True, force_inline=True)) - sanity.addstmts([ - _abortIfFalse(ExprBinary(tnonevar, '<=', mtypevar), - 'invalid type tag'), - _abortIfFalse(ExprBinary(mtypevar, '<=', tlastvar), - 'invalid type tag')]) + sanity = MethodDefn( + MethodDecl(assertsanityvar.name, ret=Type.VOID, const=True, force_inline=True) + ) + sanity.addstmts( + [ + _abortIfFalse(ExprBinary(tnonevar, "<=", mtypevar), "invalid type tag"), + _abortIfFalse(ExprBinary(mtypevar, "<=", tlastvar), "invalid type tag"), + ] + ) cls.addstmt(sanity) - atypevar = ExprVar('aType') + atypevar = ExprVar("aType") sanity2 = MethodDefn( - MethodDecl(assertsanityvar.name, - params=[Decl(typetype, atypevar.name)], - ret=Type.VOID, - const=True, force_inline=True)) - sanity2.addstmts([ - StmtExpr(ExprCall(assertsanityvar)), - _abortIfFalse(ExprBinary(mtypevar, '==', atypevar), - 'unexpected type tag')]) + MethodDecl( + assertsanityvar.name, + params=[Decl(typetype, atypevar.name)], + ret=Type.VOID, + const=True, + force_inline=True, + ) + ) + sanity2.addstmts( + [ + StmtExpr(ExprCall(assertsanityvar)), + _abortIfFalse(ExprBinary(mtypevar, "==", atypevar), "unexpected type tag"), + ] + ) cls.addstmts([sanity2, Whitespace.NL]) # ---- begin public methods ----- # Union() default ctor - cls.addstmts([ - Label.PUBLIC, - ConstructorDefn( - ConstructorDecl(ud.name, force_inline=True), - memberinits=[ExprMemberInit(mtypevar, [tnonevar])]), - Whitespace.NL - ]) + cls.addstmts( + [ + Label.PUBLIC, + ConstructorDefn( + ConstructorDecl(ud.name, force_inline=True), + memberinits=[ExprMemberInit(mtypevar, [tnonevar])], + ), + Whitespace.NL, + ] + ) # Union(const T&) copy & Union(T&&) move ctors - othervar = ExprVar('aOther') + othervar = ExprVar("aOther") for c in ud.components: if not _cxxTypeCanOnlyMove(c.ipdltype): - copyctor = ConstructorDefn(ConstructorDecl( - ud.name, params=[Decl(c.inType(), othervar.name)])) - copyctor.addstmts([ - StmtExpr(c.callCtor(othervar)), - 
StmtExpr(ExprAssn(mtypevar, c.enumvar()))]) + copyctor = ConstructorDefn( + ConstructorDecl(ud.name, params=[Decl(c.inType(), othervar.name)]) + ) + copyctor.addstmts( + [ + StmtExpr(c.callCtor(othervar)), + StmtExpr(ExprAssn(mtypevar, c.enumvar())), + ] + ) cls.addstmts([copyctor, Whitespace.NL]) if not _cxxTypeCanMove(c.ipdltype) or _cxxTypeNeedsMoveForSend(c.ipdltype): continue - movector = ConstructorDefn(ConstructorDecl( - ud.name, params=[Decl(c.forceMoveType(), othervar.name)])) - movector.addstmts([ - StmtExpr(c.callCtor(ExprMove(othervar))), - StmtExpr(ExprAssn(mtypevar, c.enumvar()))]) + movector = ConstructorDefn( + ConstructorDecl(ud.name, params=[Decl(c.forceMoveType(), othervar.name)]) + ) + movector.addstmts( + [ + StmtExpr(c.callCtor(ExprMove(othervar))), + StmtExpr(ExprAssn(mtypevar, c.enumvar())), + ] + ) cls.addstmts([movector, Whitespace.NL]) unionNeedsMove = any(_cxxTypeCanOnlyMove(c.ipdltype) for c in ud.components) # Union(const Union&) copy ctor if not unionNeedsMove: - copyctor = ConstructorDefn(ConstructorDecl( - ud.name, params=[Decl(inClsType, othervar.name)])) + copyctor = ConstructorDefn( + ConstructorDecl(ud.name, params=[Decl(inClsType, othervar.name)]) + ) othertype = ud.callType(othervar) copyswitch = StmtSwitch(othertype) for c in ud.components: copyswitch.addcase( CaseLabel(c.enum()), - StmtBlock([ - StmtExpr(c.callCtor( - ExprCall(ExprSelect(othervar, - '.', c.getConstTypeName())))), - StmtBreak() - ])) - copyswitch.addcase(CaseLabel(tnonevar.name), - StmtBlock([StmtBreak()])) + StmtBlock( + [ + StmtExpr( + c.callCtor( + ExprCall( + ExprSelect(othervar, ".", c.getConstTypeName()) + ) + ) + ), + StmtBreak(), + ] + ), + ) + copyswitch.addcase(CaseLabel(tnonevar.name), StmtBlock([StmtBreak()])) copyswitch.addcase( - DefaultLabel(), - StmtBlock([_logicError('unreached'), StmtReturn()])) - copyctor.addstmts([ - StmtExpr(callAssertSanity(uvar=othervar)), - copyswitch, - StmtExpr(ExprAssn(mtypevar, othertype)) - ]) + DefaultLabel(), StmtBlock([_logicError("unreached"), StmtReturn()]) + ) + copyctor.addstmts( + [ + StmtExpr(callAssertSanity(uvar=othervar)), + copyswitch, + StmtExpr(ExprAssn(mtypevar, othertype)), + ] + ) cls.addstmts([copyctor, Whitespace.NL]) # Union(Union&&) move ctor - movector = ConstructorDefn(ConstructorDecl( - ud.name, params=[Decl(rvalueRefClsType, othervar.name)])) + movector = ConstructorDefn( + ConstructorDecl(ud.name, params=[Decl(rvalueRefClsType, othervar.name)]) + ) othertypevar = ExprVar("t") moveswitch = StmtSwitch(othertypevar) for c in ud.components: @@ -2793,35 +3043,55 @@ def voidCast(expr): if c.recursive: # This is sound as we set othervar.mTypeVar to T__None after the # switch. The pointer in the union will be left dangling. - case.addstmts([ - # ptr_C() = other.ptr_C() - StmtExpr(ExprAssn(c.callGetPtr(), - ExprCall(ExprSelect(othervar, '.', ExprVar(c.getPtrName()))))) - ]) + case.addstmts( + [ + # ptr_C() = other.ptr_C() + StmtExpr( + ExprAssn( + c.callGetPtr(), + ExprCall( + ExprSelect(othervar, ".", ExprVar(c.getPtrName())) + ), + ) + ) + ] + ) else: - case.addstmts([ - # new ... (Move(other.get_C())) - StmtExpr(c.callCtor(ExprMove(ExprCall(ExprSelect(othervar, '.', - c.getTypeName()))))), - # other.MaybeDestroy(T__None) - StmtExpr( - voidCast(ExprCall(ExprSelect(othervar, '.', maybedtorvar), - args=[tnonevar]))), - ]) + case.addstmts( + [ + # new ... 
(Move(other.get_C())) + StmtExpr( + c.callCtor( + ExprMove( + ExprCall(ExprSelect(othervar, ".", c.getTypeName())) + ) + ) + ), + # other.MaybeDestroy(T__None) + StmtExpr( + voidCast( + ExprCall( + ExprSelect(othervar, ".", maybedtorvar), args=[tnonevar] + ) + ) + ), + ] + ) case.addstmts([StmtBreak()]) moveswitch.addcase(CaseLabel(c.enum()), case) - moveswitch.addcase(CaseLabel(tnonevar.name), - StmtBlock([StmtBreak()])) + moveswitch.addcase(CaseLabel(tnonevar.name), StmtBlock([StmtBreak()])) moveswitch.addcase( - DefaultLabel(), - StmtBlock([_logicError('unreached'), StmtReturn()])) - movector.addstmts([ - StmtExpr(callAssertSanity(uvar=othervar)), - StmtDecl(Decl(typetype, othertypevar.name), init=ud.callType(othervar)), - moveswitch, - StmtExpr(ExprAssn(ExprSelect(othervar, '.', mtypevar), tnonevar)), - StmtExpr(ExprAssn(mtypevar, othertypevar)) - ]) + DefaultLabel(), StmtBlock([_logicError("unreached"), StmtReturn()]) + ) + movector.addstmts( + [ + StmtExpr(callAssertSanity(uvar=othervar)), + StmtDecl(Decl(typetype, othertypevar.name), init=ud.callType(othervar)), + moveswitch, + StmtExpr(ExprAssn(ExprSelect(othervar, ".", mtypevar), tnonevar)), + StmtExpr(ExprAssn(mtypevar, othertypevar)), + ] + ) cls.addstmts([movector, Whitespace.NL]) # ~Union() @@ -2832,163 +3102,214 @@ def voidCast(expr): cls.addstmts([dtor, Whitespace.NL]) # type() - typemeth = MethodDefn(MethodDecl('type', ret=typetype, - const=True, force_inline=True)) + typemeth = MethodDefn( + MethodDecl("type", ret=typetype, const=True, force_inline=True) + ) typemeth.addstmt(StmtReturn(mtypevar)) cls.addstmts([typemeth, Whitespace.NL]) # Union& operator= methods - rhsvar = ExprVar('aRhs') + rhsvar = ExprVar("aRhs") for c in ud.components: if not _cxxTypeCanOnlyMove(c.ipdltype): # Union& operator=(const T&) - opeq = MethodDefn(MethodDecl( - 'operator=', - params=[Decl(c.inType(), rhsvar.name)], - ret=refClsType)) - opeq.addstmts([ - # might need to placement-delete old value first - maybeReconstruct(c, c.enumvar()), - StmtExpr(c.callOperatorEq(rhsvar)), - StmtExpr(ExprAssn(mtypevar, c.enumvar())), - StmtReturn(ExprDeref(ExprVar.THIS)) - ]) + opeq = MethodDefn( + MethodDecl( + "operator=", params=[Decl(c.inType(), rhsvar.name)], ret=refClsType + ) + ) + opeq.addstmts( + [ + # might need to placement-delete old value first + maybeReconstruct(c, c.enumvar()), + StmtExpr(c.callOperatorEq(rhsvar)), + StmtExpr(ExprAssn(mtypevar, c.enumvar())), + StmtReturn(ExprDeref(ExprVar.THIS)), + ] + ) cls.addstmts([opeq, Whitespace.NL]) # Union& operator=(T&&) if not _cxxTypeCanMove(c.ipdltype) or _cxxTypeNeedsMoveForSend(c.ipdltype): continue - opeq = MethodDefn(MethodDecl( - 'operator=', - params=[Decl(c.forceMoveType(), rhsvar.name)], - ret=refClsType)) - opeq.addstmts([ - # might need to placement-delete old value first - maybeReconstruct(c, c.enumvar()), - StmtExpr(c.callOperatorEq(ExprMove(rhsvar))), - StmtExpr(ExprAssn(mtypevar, c.enumvar())), - StmtReturn(ExprDeref(ExprVar.THIS)) - ]) + opeq = MethodDefn( + MethodDecl( + "operator=", + params=[Decl(c.forceMoveType(), rhsvar.name)], + ret=refClsType, + ) + ) + opeq.addstmts( + [ + # might need to placement-delete old value first + maybeReconstruct(c, c.enumvar()), + StmtExpr(c.callOperatorEq(ExprMove(rhsvar))), + StmtExpr(ExprAssn(mtypevar, c.enumvar())), + StmtReturn(ExprDeref(ExprVar.THIS)), + ] + ) cls.addstmts([opeq, Whitespace.NL]) # Union& operator=(const Union&) if not unionNeedsMove: - opeq = MethodDefn(MethodDecl( - 'operator=', - params=[Decl(inClsType, rhsvar.name)], - 
ret=refClsType)) - rhstypevar = ExprVar('t') + opeq = MethodDefn( + MethodDecl( + "operator=", params=[Decl(inClsType, rhsvar.name)], ret=refClsType + ) + ) + rhstypevar = ExprVar("t") opeqswitch = StmtSwitch(rhstypevar) for c in ud.components: case = StmtBlock() - case.addstmts([ - maybeReconstruct(c, rhstypevar), - StmtExpr(c.callOperatorEq( - ExprCall(ExprSelect(rhsvar, '.', c.getConstTypeName())))), - StmtBreak() - ]) + case.addstmts( + [ + maybeReconstruct(c, rhstypevar), + StmtExpr( + c.callOperatorEq( + ExprCall(ExprSelect(rhsvar, ".", c.getConstTypeName())) + ) + ), + StmtBreak(), + ] + ) opeqswitch.addcase(CaseLabel(c.enum()), case) opeqswitch.addcase( CaseLabel(tnonevar.name), # The void cast prevents Coverity from complaining about missing return # value checks. - StmtBlock([StmtExpr(ExprCast(callMaybeDestroy(rhstypevar), Type.VOID, - static=True)), - StmtBreak()]) + StmtBlock( + [ + StmtExpr( + ExprCast(callMaybeDestroy(rhstypevar), Type.VOID, static=True) + ), + StmtBreak(), + ] + ), ) opeqswitch.addcase( - DefaultLabel(), - StmtBlock([_logicError('unreached'), StmtBreak()])) - opeq.addstmts([ - StmtExpr(callAssertSanity(uvar=rhsvar)), - StmtDecl(Decl(typetype, rhstypevar.name), init=ud.callType(rhsvar)), - opeqswitch, - StmtExpr(ExprAssn(mtypevar, rhstypevar)), - StmtReturn(ExprDeref(ExprVar.THIS)) - ]) + DefaultLabel(), StmtBlock([_logicError("unreached"), StmtBreak()]) + ) + opeq.addstmts( + [ + StmtExpr(callAssertSanity(uvar=rhsvar)), + StmtDecl(Decl(typetype, rhstypevar.name), init=ud.callType(rhsvar)), + opeqswitch, + StmtExpr(ExprAssn(mtypevar, rhstypevar)), + StmtReturn(ExprDeref(ExprVar.THIS)), + ] + ) cls.addstmts([opeq, Whitespace.NL]) # Union& operator=(Union&&) - opeq = MethodDefn(MethodDecl( - 'operator=', - params=[Decl(rvalueRefClsType, rhsvar.name)], - ret=refClsType)) - rhstypevar = ExprVar('t') + opeq = MethodDefn( + MethodDecl( + "operator=", params=[Decl(rvalueRefClsType, rhsvar.name)], ret=refClsType + ) + ) + rhstypevar = ExprVar("t") opeqswitch = StmtSwitch(rhstypevar) for c in ud.components: case = StmtBlock() if c.recursive: - case.addstmts([ - StmtExpr(voidCast(callMaybeDestroy(tnonevar))), - StmtExpr(ExprAssn(c.callGetPtr(), - ExprCall(ExprSelect(rhsvar, '.', ExprVar(c.getPtrName()))))), - ]) + case.addstmts( + [ + StmtExpr(voidCast(callMaybeDestroy(tnonevar))), + StmtExpr( + ExprAssn( + c.callGetPtr(), + ExprCall(ExprSelect(rhsvar, ".", ExprVar(c.getPtrName()))), + ) + ), + ] + ) else: - case.addstmts([ - maybeReconstruct(c, rhstypevar), - StmtExpr(c.callOperatorEq( - ExprMove(ExprCall(ExprSelect(rhsvar, '.', c.getTypeName()))))), - # other.MaybeDestroy(T__None) - StmtExpr( - voidCast(ExprCall(ExprSelect(rhsvar, '.', maybedtorvar), args=[tnonevar]))), - ]) + case.addstmts( + [ + maybeReconstruct(c, rhstypevar), + StmtExpr( + c.callOperatorEq( + ExprMove(ExprCall(ExprSelect(rhsvar, ".", c.getTypeName()))) + ) + ), + # other.MaybeDestroy(T__None) + StmtExpr( + voidCast( + ExprCall( + ExprSelect(rhsvar, ".", maybedtorvar), args=[tnonevar] + ) + ) + ), + ] + ) case.addstmts([StmtBreak()]) opeqswitch.addcase(CaseLabel(c.enum()), case) opeqswitch.addcase( CaseLabel(tnonevar.name), # The void cast prevents Coverity from complaining about missing return # value checks. 
- StmtBlock([StmtExpr(voidCast(callMaybeDestroy(rhstypevar))), - StmtBreak()]) + StmtBlock([StmtExpr(voidCast(callMaybeDestroy(rhstypevar))), StmtBreak()]), ) opeqswitch.addcase( - DefaultLabel(), - StmtBlock([_logicError('unreached'), StmtBreak()])) - opeq.addstmts([ - StmtExpr(callAssertSanity(uvar=rhsvar)), - StmtDecl(Decl(typetype, rhstypevar.name), init=ud.callType(rhsvar)), - opeqswitch, - StmtExpr(ExprAssn(ExprSelect(rhsvar, '.', mtypevar), tnonevar)), - StmtExpr(ExprAssn(mtypevar, rhstypevar)), - StmtReturn(ExprDeref(ExprVar.THIS)) - ]) + DefaultLabel(), StmtBlock([_logicError("unreached"), StmtBreak()]) + ) + opeq.addstmts( + [ + StmtExpr(callAssertSanity(uvar=rhsvar)), + StmtDecl(Decl(typetype, rhstypevar.name), init=ud.callType(rhsvar)), + opeqswitch, + StmtExpr(ExprAssn(ExprSelect(rhsvar, ".", mtypevar), tnonevar)), + StmtExpr(ExprAssn(mtypevar, rhstypevar)), + StmtReturn(ExprDeref(ExprVar.THIS)), + ] + ) cls.addstmts([opeq, Whitespace.NL]) if ud.comparable: # bool operator==(const T&) for c in ud.components: - opeqeq = MethodDefn(MethodDecl( - 'operator==', - params=[Decl(c.inType(), rhsvar.name)], - ret=Type.BOOL, - const=True)) - opeqeq.addstmt(StmtReturn(ExprBinary( - ExprCall(ExprVar(c.getTypeName())), '==', rhsvar))) + opeqeq = MethodDefn( + MethodDecl( + "operator==", + params=[Decl(c.inType(), rhsvar.name)], + ret=Type.BOOL, + const=True, + ) + ) + opeqeq.addstmt( + StmtReturn(ExprBinary(ExprCall(ExprVar(c.getTypeName())), "==", rhsvar)) + ) cls.addstmts([opeqeq, Whitespace.NL]) # bool operator==(const Union&) - opeqeq = MethodDefn(MethodDecl( - 'operator==', - params=[Decl(inClsType, rhsvar.name)], - ret=Type.BOOL, - const=True)) - iftypesmismatch = StmtIf(ExprBinary(ud.callType(), '!=', - ud.callType(rhsvar))) + opeqeq = MethodDefn( + MethodDecl( + "operator==", + params=[Decl(inClsType, rhsvar.name)], + ret=Type.BOOL, + const=True, + ) + ) + iftypesmismatch = StmtIf(ExprBinary(ud.callType(), "!=", ud.callType(rhsvar))) iftypesmismatch.addifstmt(StmtReturn.FALSE) opeqeq.addstmts([iftypesmismatch, Whitespace.NL]) opeqeqswitch = StmtSwitch(ud.callType()) for c in ud.components: case = StmtBlock() - case.addstmt(StmtReturn(ExprBinary( - ExprCall(ExprVar(c.getTypeName())), '==', - ExprCall(ExprSelect(rhsvar, '.', c.getTypeName()))))) + case.addstmt( + StmtReturn( + ExprBinary( + ExprCall(ExprVar(c.getTypeName())), + "==", + ExprCall(ExprSelect(rhsvar, ".", c.getTypeName())), + ) + ) + ) opeqeqswitch.addcase(CaseLabel(c.enum()), case) opeqeqswitch.addcase( - DefaultLabel(), - StmtBlock([_logicError('unreached'), - StmtReturn.FALSE])) + DefaultLabel(), StmtBlock([_logicError("unreached"), StmtReturn.FALSE]) + ) opeqeq.addstmt(opeqeqswitch) cls.addstmts([opeqeq, Whitespace.NL]) @@ -2999,48 +3320,61 @@ def voidCast(expr): getValueVar = ExprVar(c.getTypeName()) getConstValueVar = ExprVar(c.getConstTypeName()) - getvalue = MethodDefn(MethodDecl(getValueVar.name, - ret=c.refType(), - force_inline=True)) - getvalue.addstmts([ - StmtExpr(callAssertSanity(expectTypeVar=c.enumvar())), - StmtReturn(ExprDeref(c.callGetPtr())) - ]) - - getconstvalue = MethodDefn(MethodDecl( - getConstValueVar.name, ret=c.constRefType(), - const=True, force_inline=True)) - getconstvalue.addstmts([ - StmtExpr(callAssertSanity(expectTypeVar=c.enumvar())), - StmtReturn(c.getConstValue()) - ]) + getvalue = MethodDefn( + MethodDecl(getValueVar.name, ret=c.refType(), force_inline=True) + ) + getvalue.addstmts( + [ + StmtExpr(callAssertSanity(expectTypeVar=c.enumvar())), + 
StmtReturn(ExprDeref(c.callGetPtr())), + ] + ) + + getconstvalue = MethodDefn( + MethodDecl( + getConstValueVar.name, + ret=c.constRefType(), + const=True, + force_inline=True, + ) + ) + getconstvalue.addstmts( + [ + StmtExpr(callAssertSanity(expectTypeVar=c.enumvar())), + StmtReturn(c.getConstValue()), + ] + ) cls.addstmts([getvalue, getconstvalue]) - optype = MethodDefn(MethodDecl('', typeop=c.refType(), force_inline=True)) + optype = MethodDefn(MethodDecl("", typeop=c.refType(), force_inline=True)) optype.addstmt(StmtReturn(ExprCall(getValueVar))) - opconsttype = MethodDefn(MethodDecl( - '', const=True, typeop=c.constRefType(), force_inline=True)) + opconsttype = MethodDefn( + MethodDecl("", const=True, typeop=c.constRefType(), force_inline=True) + ) opconsttype.addstmt(StmtReturn(ExprCall(getConstValueVar))) cls.addstmts([optype, opconsttype, Whitespace.NL]) # private vars - cls.addstmts([ - Label.PRIVATE, - StmtDecl(Decl(valuetype, mvaluevar.name)), - StmtDecl(Decl(typetype, mtypevar.name)) - ]) + cls.addstmts( + [ + Label.PRIVATE, + StmtDecl(Decl(valuetype, mvaluevar.name)), + StmtDecl(Decl(typetype, mtypevar.name)), + ] + ) return forwarddeclstmts, fulldecltypes, cls + # ----------------------------------------------------------------------------- class _FindFriends(ipdl.ast.Visitor): def __init__(self): - self.mytype = None # ProtocolType - self.vtype = None # ProtocolType - self.friends = set() # set + self.mytype = None # ProtocolType + self.vtype = None # ProtocolType + self.friends = set() # set def findFriends(self, ptype): self.mytype = ptype @@ -3080,7 +3414,7 @@ def iterActorParams(self, md): class _GenerateProtocolActorCode(ipdl.ast.Visitor): def __init__(self, myside): - self.side = myside # "parent" or "child" + self.side = myside # "parent" or "child" self.prettyside = myside.title() self.clsname = None self.protocol = None @@ -3103,11 +3437,11 @@ def lower(self, tu, clsname, cxxHeaderFile, cxxFile): def standardTypedefs(self): return [ - Typedef(Type('mozilla::ipc::IProtocol'), 'IProtocol'), - Typedef(Type('IPC::Message'), 'Message'), - Typedef(Type('base::ProcessHandle'), 'ProcessHandle'), - Typedef(Type('mozilla::ipc::MessageChannel'), 'MessageChannel'), - Typedef(Type('mozilla::ipc::SharedMemory'), 'SharedMemory'), + Typedef(Type("mozilla::ipc::IProtocol"), "IProtocol"), + Typedef(Type("IPC::Message"), "Message"), + Typedef(Type("base::ProcessHandle"), "ProcessHandle"), + Typedef(Type("mozilla::ipc::MessageChannel"), "MessageChannel"), + Typedef(Type("mozilla::ipc::SharedMemory"), "SharedMemory"), ] def visitTranslationUnit(self, tu): @@ -3122,10 +3456,9 @@ def visitTranslationUnit(self, tu): + _includeGuardStart(hf) + [ Whitespace.NL, - CppDirective( - 'include', - '"' + _protocolHeaderName(tu.protocol) + '.h"') - ]) + CppDirective("include", '"' + _protocolHeaderName(tu.protocol) + '.h"'), + ] + ) for inc in tu.includes: inc.accept(self) @@ -3144,12 +3477,11 @@ def visitTranslationUnit(self, tu): # class scope for stmt in clsdefn.stmts: if isinstance(stmt, MethodDefn): - if stmt.decl.ret and stmt.decl.ret.name == 'Result': - stmt.decl.ret.name = clsdecl.name + '::' + stmt.decl.ret.name + if stmt.decl.ret and stmt.decl.ret.name == "Result": + stmt.decl.ret.name = clsdecl.name + "::" + stmt.decl.ret.name def setToIncludes(s): - return [CppDirective('include', '"%s"' % i) - for i in sorted(iter(s))] + return [CppDirective("include", '"%s"' % i) for i in sorted(iter(s))] def makeNamespace(p, file): if 0 == len(p.namespaces): @@ -3161,8 +3493,10 @@ def makeNamespace(p, 
file): if len(self.nonForwardDeclaredHeaders) != 0: self.hdrfile.addthings( - [Whitespace('// Headers for things that cannot be forward declared'), - Whitespace.NL] + [ + Whitespace("// Headers for things that cannot be forward declared"), + Whitespace.NL, + ] + setToIncludes(self.nonForwardDeclaredHeaders) + [Whitespace.NL] ) @@ -3170,20 +3504,14 @@ def makeNamespace(p, file): self.hdrfile.addthings(self.usingDecls) hdrns = makeNamespace(self.protocol, self.hdrfile) - hdrns.addstmts([ - Whitespace.NL, - Whitespace.NL, - clsdecl, - Whitespace.NL, - Whitespace.NL - ]) + hdrns.addstmts( + [Whitespace.NL, Whitespace.NL, clsdecl, Whitespace.NL, Whitespace.NL] + ) actortype = ActorType(tu.protocol.decl.type) traitsdecl, traitsdefn = _ParamTraits.actorPickling(actortype, self.side) - self.hdrfile.addthings( - [traitsdecl, Whitespace.NL] + _includeGuardEnd(hf) - ) + self.hdrfile.addthings([traitsdecl, Whitespace.NL] + _includeGuardEnd(hf)) # make the .cpp file if (self.protocol.name, self.side) not in VIRTUAL_CALL_CLASSES: @@ -3198,34 +3526,40 @@ def makeNamespace(p, file): ) self.externalIncludes.add(header_file) - cf.addthings([ - _DISCLAIMER, - Whitespace.NL, - CppDirective( - 'include', - '"' + _protocolHeaderName(self.protocol, self.side) + '.h"')] - + setToIncludes(self.externalIncludes)) - - cppheaders = [CppDirective('include', '"%s"' % filename) - for filename in ipdl.builtin.CppIncludes] - - cf.addthings(( - [Whitespace.NL] - + [CppDirective( - 'include', - '"%s.h"' % (inc)) for inc in self.protocolCxxIncludes] - + [Whitespace.NL] - + cppheaders - + [Whitespace.NL])) + cf.addthings( + [ + _DISCLAIMER, + Whitespace.NL, + CppDirective( + "include", + '"' + _protocolHeaderName(self.protocol, self.side) + '.h"', + ), + ] + + setToIncludes(self.externalIncludes) + ) + + cppheaders = [ + CppDirective("include", '"%s"' % filename) + for filename in ipdl.builtin.CppIncludes + ] + + cf.addthings( + ( + [Whitespace.NL] + + [ + CppDirective("include", '"%s.h"' % (inc)) + for inc in self.protocolCxxIncludes + ] + + [Whitespace.NL] + + cppheaders + + [Whitespace.NL] + ) + ) cppns = makeNamespace(self.protocol, cf) - cppns.addstmts([ - Whitespace.NL, - Whitespace.NL, - clsdefn, - Whitespace.NL, - Whitespace.NL - ]) + cppns.addstmts( + [Whitespace.NL, Whitespace.NL, clsdefn, Whitespace.NL, Whitespace.NL] + ) cf.addthing(traitsdefn) @@ -3236,12 +3570,17 @@ def visitUsingStmt(self, using): if using.canBeForwardDeclared() and not using.decl.type.isUniquePtr(): spec = using.type.spec - self.usingDecls.extend([ - _makeForwardDeclForQClass(spec.baseid, spec.quals, - cls=using.isClass(), - struct=using.isStruct()), - Whitespace.NL - ]) + self.usingDecls.extend( + [ + _makeForwardDeclForQClass( + spec.baseid, + spec.quals, + cls=using.isClass(), + struct=using.isStruct(), + ), + Whitespace.NL, + ] + ) self.externalIncludes.add(using.header) else: self.nonForwardDeclaredHeaders.add(using.header) @@ -3254,25 +3593,33 @@ def visitInclude(self, inc): if not ip: return - self.actorForwardDecls.extend([ - _makeForwardDeclForActor(ip.decl.type, self.side), - _makeForwardDeclForActor(ip.decl.type, _otherSide(self.side)), - Whitespace.NL - ]) + self.actorForwardDecls.extend( + [ + _makeForwardDeclForActor(ip.decl.type, self.side), + _makeForwardDeclForActor(ip.decl.type, _otherSide(self.side)), + Whitespace.NL, + ] + ) self.protocolCxxIncludes.append(_protocolHeaderName(ip, self.side)) if ip.decl.fullname is not None: - self.includedActorTypedefs.append(Typedef( - Type(_actorName(ip.decl.fullname, self.side.title())), 
- _actorName(ip.decl.shortname, self.side.title()))) + self.includedActorTypedefs.append( + Typedef( + Type(_actorName(ip.decl.fullname, self.side.title())), + _actorName(ip.decl.shortname, self.side.title()), + ) + ) - self.includedActorTypedefs.append(Typedef( - Type(_actorName(ip.decl.fullname, _otherSide(self.side).title())), - _actorName(ip.decl.shortname, _otherSide(self.side).title()))) + self.includedActorTypedefs.append( + Typedef( + Type(_actorName(ip.decl.fullname, _otherSide(self.side).title())), + _actorName(ip.decl.shortname, _otherSide(self.side).title()), + ) + ) def visitProtocol(self, p): self.hdrfile.addcode( - ''' + """ #ifdef DEBUG #include "prenv.h" #endif // DEBUG @@ -3280,7 +3627,8 @@ def visitProtocol(self, p): #include "mozilla/Tainting.h" #include "mozilla/ipc/MessageChannel.h" #include "mozilla/ipc/ProtocolUtils.h" - ''') + """ + ) self.protocol = p ptype = p.decl.type @@ -3294,25 +3642,20 @@ def visitProtocol(self, p): inherits = [] if ptype.isToplevel(): - inherits.append(Inherit(p.openedProtocolInterfaceType(), - viz='public')) + inherits.append(Inherit(p.openedProtocolInterfaceType(), viz="public")) else: - inherits.append(Inherit(p.managerInterfaceType(), viz='public')) + inherits.append(Inherit(p.managerInterfaceType(), viz="public")) if hasAsyncReturns: - inherits.append(Inherit(Type('SupportsWeakPtr'), viz='public')) - self.hdrfile.addthing(CppDirective('include', '"mozilla/WeakPtr.h"')) + inherits.append(Inherit(Type("SupportsWeakPtr"), viz="public")) + self.hdrfile.addthing(CppDirective("include", '"mozilla/WeakPtr.h"')) - if ptype.isToplevel() and self.side == 'parent': - self.hdrfile.addthings([ - _makeForwardDeclForQClass('nsIFile', []), - Whitespace.NL - ]) + if ptype.isToplevel() and self.side == "parent": + self.hdrfile.addthings( + [_makeForwardDeclForQClass("nsIFile", []), Whitespace.NL] + ) - self.cls = Class( - self.clsname, - inherits=inherits, - abstract=True) + self.cls = Class(self.clsname, inherits=inherits, abstract=True) self.cls.addstmt(Label.PRIVATE) friends = _FindFriends().findFriends(ptype) @@ -3326,12 +3669,12 @@ def visitProtocol(self, p): friends.discard(ptype) for friend in friends: - self.actorForwardDecls.extend([ - _makeForwardDeclForActor(friend, self.prettyside), - Whitespace.NL - ]) - self.cls.addstmt(FriendClassDecl(_actorName(friend.fullname(), - self.prettyside))) + self.actorForwardDecls.extend( + [_makeForwardDeclForActor(friend, self.prettyside), Whitespace.NL] + ) + self.cls.addstmt( + FriendClassDecl(_actorName(friend.fullname(), self.prettyside)) + ) self.cls.addstmt(Label.PROTECTED) for typedef in p.cxxTypedefs(): @@ -3346,12 +3689,12 @@ def visitProtocol(self, p): for md in p.messageDecls: if self.sendsMessage(md) and md.hasAsyncReturns(): self.cls.addstmt( - Typedef(_makePromise(md.returns, self.side), - md.promiseName())) + Typedef(_makePromise(md.returns, self.side), md.promiseName()) + ) if self.receivesMessage(md) and md.hasAsyncReturns(): self.cls.addstmt( - Typedef(_makeResolver(md.returns, self.side), - md.resolverName())) + Typedef(_makeResolver(md.returns, self.side), md.resolverName()) + ) self.cls.addstmt(Whitespace.NL) self.cls.addstmt(Label.PROTECTED) @@ -3361,14 +3704,20 @@ def visitProtocol(self, p): if self.receivesMessage(md): # generate Recv/Answer* interface - implicit = (not isdtor) - returnsems = 'resolver' if md.decl.type.isAsync() else 'out' + implicit = not isdtor + returnsems = "resolver" if md.decl.type.isAsync() else "out" recvDecl = MethodDecl( md.recvMethod(), - 
params=md.makeCxxParams(paramsems='move', returnsems=returnsems, - side=self.side, implicit=implicit, direction='recv'), - ret=Type('mozilla::ipc::IPCResult'), - methodspec=MethodSpec.VIRTUAL) + params=md.makeCxxParams( + paramsems="move", + returnsems=returnsems, + side=self.side, + implicit=implicit, + direction="recv", + ), + ret=Type("mozilla::ipc::IPCResult"), + methodspec=MethodSpec.VIRTUAL, + ) # These method implementations cause problems when trying to # override them with different types in a direct call class. @@ -3379,7 +3728,7 @@ def visitProtocol(self, p): # overriding. if isctor or (isdtor and not md.inParams): defaultRecv = MethodDefn(recvDecl) - defaultRecv.addcode('return IPC_OK();\n') + defaultRecv.addcode("return IPC_OK();\n") self.cls.addstmt(defaultRecv) elif (self.protocol.name, self.side) in VIRTUAL_CALL_CLASSES: # If we're using virtual calls, we need the methods to be @@ -3405,10 +3754,18 @@ def visitProtocol(self, p): actortype.ptr = False actortype = _alreadyaddrefed(actortype) - self.cls.addstmt(StmtDecl(MethodDecl( - _allocMethod(managed, self.side), - params=md.makeCxxParams(side=self.side, implicit=False, direction='recv'), - ret=actortype, methodspec=MethodSpec.PURE))) + self.cls.addstmt( + StmtDecl( + MethodDecl( + _allocMethod(managed, self.side), + params=md.makeCxxParams( + side=self.side, implicit=False, direction="recv" + ), + ret=actortype, + methodspec=MethodSpec.PURE, + ) + ) + ) # add the Dealloc interface for all managed non-refcounted actors, # even without ctors. This is useful for protocols which use @@ -3417,169 +3774,226 @@ def visitProtocol(self, p): if managed.isRefcounted(): continue - self.cls.addstmt(StmtDecl(MethodDecl( - _deallocMethod(managed, self.side), - params=[Decl(p.managedCxxType(managed, self.side), 'aActor')], - ret=Type.BOOL, methodspec=MethodSpec.PURE))) + self.cls.addstmt( + StmtDecl( + MethodDecl( + _deallocMethod(managed, self.side), + params=[ + Decl(p.managedCxxType(managed, self.side), "aActor") + ], + ret=Type.BOOL, + methodspec=MethodSpec.PURE, + ) + ) + ) if ptype.isToplevel(): # void ProcessingError(code); default to no-op processingerror = MethodDefn( - MethodDecl(p.processingErrorVar().name, - params=[Param(_Result.Type(), 'aCode'), - Param(Type('char', const=True, ptr=True), 'aReason')], - methodspec=MethodSpec.OVERRIDE)) + MethodDecl( + p.processingErrorVar().name, + params=[ + Param(_Result.Type(), "aCode"), + Param(Type("char", const=True, ptr=True), "aReason"), + ], + methodspec=MethodSpec.OVERRIDE, + ) + ) # bool ShouldContinueFromReplyTimeout(); default to |true| shouldcontinue = MethodDefn( - MethodDecl(p.shouldContinueFromTimeoutVar().name, - ret=Type.BOOL, methodspec=MethodSpec.OVERRIDE)) - shouldcontinue.addcode('return true;\n') + MethodDecl( + p.shouldContinueFromTimeoutVar().name, + ret=Type.BOOL, + methodspec=MethodSpec.OVERRIDE, + ) + ) + shouldcontinue.addcode("return true;\n") # void Entered*()/Exited*(); default to no-op entered = MethodDefn( - MethodDecl(p.enteredCxxStackVar().name, methodspec=MethodSpec.OVERRIDE)) + MethodDecl(p.enteredCxxStackVar().name, methodspec=MethodSpec.OVERRIDE) + ) exited = MethodDefn( - MethodDecl(p.exitedCxxStackVar().name, methodspec=MethodSpec.OVERRIDE)) + MethodDecl(p.exitedCxxStackVar().name, methodspec=MethodSpec.OVERRIDE) + ) enteredcall = MethodDefn( - MethodDecl(p.enteredCallVar().name, methodspec=MethodSpec.OVERRIDE)) + MethodDecl(p.enteredCallVar().name, methodspec=MethodSpec.OVERRIDE) + ) exitedcall = MethodDefn( - MethodDecl(p.exitedCallVar().name, 
methodspec=MethodSpec.OVERRIDE)) + MethodDecl(p.exitedCallVar().name, methodspec=MethodSpec.OVERRIDE) + ) - self.cls.addstmts([processingerror, - shouldcontinue, - entered, exited, - enteredcall, exitedcall, - Whitespace.NL]) + self.cls.addstmts( + [ + processingerror, + shouldcontinue, + entered, + exited, + enteredcall, + exitedcall, + Whitespace.NL, + ] + ) - self.cls.addstmts(( - [Label.PUBLIC] - + self.standardTypedefs() - + [Whitespace.NL] - )) + self.cls.addstmts(([Label.PUBLIC] + self.standardTypedefs() + [Whitespace.NL])) self.cls.addstmt(Label.PUBLIC) # Actor() ctor = ConstructorDefn(ConstructorDecl(self.clsname)) - side = ExprVar('mozilla::ipc::' + self.side.title() + 'Side') + side = ExprVar("mozilla::ipc::" + self.side.title() + "Side") if ptype.isToplevel(): name = ExprLiteral.String(_actorName(p.name, self.side)) ctor.memberinits = [ - ExprMemberInit(ExprVar('mozilla::ipc::IToplevelProtocol'), - [name, _protocolId(ptype), side]) + ExprMemberInit( + ExprVar("mozilla::ipc::IToplevelProtocol"), + [name, _protocolId(ptype), side], + ) ] else: ctor.memberinits = [ - ExprMemberInit(ExprVar('mozilla::ipc::IProtocol'), - [_protocolId(ptype), side]) + ExprMemberInit( + ExprVar("mozilla::ipc::IProtocol"), [_protocolId(ptype), side] + ) ] - ctor.addcode('MOZ_COUNT_CTOR(${clsname});\n', clsname=self.clsname) + ctor.addcode("MOZ_COUNT_CTOR(${clsname});\n", clsname=self.clsname) self.cls.addstmts([ctor, Whitespace.NL]) # ~Actor() dtor = DestructorDefn( - DestructorDecl(self.clsname, methodspec=MethodSpec.VIRTUAL)) - dtor.addcode('MOZ_COUNT_DTOR(${clsname});\n', clsname=self.clsname) + DestructorDecl(self.clsname, methodspec=MethodSpec.VIRTUAL) + ) + dtor.addcode("MOZ_COUNT_DTOR(${clsname});\n", clsname=self.clsname) self.cls.addstmts([dtor, Whitespace.NL]) if ptype.isRefcounted(): self.cls.addcode( - ''' + """ NS_INLINE_DECL_PURE_VIRTUAL_REFCOUNTING - ''') + """ + ) self.cls.addstmt(Label.PROTECTED) self.cls.addcode( - ''' + """ void ActorAlloc() final { AddRef(); } void ActorDealloc() final { Release(); } - ''') + """ + ) self.cls.addstmt(Label.PUBLIC) if not ptype.isToplevel(): if 1 == len(p.managers): # manager() const managertype = p.managerActorType(self.side, ptr=True) - managermeth = MethodDefn(MethodDecl( - 'Manager', ret=managertype, const=True)) + managermeth = MethodDefn( + MethodDecl("Manager", ret=managertype, const=True) + ) managermeth.addcode( - ''' + """ return static_cast<${type}>(IProtocol::Manager()); - ''', - type=managertype) + """, + type=managertype, + ) self.cls.addstmts([managermeth, Whitespace.NL]) def actorFromIter(itervar): - return ExprCode('${iter}.Get()->GetKey()', iter=itervar) + return ExprCode("${iter}.Get()->GetKey()", iter=itervar) def forLoopOverHashtable(hashtable, itervar, const=False): - itermeth = 'ConstIter' if const else 'Iter' + itermeth = "ConstIter" if const else "Iter" return StmtFor( - init=ExprCode('auto ${itervar} = ${hashtable}.${itermeth}()', - itervar=itervar, - hashtable=hashtable, - itermeth=itermeth), - cond=ExprCode('!${itervar}.Done()', itervar=itervar), - update=ExprCode('${itervar}.Next()', itervar=itervar)) + init=ExprCode( + "auto ${itervar} = ${hashtable}.${itermeth}()", + itervar=itervar, + hashtable=hashtable, + itermeth=itermeth, + ), + cond=ExprCode("!${itervar}.Done()", itervar=itervar), + update=ExprCode("${itervar}.Next()", itervar=itervar), + ) # Managed[T](Array& inout) const # const Array& Managed() const for managed in ptype.manages: container = p.managedVar(managed, self.side) - meth = MethodDefn(MethodDecl( - 
p.managedMethod(managed, self.side).name, - params=[Decl(_cxxArrayType(p.managedCxxType(managed, self.side), ref=True), - 'aArr')], - const=True)) - meth.addcode('${container}.ToArray(aArr);\n', container=container) - - refmeth = MethodDefn(MethodDecl( - p.managedMethod(managed, self.side).name, - params=[], - ret=p.managedVarType(managed, self.side, const=True, ref=True), - const=True)) - refmeth.addcode('return ${container};\n', container=container) + meth = MethodDefn( + MethodDecl( + p.managedMethod(managed, self.side).name, + params=[ + Decl( + _cxxArrayType( + p.managedCxxType(managed, self.side), ref=True + ), + "aArr", + ) + ], + const=True, + ) + ) + meth.addcode("${container}.ToArray(aArr);\n", container=container) + + refmeth = MethodDefn( + MethodDecl( + p.managedMethod(managed, self.side).name, + params=[], + ret=p.managedVarType(managed, self.side, const=True, ref=True), + const=True, + ) + ) + refmeth.addcode("return ${container};\n", container=container) self.cls.addstmts([meth, refmeth, Whitespace.NL]) # AllManagedActors(Array& inout) const - arrvar = ExprVar('arr__') - managedmeth = MethodDefn(MethodDecl( - 'AllManagedActors', - params=[Decl(_cxxArrayType(_refptr(_cxxLifecycleProxyType()), ref=True), - arrvar.name)], - methodspec=MethodSpec.OVERRIDE, - const=True)) + arrvar = ExprVar("arr__") + managedmeth = MethodDefn( + MethodDecl( + "AllManagedActors", + params=[ + Decl( + _cxxArrayType(_refptr(_cxxLifecycleProxyType()), ref=True), + arrvar.name, + ) + ], + methodspec=MethodSpec.OVERRIDE, + const=True, + ) + ) # Count the number of managed actors, and allocate space in the output array. managedmeth.addcode( - ''' + """ uint32_t total = 0; - ''') + """ + ) for managed in ptype.manages: managedmeth.addcode( - ''' + """ total += ${container}.Count(); - ''', - container=p.managedVar(managed, self.side)) + """, + container=p.managedVar(managed, self.side), + ) managedmeth.addcode( - ''' + """ arr__.SetCapacity(total); - ''') + """ + ) for managed in ptype.manages: managedmeth.addcode( - ''' + """ for (auto it = ${container}.ConstIter(); !it.Done(); it.Next()) { arr__.AppendElement(it.Get()->GetKey()->GetLifecycleProxy()); } - ''', - container=p.managedVar(managed, self.side)) + """, + container=p.managedVar(managed, self.side), + ) self.cls.addstmts([managedmeth, Whitespace.NL]) @@ -3590,21 +4004,21 @@ def forLoopOverHashtable(hashtable, itervar, const=False): # OnMessageReceived()/OnCallReceived() # save these away for use in message handler case stmts - msgvar = ExprVar('msg__') + msgvar = ExprVar("msg__") self.msgvar = msgvar - replyvar = ExprVar('reply__') + replyvar = ExprVar("reply__") self.replyvar = replyvar - itervar = ExprVar('iter__') + itervar = ExprVar("iter__") self.itervar = itervar - var = ExprVar('v__') + var = ExprVar("v__") self.var = var # for ctor recv cases, we can't read the actor ID into a PFoo* # because it doesn't exist on this side yet. 
Use a "special" # actor handle instead - handlevar = ExprVar('handle__') + handlevar = ExprVar("handle__") self.handlevar = handlevar - msgtype = ExprCode('msg__.type()') + msgtype = ExprCode("msg__.type()") self.asyncSwitch = StmtSwitch(msgtype) self.syncSwitch = None self.interruptSwitch = None @@ -3620,9 +4034,10 @@ def forLoopOverHashtable(hashtable, itervar, const=False): # add default cases default = StmtCode( - ''' + """ return MsgNotKnown; - ''') + """ + ) self.asyncSwitch.addcase(DefaultLabel(), default) if toplevel.isSync() or toplevel.isInterrupt(): self.syncSwitch.addcase(DefaultLabel(), default) @@ -3632,25 +4047,31 @@ def forLoopOverHashtable(hashtable, itervar, const=False): self.cls.addstmts(self.implementManagerIface()) def makeHandlerMethod(name, switch, hasReply, dispatches=False): - params = [Decl(Type('Message', const=True, ref=True), msgvar.name)] + params = [Decl(Type("Message", const=True, ref=True), msgvar.name)] if hasReply: - params.append(Decl(Type('Message', ref=True, ptr=True), - replyvar.name)) - - method = MethodDefn(MethodDecl(name, methodspec=MethodSpec.OVERRIDE, - params=params, ret=_Result.Type())) + params.append(Decl(Type("Message", ref=True, ptr=True), replyvar.name)) + + method = MethodDefn( + MethodDecl( + name, + methodspec=MethodSpec.OVERRIDE, + params=params, + ret=_Result.Type(), + ) + ) if not switch: method.addcode( - ''' + """ MOZ_ASSERT_UNREACHABLE("message protocol not supported"); return MsgNotKnown; - ''') + """ + ) return method if dispatches: method.addcode( - ''' + """ int32_t route__ = ${msgvar}.routing_id(); if (MSG_ROUTING_CONTROL != route__) { IProtocol* routed__ = Lookup(route__); @@ -3664,12 +4085,14 @@ def makeHandlerMethod(name, switch, hasReply, dispatches=False): return proxy__->Get()->${name}($,{args}); } - ''', + """, msgvar=msgvar, - logignored=self.logMessage(None, ExprAddrOf(msgvar), - 'Ignored message for dead actor'), + logignored=self.logMessage( + None, ExprAddrOf(msgvar), "Ignored message for dead actor" + ), name=name, - args=[p.name for p in params]) + args=[p.name for p in params], + ) # bug 509581: don't generate the switch stmt if there # is only the default case; MSVC doesn't like that @@ -3680,65 +4103,89 @@ def makeHandlerMethod(name, switch, hasReply, dispatches=False): return method - dispatches = (ptype.isToplevel() and ptype.isManager()) - self.cls.addstmts([ - makeHandlerMethod('OnMessageReceived', self.asyncSwitch, - hasReply=False, dispatches=dispatches), - Whitespace.NL - ]) - self.cls.addstmts([ - makeHandlerMethod('OnMessageReceived', self.syncSwitch, - hasReply=True, dispatches=dispatches), - Whitespace.NL - ]) - self.cls.addstmts([ - makeHandlerMethod('OnCallReceived', self.interruptSwitch, - hasReply=True, dispatches=dispatches), - Whitespace.NL - ]) - - clearsubtreevar = ExprVar('ClearSubtree') + dispatches = ptype.isToplevel() and ptype.isManager() + self.cls.addstmts( + [ + makeHandlerMethod( + "OnMessageReceived", + self.asyncSwitch, + hasReply=False, + dispatches=dispatches, + ), + Whitespace.NL, + ] + ) + self.cls.addstmts( + [ + makeHandlerMethod( + "OnMessageReceived", + self.syncSwitch, + hasReply=True, + dispatches=dispatches, + ), + Whitespace.NL, + ] + ) + self.cls.addstmts( + [ + makeHandlerMethod( + "OnCallReceived", + self.interruptSwitch, + hasReply=True, + dispatches=dispatches, + ), + Whitespace.NL, + ] + ) + + clearsubtreevar = ExprVar("ClearSubtree") if ptype.isToplevel(): # OnChannelClose() - onclose = MethodDefn(MethodDecl('OnChannelClose', - 
methodspec=MethodSpec.OVERRIDE)) + onclose = MethodDefn( + MethodDecl("OnChannelClose", methodspec=MethodSpec.OVERRIDE) + ) onclose.addcode( - ''' + """ DestroySubtree(NormalShutdown); ClearSubtree(); DeallocShmems(); if (GetLifecycleProxy()) { GetLifecycleProxy()->Release(); } - ''') + """ + ) self.cls.addstmts([onclose, Whitespace.NL]) # OnChannelError() - onerror = MethodDefn(MethodDecl('OnChannelError', - methodspec=MethodSpec.OVERRIDE)) + onerror = MethodDefn( + MethodDecl("OnChannelError", methodspec=MethodSpec.OVERRIDE) + ) onerror.addcode( - ''' + """ DestroySubtree(AbnormalShutdown); ClearSubtree(); DeallocShmems(); if (GetLifecycleProxy()) { GetLifecycleProxy()->Release(); } - ''') + """ + ) self.cls.addstmts([onerror, Whitespace.NL]) - if (ptype.isToplevel() and ptype.isInterrupt()): + if ptype.isToplevel() and ptype.isInterrupt(): processnative = MethodDefn( - MethodDecl('ProcessNativeEventsInInterruptCall', ret=Type.VOID)) + MethodDecl("ProcessNativeEventsInInterruptCall", ret=Type.VOID) + ) processnative.addcode( - ''' + """ #ifdef OS_WIN GetIPCChannel()->ProcessNativeEventsInInterruptCall(); #else FatalError("This method is Windows-only"); #endif - ''') + """ + ) self.cls.addstmts([processnative, Whitespace.NL]) @@ -3749,7 +4196,7 @@ def makeHandlerMethod(name, switch, hasReply, dispatches=False): clearsubtree = MethodDefn(MethodDecl(clearsubtreevar.name)) for managed in ptype.manages: clearsubtree.addcode( - ''' + """ for (auto it = ${container}.Iter(); !it.Done(); it.Next()) { it.Get()->GetKey()->ClearSubtree(); } @@ -3760,60 +4207,80 @@ def makeHandlerMethod(name, switch, hasReply, dispatches=False): } ${container}.Clear(); - ''', - container=p.managedVar(managed, self.side)) + """, + container=p.managedVar(managed, self.side), + ) # don't release our own IPC reference: either the manager will do it, # or we're toplevel self.cls.addstmts([clearsubtree, Whitespace.NL]) for managed in ptype.manages: - self.cls.addstmts([ - StmtDecl(Decl( - p.managedVarType(managed, self.side), - p.managedVar(managed, self.side).name))]) + self.cls.addstmts( + [ + StmtDecl( + Decl( + p.managedVarType(managed, self.side), + p.managedVar(managed, self.side).name, + ) + ) + ] + ) def genManagedEndpoint(self, managed): - hereEp = 'ManagedEndpoint<%s>' % _actorName(managed.name(), self.side) - thereEp = 'ManagedEndpoint<%s>' % _actorName(managed.name(), - _otherSide(self.side)) + hereEp = "ManagedEndpoint<%s>" % _actorName(managed.name(), self.side) + thereEp = "ManagedEndpoint<%s>" % _actorName( + managed.name(), _otherSide(self.side) + ) - actor = _HybridDecl(ipdl.type.ActorType(managed), 'aActor') + actor = _HybridDecl(ipdl.type.ActorType(managed), "aActor") # ManagedEndpoint OpenPEndpoint(PHere* aActor) - openmeth = MethodDefn(MethodDecl( - 'Open%sEndpoint' % managed.name(), - params=[Decl(self.protocol.managedCxxType(managed, self.side), actor.name)], - ret=Type(thereEp))) + openmeth = MethodDefn( + MethodDecl( + "Open%sEndpoint" % managed.name(), + params=[ + Decl(self.protocol.managedCxxType(managed, self.side), actor.name) + ], + ret=Type(thereEp), + ) + ) openmeth.addcode( - ''' + """ $*{bind} return ${thereEp}(mozilla::ipc::PrivateIPDLInterface(), aActor->Id()); - ''', + """, bind=self.bindManagedActor(actor, errfn=ExprCall(ExprVar(thereEp))), - thereEp=thereEp) + thereEp=thereEp, + ) # void BindPEndpoint(ManagedEndpoint&& aEndpoint, PHere* aActor) - bindmeth = MethodDefn(MethodDecl( - 'Bind%sEndpoint' % managed.name(), - params=[Decl(Type(hereEp), 'aEndpoint'), - 
Decl(self.protocol.managedCxxType(managed, self.side), - actor.name)], - ret=Type.BOOL)) + bindmeth = MethodDefn( + MethodDecl( + "Bind%sEndpoint" % managed.name(), + params=[ + Decl(Type(hereEp), "aEndpoint"), + Decl(self.protocol.managedCxxType(managed, self.side), actor.name), + ], + ret=Type.BOOL, + ) + ) bindmeth.addcode( - ''' + """ MOZ_RELEASE_ASSERT(aEndpoint.ActorId(), "Invalid Endpoint!"); $*{bind} return true; - ''', - bind=self.bindManagedActor(actor, errfn=ExprLiteral.FALSE, - idexpr=ExprCode('*aEndpoint.ActorId()'))) + """, + bind=self.bindManagedActor( + actor, errfn=ExprLiteral.FALSE, idexpr=ExprCode("*aEndpoint.ActorId()") + ), + ) self.cls.addstmts([openmeth, bindmeth, Whitespace.NL]) def implementManagerIface(self): p = self.protocol - protocolbase = Type('IProtocol', ptr=True) + protocolbase = Type("IProtocol", ptr=True) methods = [] @@ -3823,21 +4290,25 @@ def implementManagerIface(self): # to the other if p.subtreeUsesShmem(): self.asyncSwitch.addcase( - CaseLabel('SHMEM_CREATED_MESSAGE_TYPE'), - self.genShmemCreatedHandler()) + CaseLabel("SHMEM_CREATED_MESSAGE_TYPE"), + self.genShmemCreatedHandler(), + ) self.asyncSwitch.addcase( - CaseLabel('SHMEM_DESTROYED_MESSAGE_TYPE'), - self.genShmemDestroyedHandler()) + CaseLabel("SHMEM_DESTROYED_MESSAGE_TYPE"), + self.genShmemDestroyedHandler(), + ) else: abort = StmtBlock() - abort.addstmts([ - _fatalError('this protocol tree does not use shmem'), - StmtReturn(_Result.NotKnown) - ]) - self.asyncSwitch.addcase( - CaseLabel('SHMEM_CREATED_MESSAGE_TYPE'), abort) + abort.addstmts( + [ + _fatalError("this protocol tree does not use shmem"), + StmtReturn(_Result.NotKnown), + ] + ) + self.asyncSwitch.addcase(CaseLabel("SHMEM_CREATED_MESSAGE_TYPE"), abort) self.asyncSwitch.addcase( - CaseLabel('SHMEM_DESTROYED_MESSAGE_TYPE'), abort) + CaseLabel("SHMEM_DESTROYED_MESSAGE_TYPE"), abort + ) # Keep track of types created with an INOUT ctor. 
We need to call # Register() or RegisterID() for them depending on the side the managee @@ -3849,28 +4320,35 @@ def implementManagerIface(self): inoutCtorTypes.append(msgtype.constructedType()) # all protocols share the "same" RemoveManagee() implementation - pvar = ExprVar('aProtocolId') - listenervar = ExprVar('aListener') - removemanagee = MethodDefn(MethodDecl( - p.removeManageeMethod().name, - params=[Decl(_protocolIdType(), pvar.name), - Decl(protocolbase, listenervar.name)], - methodspec=MethodSpec.OVERRIDE)) + pvar = ExprVar("aProtocolId") + listenervar = ExprVar("aListener") + removemanagee = MethodDefn( + MethodDecl( + p.removeManageeMethod().name, + params=[ + Decl(_protocolIdType(), pvar.name), + Decl(protocolbase, listenervar.name), + ], + methodspec=MethodSpec.OVERRIDE, + ) + ) if not len(p.managesStmts): removemanagee.addcode( - ''' + """ FatalError("unreached"); return; - ''') + """ + ) else: switchontype = StmtSwitch(pvar) for managee in p.managesStmts: manageeipdltype = managee.decl.type - manageecxxtype = _cxxBareType(ipdl.type.ActorType(manageeipdltype), - self.side) + manageecxxtype = _cxxBareType( + ipdl.type.ActorType(manageeipdltype), self.side + ) case = ExprCode( - ''' + """ { ${manageecxxtype} actor = static_cast<${manageecxxtype}>(aListener); auto& container = ${container}; @@ -3886,32 +4364,42 @@ def implementManagerIface(self): NS_IF_RELEASE(proxy); return; } - ''', + """, manageecxxtype=manageecxxtype, - container=p.managedVar(manageeipdltype, self.side)) - switchontype.addcase(CaseLabel(_protocolId(manageeipdltype).name), - case) - switchontype.addcase(DefaultLabel(), ExprCode( - ''' + container=p.managedVar(manageeipdltype, self.side), + ) + switchontype.addcase(CaseLabel(_protocolId(manageeipdltype).name), case) + switchontype.addcase( + DefaultLabel(), + ExprCode( + """ FatalError("unreached"); return; - ''')) + """ + ), + ) removemanagee.addstmt(switchontype) # The `DeallocManagee` method is called for managed actors to trigger # deallocation when ActorLifecycleProxy is freed. 
- deallocmanagee = MethodDefn(MethodDecl( - p.deallocManageeMethod().name, - params=[Decl(_protocolIdType(), pvar.name), - Decl(protocolbase, listenervar.name)], - methodspec=MethodSpec.OVERRIDE)) + deallocmanagee = MethodDefn( + MethodDecl( + p.deallocManageeMethod().name, + params=[ + Decl(_protocolIdType(), pvar.name), + Decl(protocolbase, listenervar.name), + ], + methodspec=MethodSpec.OVERRIDE, + ) + ) if not len(p.managesStmts): deallocmanagee.addcode( - ''' + """ FatalError("unreached"); return; - ''') + """ + ) else: switchontype = StmtSwitch(pvar) for managee in p.managesStmts: @@ -3923,20 +4411,24 @@ def implementManagerIface(self): continue case = StmtCode( - ''' + """ ${concrete}->${dealloc}(static_cast<${type}>(aListener)); return; - ''', + """, concrete=self.concreteThis(), dealloc=_deallocMethod(manageeipdltype, self.side), - type=_cxxBareType(ipdl.type.ActorType(manageeipdltype), self.side)) - switchontype.addcase(CaseLabel(_protocolId(manageeipdltype).name), - case) - switchontype.addcase(DefaultLabel(), StmtCode( - ''' + type=_cxxBareType(ipdl.type.ActorType(manageeipdltype), self.side), + ) + switchontype.addcase(CaseLabel(_protocolId(manageeipdltype).name), case) + switchontype.addcase( + DefaultLabel(), + StmtCode( + """ FatalError("unreached"); return; - ''')) + """ + ), + ) deallocmanagee.addstmt(switchontype) return methods + [removemanagee, deallocmanagee, Whitespace.NL] @@ -3945,29 +4437,31 @@ def genShmemCreatedHandler(self): assert self.protocol.decl.type.isToplevel() return StmtCode( - ''' + """ { if (!ShmemCreated(${msgvar})) { return MsgPayloadError; } return MsgProcessed; } - ''', - msgvar=self.msgvar) + """, + msgvar=self.msgvar, + ) def genShmemDestroyedHandler(self): assert self.protocol.decl.type.isToplevel() return StmtCode( - ''' + """ { if (!ShmemDestroyed(${msgvar})) { return MsgPayloadError; } return MsgProcessed; } - ''', - msgvar=self.msgvar) + """, + msgvar=self.msgvar, + ) # ------------------------------------------------------------------------- # The next few functions are the crux of the IPDL code generator. @@ -3986,11 +4480,10 @@ def concreteThis(self): assert self.protocol.name.startswith("P") class_name = "{}{}".format(self.protocol.name[1:], self.side.capitalize()) - return ExprCode('static_cast<${class_name}*>(this)', class_name=class_name) + return ExprCode("static_cast<${class_name}*>(this)", class_name=class_name) def thisCall(self, function, args): - return ExprCall(ExprSelect(self.concreteThis(), '->', function), - args=args) + return ExprCall(ExprSelect(self.concreteThis(), "->", function), args=args) def visitMessageDecl(self, md): isctor = md.decl.type.isCtor() @@ -4035,8 +4528,12 @@ def addRecvCase(lbl, case): elif isdtor: sendmethod = self.genBlockingDtorMethod(md) elif isasync: - sendmethod, movesendmethod, promisesendmethod, (recvlbl, recvcase) = \ - self.genAsyncSendMethod(md) + ( + sendmethod, + movesendmethod, + promisesendmethod, + (recvlbl, recvcase), + ) = self.genAsyncSendMethod(md) else: sendmethod, movesendmethod = self.genBlockingSendMethod(md) @@ -4076,7 +4573,7 @@ def genAsyncCtor(self, md): sendok, sendstmts = self.sendAsync(md, msgvar) method.addcode( - ''' + """ $*{bind} // Build our constructor message & verify it. 
@@ -4099,17 +4596,20 @@ def genAsyncCtor(self, md): return nullptr; } return ${actor}; - ''', + """, bind=self.bindManagedActor(actor), stmts=stmts, - verify=self.genVerifyMessage(md.decl.type.verify, md.params, - errfnSendCtor, ExprVar('msg__')), + verify=self.genVerifyMessage( + md.decl.type.verify, md.params, errfnSendCtor, ExprVar("msg__") + ), sendstmts=sendstmts, sendok=sendok, - destroy=self.destroyActor(md, actor.var(), - why=_DestroyReason.FailedConstructor), + destroy=self.destroyActor( + md, actor.var(), why=_DestroyReason.FailedConstructor + ), actor=actor.var(), - actorname=actor.ipdltype.protocol.name() + self.side.capitalize()) + actorname=actor.ipdltype.protocol.name() + self.side.capitalize(), + ) lbl = CaseLabel(md.pqReplyId()) case = StmtBlock() @@ -4124,17 +4624,22 @@ def genBlockingCtorMethod(self, md): method = MethodDefn(self.makeSendMethodDecl(md)) msgvar, stmts = self.makeMessage(md, errfnSendCtor) - verify = self.genVerifyMessage(md.decl.type.verify, md.params, - errfnSendCtor, ExprVar('msg__')) + verify = self.genVerifyMessage( + md.decl.type.verify, md.params, errfnSendCtor, ExprVar("msg__") + ) replyvar = self.replyvar sendok, sendstmts = self.sendBlocking(md, msgvar, replyvar) replystmts = self.deserializeReply( - md, ExprAddrOf(replyvar), self.side, - errfnSendCtor, errfnSentinel(ExprLiteral.NULL)) + md, + ExprAddrOf(replyvar), + self.side, + errfnSendCtor, + errfnSentinel(ExprLiteral.NULL), + ) method.addcode( - ''' + """ $*{bind} // Build our constructor message & verify it. @@ -4157,18 +4662,20 @@ def genBlockingCtorMethod(self, md): $*{replystmts} return ${actor}; - ''', + """, bind=self.bindManagedActor(actor), stmts=stmts, verify=verify, replyvar=replyvar, sendstmts=sendstmts, sendok=sendok, - destroy=self.destroyActor(md, actor.var(), - why=_DestroyReason.FailedConstructor), + destroy=self.destroyActor( + md, actor.var(), why=_DestroyReason.FailedConstructor + ), replystmts=replystmts, actor=actor.var(), - actorname=actor.ipdltype.protocol.name() + self.side.capitalize()) + actorname=actor.ipdltype.protocol.name() + self.side.capitalize(), + ) return method @@ -4180,8 +4687,9 @@ def bindManagedActor(self, actordecl, errfn=ExprLiteral.NULL, idexpr=None): else: setManagerArgs = [ExprVar.THIS, idexpr] - return [StmtCode( - ''' + return [ + StmtCode( + """ if (!${actor}) { NS_WARNING("Cannot bind null ${actorname} actor"); return ${errfn}; @@ -4189,22 +4697,26 @@ def bindManagedActor(self, actordecl, errfn=ExprLiteral.NULL, idexpr=None): ${actor}->SetManagerAndRegister($,{setManagerArgs}); ${container}.PutEntry(${actor}); - ''', - actor=actordecl.var(), - actorname=actorproto.name() + self.side.capitalize(), - errfn=errfn, - setManagerArgs=setManagerArgs, - container=self.protocol.managedVar(actorproto, self.side))] + """, + actor=actordecl.var(), + actorname=actorproto.name() + self.side.capitalize(), + errfn=errfn, + setManagerArgs=setManagerArgs, + container=self.protocol.managedVar(actorproto, self.side), + ) + ] def genHelperCtor(self, md): helperdecl = self.makeSendMethodDecl(md) helperdecl.params = helperdecl.params[1:] helper = MethodDefn(helperdecl) - helper.addstmts([ - self.callAllocActor(md, retsems='out', side=self.side), - StmtReturn(ExprCall(ExprVar(helperdecl.name), args=md.makeCxxArgs())), - ]) + helper.addstmts( + [ + self.callAllocActor(md, retsems="out", side=self.side), + StmtReturn(ExprCall(ExprVar(helperdecl.name), args=md.makeCxxArgs())), + ] + ) return helper def genAsyncDtor(self, md): @@ -4218,12 +4730,14 @@ def 
genAsyncDtor(self, md): sendok, sendstmts = self.sendAsync(md, msgvar, actorvar) method.addstmts( stmts - + self.genVerifyMessage(md.decl.type.verify, md.params, - errfnSendDtor, ExprVar('msg__')) + + self.genVerifyMessage( + md.decl.type.verify, md.params, errfnSendDtor, ExprVar("msg__") + ) + sendstmts + [Whitespace.NL] + self.dtorEpilogue(md, actor.var()) - + [StmtReturn(sendok)]) + + [StmtReturn(sendok)] + ) lbl = CaseLabel(md.pqReplyId()) case = StmtBlock() @@ -4246,25 +4760,27 @@ def genBlockingDtorMethod(self, md): sendok, sendstmts = self.sendBlocking(md, msgvar, replyvar, actorvar) method.addstmts( stmts - + self.genVerifyMessage(md.decl.type.verify, md.params, - errfnSendDtor, ExprVar('msg__')) - + [Whitespace.NL, - StmtDecl(Decl(Type('Message'), replyvar.name))] - + sendstmts) + + self.genVerifyMessage( + md.decl.type.verify, md.params, errfnSendDtor, ExprVar("msg__") + ) + + [Whitespace.NL, StmtDecl(Decl(Type("Message"), replyvar.name))] + + sendstmts + ) destmts = self.deserializeReply( - md, ExprAddrOf(replyvar), self.side, errfnSend, - errfnSentinel(), actorvar) + md, ExprAddrOf(replyvar), self.side, errfnSend, errfnSentinel(), actorvar + ) ifsendok = StmtIf(ExprLiteral.FALSE) ifsendok.addifstmts(destmts) - ifsendok.addifstmts([Whitespace.NL, - StmtExpr(ExprAssn(sendok, ExprLiteral.FALSE, '&='))]) + ifsendok.addifstmts( + [Whitespace.NL, StmtExpr(ExprAssn(sendok, ExprLiteral.FALSE, "&="))] + ) method.addstmt(ifsendok) method.addstmts( - self.dtorEpilogue(md, actor.var()) - + [Whitespace.NL, StmtReturn(sendok)]) + self.dtorEpilogue(md, actor.var()) + [Whitespace.NL, StmtReturn(sendok)] + ) return method @@ -4274,26 +4790,30 @@ def destroyActor(self, md, actorexpr, why=_DestroyReason.Deletion): else: destroyedType = self.protocol.decl.type - return [StmtCode( - ''' + return [ + StmtCode( + """ IProtocol* mgr = ${actor}->Manager(); ${actor}->DestroySubtree(${why}); ${actor}->ClearSubtree(); mgr->RemoveManagee(${protoId}, ${actor}); - ''', - actor=actorexpr, - why=why, - protoId=_protocolId(destroyedType))] + """, + actor=actorexpr, + why=why, + protoId=_protocolId(destroyedType), + ) + ] def dtorPrologue(self, actorexpr): return StmtCode( - ''' + """ if (!${actor} || !${actor}->CanSend()) { NS_WARNING("Attempt to __delete__ missing or closed actor"); return false; } - ''', - actor=actorexpr) + """, + actor=actorexpr, + ) def dtorEpilogue(self, md, actorexpr): return self.destroyActor(md, actorexpr) @@ -4302,18 +4822,20 @@ def genRecvAsyncReplyCase(self, md): lbl = CaseLabel(md.pqReplyId()) case = StmtBlock() resolve, reason, prologue, desrej, desstmts = self.deserializeAsyncReply( - md, self.side, errfnRecv, errfnSentinel(_Result.ValuError)) + md, self.side, errfnRecv, errfnSentinel(_Result.ValuError) + ) if len(md.returns) > 1: resolvetype = _tuple([d.bareType(self.side) for d in md.returns]) - resolvearg = ExprCall(ExprVar('MakeTuple'), - args=[ExprMove(p.var()) for p in md.returns]) + resolvearg = ExprCall( + ExprVar("MakeTuple"), args=[ExprMove(p.var()) for p in md.returns] + ) else: resolvetype = md.returns[0].bareType(self.side) resolvearg = ExprMove(md.returns[0].var()) case.addcode( - ''' + """ $*{prologue} UniquePtr untypedCallback = @@ -4334,7 +4856,7 @@ def genRecvAsyncReplyCase(self, md): callback->Reject(std::move(${reason})); } return MsgProcessed; - ''', + """, prologue=prologue, msgvar=self.msgvar, resolve=resolve, @@ -4342,7 +4864,8 @@ def genRecvAsyncReplyCase(self, md): desstmts=desstmts, resolvearg=resolvearg, desrej=desrej, - reason=reason) + reason=reason, 
+ ) return (lbl, case) @@ -4351,12 +4874,15 @@ def genAsyncSendMethod(self, md): msgvar, stmts = self.makeMessage(md, errfnSend) retvar, sendstmts = self.sendAsync(md, msgvar) - method.addstmts(stmts - + [Whitespace.NL] - + self.genVerifyMessage(md.decl.type.verify, md.params, - errfnSend, ExprVar('msg__')) - + sendstmts - + [StmtReturn(retvar)]) + method.addstmts( + stmts + + [Whitespace.NL] + + self.genVerifyMessage( + md.decl.type.verify, md.params, errfnSend, ExprVar("msg__") + ) + + sendstmts + + [StmtReturn(retvar)] + ) movemethod = None @@ -4383,19 +4909,20 @@ def genBlockingSendMethod(self, md, fromActor=None): failif.addifstmt(StmtReturn.FALSE) desstmts = self.deserializeReply( - md, ExprAddrOf(replyvar), self.side, errfnSend, errfnSentinel()) + md, ExprAddrOf(replyvar), self.side, errfnSend, errfnSentinel() + ) method.addstmts( serstmts - + self.genVerifyMessage(md.decl.type.verify, md.params, errfnSend, - ExprVar('msg__')) - + [Whitespace.NL, - StmtDecl(Decl(Type('Message'), replyvar.name))] + + self.genVerifyMessage( + md.decl.type.verify, md.params, errfnSend, ExprVar("msg__") + ) + + [Whitespace.NL, StmtDecl(Decl(Type("Message"), replyvar.name))] + sendstmts + [failif] + desstmts - + [Whitespace.NL, - StmtReturn.TRUE]) + + [Whitespace.NL, StmtReturn.TRUE] + ) movemethod = None @@ -4406,26 +4933,31 @@ def genCtorRecvCase(self, md): case = StmtBlock() actorhandle = self.handlevar - stmts = self.deserializeMessage(md, self.side, errfnRecv, - errfnSent=errfnSentinel(_Result.ValuError)) + stmts = self.deserializeMessage( + md, self.side, errfnRecv, errfnSent=errfnSentinel(_Result.ValuError) + ) idvar, saveIdStmts = self.saveActorId(md) case.addstmts( stmts - + [StmtDecl(Decl(r.bareType(self.side), r.var().name), initargs=[]) - for r in md.returns] + + [ + StmtDecl(Decl(r.bareType(self.side), r.var().name), initargs=[]) + for r in md.returns + ] # alloc the actor, register it under the foreign ID - + [self.callAllocActor(md, retsems='in', side=self.side)] - + self.bindManagedActor(md.actorDecl(), errfn=_Result.ValuError, - idexpr=_actorHId(actorhandle)) + + [self.callAllocActor(md, retsems="in", side=self.side)] + + self.bindManagedActor( + md.actorDecl(), errfn=_Result.ValuError, idexpr=_actorHId(actorhandle) + ) + [Whitespace.NL] + saveIdStmts + self.invokeRecvHandler(md) + self.makeReply(md, errfnRecv, idvar) - + self.genVerifyMessage(md.decl.type.verify, md.returns, errfnRecv, - self.replyvar) - + [Whitespace.NL, - StmtReturn(_Result.Processed)]) + + self.genVerifyMessage( + md.decl.type.verify, md.returns, errfnRecv, self.replyvar + ) + + [Whitespace.NL, StmtReturn(_Result.Processed)] + ) return lbl, case @@ -4433,24 +4965,28 @@ def genDtorRecvCase(self, md): lbl = CaseLabel(md.pqMsgId()) case = StmtBlock() - stmts = self.deserializeMessage(md, self.side, errfnRecv, - errfnSent=errfnSentinel(_Result.ValuError)) + stmts = self.deserializeMessage( + md, self.side, errfnRecv, errfnSent=errfnSentinel(_Result.ValuError) + ) idvar, saveIdStmts = self.saveActorId(md) case.addstmts( stmts - + [StmtDecl(Decl(r.bareType(self.side), r.var().name), initargs=[]) - for r in md.returns] + + [ + StmtDecl(Decl(r.bareType(self.side), r.var().name), initargs=[]) + for r in md.returns + ] + self.invokeRecvHandler(md, implicit=False) + [Whitespace.NL] + saveIdStmts + self.makeReply(md, errfnRecv, routingId=idvar) + [Whitespace.NL] - + self.genVerifyMessage(md.decl.type.verify, md.returns, errfnRecv, - self.replyvar) + + self.genVerifyMessage( + md.decl.type.verify, md.returns, errfnRecv, 
self.replyvar + ) + self.dtorEpilogue(md, md.actorDecl().var()) - + [Whitespace.NL, - StmtReturn(_Result.Processed)]) + + [Whitespace.NL, StmtReturn(_Result.Processed)] + ) return lbl, case @@ -4458,12 +4994,15 @@ def genRecvCase(self, md): lbl = CaseLabel(md.pqMsgId()) case = StmtBlock() - stmts = self.deserializeMessage(md, self.side, errfn=errfnRecv, - errfnSent=errfnSentinel(_Result.ValuError)) + stmts = self.deserializeMessage( + md, self.side, errfn=errfnRecv, errfnSent=errfnSentinel(_Result.ValuError) + ) idvar, saveIdStmts = self.saveActorId(md) - declstmts = [StmtDecl(Decl(r.bareType(self.side), r.var().name), initargs=[]) - for r in md.returns] + declstmts = [ + StmtDecl(Decl(r.bareType(self.side), r.var().name), initargs=[]) + for r in md.returns + ] if md.decl.type.isAsync() and md.returns: declstmts = self.makeResolver(md, errfnRecv, routingId=idvar) case.addstmts( @@ -4473,9 +5012,11 @@ def genRecvCase(self, md): + self.invokeRecvHandler(md) + [Whitespace.NL] + self.makeReply(md, errfnRecv, routingId=idvar) - + self.genVerifyMessage(md.decl.type.verify, md.returns, errfnRecv, - self.replyvar) - + [StmtReturn(_Result.Processed)]) + + self.genVerifyMessage( + md.decl.type.verify, md.returns, errfnRecv, self.replyvar + ) + + [StmtReturn(_Result.Processed)] + ) return lbl, case @@ -4488,15 +5029,23 @@ def makeMessage(self, md, errfn, fromActor=None): if md.decl.type.isDtor(): this = md.actorDecl().var() - stmts = ([StmtDecl(Decl(Type('IPC::Message', ptr=True), msgvar.name), - init=ExprCall(ExprVar(md.pqMsgCtorFunc()), - args=[routingId]))] - + [Whitespace.NL] - + [_ParamTraits.checkedWrite(p.ipdltype, p.var(), msgvar, - sentinelKey=p.name, actor=this) - for p in md.params] - + [Whitespace.NL] - + self.setMessageFlags(md, msgvar)) + stmts = ( + [ + StmtDecl( + Decl(Type("IPC::Message", ptr=True), msgvar.name), + init=ExprCall(ExprVar(md.pqMsgCtorFunc()), args=[routingId]), + ) + ] + + [Whitespace.NL] + + [ + _ParamTraits.checkedWrite( + p.ipdltype, p.var(), msgvar, sentinelKey=p.name, actor=this + ) + for p in md.params + ] + + [Whitespace.NL] + + self.setMessageFlags(md, msgvar) + ) return msgvar, stmts def makeResolver(self, md, errfn, routingId): @@ -4505,58 +5054,105 @@ def makeResolver(self, md, errfn, routingId): if not md.decl.type.isAsync() or not md.hasReply(): return [] - sendok = ExprVar('sendok__') - seqno = ExprVar('seqno__') - resolve = ExprVar('resolve__') + sendok = ExprVar("sendok__") + seqno = ExprVar("seqno__") + resolve = ExprVar("resolve__") resolvertype = Type(md.resolverName()) failifsendok = StmtIf(ExprNot(sendok)) - failifsendok.addifstmt(_printWarningMessage('Error sending reply')) - sendmsg = (self.setMessageFlags(md, self.replyvar, seqno=seqno) - + [self.logMessage(md, self.replyvar, 'Sending reply '), - StmtDecl(Decl(Type.BOOL, sendok.name), - init=ExprCall(ExprVar('ChannelSend'), - args=[self.replyvar])), - failifsendok]) - selfvar = ExprVar('self__') + failifsendok.addifstmt(_printWarningMessage("Error sending reply")) + sendmsg = self.setMessageFlags(md, self.replyvar, seqno=seqno) + [ + self.logMessage(md, self.replyvar, "Sending reply "), + StmtDecl( + Decl(Type.BOOL, sendok.name), + init=ExprCall(ExprVar("ChannelSend"), args=[self.replyvar]), + ), + failifsendok, + ] + selfvar = ExprVar("self__") ifactorisdead = StmtIf(ExprNot(selfvar)) - ifactorisdead.addifstmts([ - _printWarningMessage("Not resolving response because actor is dead."), - StmtReturn()]) - resolverfn = ExprLambda([ExprVar.THIS, selfvar, routingId, seqno], - [Decl(Type.AUTORVAL, 
'aParam')]) - resolverfn.addstmts([ifactorisdead] - + [StmtDecl(Decl(Type.BOOL, resolve.name), - init=ExprLiteral.TRUE)]) + ifactorisdead.addifstmts( + [ + _printWarningMessage("Not resolving response because actor is dead."), + StmtReturn(), + ] + ) + resolverfn = ExprLambda( + [ExprVar.THIS, selfvar, routingId, seqno], [Decl(Type.AUTORVAL, "aParam")] + ) + resolverfn.addstmts( + [ifactorisdead] + + [StmtDecl(Decl(Type.BOOL, resolve.name), init=ExprLiteral.TRUE)] + ) fwdparam = ExprCode("std::forward(aParam)") if len(md.returns) > 1: - resolverfn.addstmts([StmtDecl(Decl(p.bareType(self.side), p.var().name), initargs=[]) - for p in md.returns] - + [StmtExpr(ExprAssn(ExprCall(ExprVar('Tie'), - args=[p.var() for p in md.returns]), - fwdparam))]) + resolverfn.addstmts( + [ + StmtDecl(Decl(p.bareType(self.side), p.var().name), initargs=[]) + for p in md.returns + ] + + [ + StmtExpr( + ExprAssn( + ExprCall( + ExprVar("Tie"), args=[p.var() for p in md.returns] + ), + fwdparam, + ) + ) + ] + ) else: - resolverfn.addstmts([StmtDecl(Decl(md.returns[0].bareType(self.side), - md.returns[0].var().name), - init=fwdparam)]) - resolverfn.addstmts([StmtDecl(Decl(Type('IPC::Message', ptr=True), - self.replyvar.name), - init=ExprCall(ExprVar(md.pqReplyCtorFunc()), - args=[routingId]))] - + [_ParamTraits.checkedWrite(None, resolve, self.replyvar, - sentinelKey=resolve.name, actor=selfvar)] - + [_ParamTraits.checkedWrite(r.ipdltype, ExprMove(r.var()), self.replyvar, - sentinelKey=r.name, actor=selfvar) - for r in md.returns]) + resolverfn.addstmts( + [ + StmtDecl( + Decl( + md.returns[0].bareType(self.side), md.returns[0].var().name + ), + init=fwdparam, + ) + ] + ) + resolverfn.addstmts( + [ + StmtDecl( + Decl(Type("IPC::Message", ptr=True), self.replyvar.name), + init=ExprCall(ExprVar(md.pqReplyCtorFunc()), args=[routingId]), + ) + ] + + [ + _ParamTraits.checkedWrite( + None, + resolve, + self.replyvar, + sentinelKey=resolve.name, + actor=selfvar, + ) + ] + + [ + _ParamTraits.checkedWrite( + r.ipdltype, + ExprMove(r.var()), + self.replyvar, + sentinelKey=r.name, + actor=selfvar, + ) + for r in md.returns + ] + ) resolverfn.addstmts(sendmsg) - makeresolver = [Whitespace.NL, - StmtDecl(Decl(Type.INT32, seqno.name), - init=ExprCall(ExprSelect(self.msgvar, '.', 'seqno'))), - StmtDecl(Decl(Type('WeakPtr', T=ExprVar(self.clsname)), - selfvar.name), - init=ExprVar.THIS), - StmtDecl(Decl(resolvertype, 'resolver'), - init=resolverfn)] + makeresolver = [ + Whitespace.NL, + StmtDecl( + Decl(Type.INT32, seqno.name), + init=ExprCall(ExprSelect(self.msgvar, ".", "seqno")), + ), + StmtDecl( + Decl(Type("WeakPtr", T=ExprVar(self.clsname)), selfvar.name), + init=ExprVar.THIS, + ), + StmtDecl(Decl(resolvertype, "resolver"), init=resolverfn), + ] return makeresolver def makeReply(self, md, errfn, routingId): @@ -4570,14 +5166,28 @@ def makeReply(self, md, errfn, routingId): replyvar = self.replyvar return ( - [StmtExpr(ExprAssn( - replyvar, ExprCall(ExprVar(md.pqReplyCtorFunc()), args=[routingId]))), - Whitespace.NL] - + [_ParamTraits.checkedWrite(r.ipdltype, r.var(), replyvar, - sentinelKey=r.name, actor=ExprVar.THIS) - for r in md.returns] + [ + StmtExpr( + ExprAssn( + replyvar, + ExprCall(ExprVar(md.pqReplyCtorFunc()), args=[routingId]), + ) + ), + Whitespace.NL, + ] + + [ + _ParamTraits.checkedWrite( + r.ipdltype, + r.var(), + replyvar, + sentinelKey=r.name, + actor=ExprVar.THIS, + ) + for r in md.returns + ] + self.setMessageFlags(md, replyvar) - + [self.logMessage(md, replyvar, 'Sending reply ')]) + + 
[self.logMessage(md, replyvar, "Sending reply ")] + ) def genVerifyMessage(self, verify, params, errfn, msgsrcVar): stmts = [] @@ -4586,37 +5196,50 @@ def genVerifyMessage(self, verify, params, errfn, msgsrcVar): if len(params) == 0: return stmts - msgvar = ExprVar('msgverify__') + msgvar = ExprVar("msgverify__") side = self.side msgexpr = ExprAddrOf(msgvar) - itervar = ExprVar('msgverifyIter__') + itervar = ExprVar("msgverifyIter__") # IPC::Message msgverify__ = Move(*(reply__)); or # IPC::Message msgverify__ = Move(*(msg__)); - stmts.append(StmtDecl(Decl(Type('IPC::Message', ptr=False), 'msgverify__'), - init=ExprMove(ExprDeref(msgsrcVar)))) - - stmts.extend(( - # PickleIterator msgverifyIter__(msgverify__); - [StmtDecl(Decl(_iterType(ptr=False), itervar.name), - initargs=[msgvar])] - # declare varCopy for each variable to deserialize. - + [StmtDecl(Decl(p.bareType(side), p.var().name + 'Copy'), initargs=[]) - for p in params] - + [Whitespace.NL] - # checked Read(&(varCopy), &(msgverify__), &(msgverifyIter__)) - + [_ParamTraits.checkedRead(p.ipdltype, - ExprAddrOf(ExprVar(p.var().name + 'Copy')), - msgexpr, ExprAddrOf(itervar), - errfn, p.ipdltype.name(), - sentinelKey=p.name, - errfnSentinel=errfnSentinel(), - actor=ExprVar.THIS) - for p in params] - + [self.endRead(msgvar, itervar)] - # Move the message back to its source before sending. - + [StmtExpr(ExprAssn(ExprDeref(msgsrcVar), ExprMove(msgvar)))] - )) + stmts.append( + StmtDecl( + Decl(Type("IPC::Message", ptr=False), "msgverify__"), + init=ExprMove(ExprDeref(msgsrcVar)), + ) + ) + + stmts.extend( + ( + # PickleIterator msgverifyIter__(msgverify__); + [StmtDecl(Decl(_iterType(ptr=False), itervar.name), initargs=[msgvar])] + # declare varCopy for each variable to deserialize. + + [ + StmtDecl(Decl(p.bareType(side), p.var().name + "Copy"), initargs=[]) + for p in params + ] + + [Whitespace.NL] + # checked Read(&(varCopy), &(msgverify__), &(msgverifyIter__)) + + [ + _ParamTraits.checkedRead( + p.ipdltype, + ExprAddrOf(ExprVar(p.var().name + "Copy")), + msgexpr, + ExprAddrOf(itervar), + errfn, + p.ipdltype.name(), + sentinelKey=p.name, + errfnSentinel=errfnSentinel(), + actor=ExprVar.THIS, + ) + for p in params + ] + + [self.endRead(msgvar, itervar)] + # Move the message back to its source before sending. 
+ + [StmtExpr(ExprAssn(ExprDeref(msgsrcVar), ExprMove(msgvar)))] + ) + ) return stmts @@ -4624,9 +5247,9 @@ def setMessageFlags(self, md, var, seqno=None): stmts = [] if seqno: - stmts.append(StmtExpr(ExprCall( - ExprSelect(var, '->', 'set_seqno'), - args=[seqno]))) + stmts.append( + StmtExpr(ExprCall(ExprSelect(var, "->", "set_seqno"), args=[seqno])) + ) return stmts + [Whitespace.NL] @@ -4635,12 +5258,11 @@ def deserializeMessage(self, md, side, errfn, errfnSent): itervar = self.itervar msgexpr = ExprAddrOf(msgvar) isctor = md.decl.type.isCtor() - stmts = ([ - self.logMessage(md, msgexpr, 'Received ', - receiving=True), + stmts = [ + self.logMessage(md, msgexpr, "Received ", receiving=True), self.profilerLabel(md), - Whitespace.NL - ]) + Whitespace.NL, + ] if 0 == len(md.params): return stmts @@ -4650,36 +5272,69 @@ def deserializeMessage(self, md, side, errfn, errfnSent): # return the raw actor handle so that its ID can be used # to construct the "real" actor handlevar = self.handlevar - handletype = Type('ActorHandle') + handletype = Type("ActorHandle") decls = [StmtDecl(Decl(handletype, handlevar.name), initargs=[])] - reads = [_ParamTraits.checkedRead(None, ExprAddrOf(handlevar), msgexpr, - ExprAddrOf(self.itervar), - errfn, "'%s'" % handletype.name, - sentinelKey='actor', errfnSentinel=errfnSent, - actor=ExprVar.THIS)] + reads = [ + _ParamTraits.checkedRead( + None, + ExprAddrOf(handlevar), + msgexpr, + ExprAddrOf(self.itervar), + errfn, + "'%s'" % handletype.name, + sentinelKey="actor", + errfnSentinel=errfnSent, + actor=ExprVar.THIS, + ) + ] start = 1 - decls.extend([StmtDecl(Decl( - (Type('Tainted', T=p.bareType(side)) - if md.decl.type.tainted else - p.bareType(side)), - p.var().name), initargs=[]) - for p in md.params[start:]]) - reads.extend([_ParamTraits.checkedRead(p.ipdltype, - ExprAddrOf(p.var()), - msgexpr, ExprAddrOf(itervar), - errfn, "'%s'" % p.ipdltype.name(), - sentinelKey=p.name, errfnSentinel=errfnSent, - actor=ExprVar.THIS) - for p in md.params[start:]]) - - stmts.extend(( - [StmtDecl(Decl(_iterType(ptr=False), self.itervar.name), - initargs=[msgvar])] - + decls - + [Whitespace.NL] - + reads - + [self.endRead(msgvar, itervar)])) + decls.extend( + [ + StmtDecl( + Decl( + ( + Type("Tainted", T=p.bareType(side)) + if md.decl.type.tainted + else p.bareType(side) + ), + p.var().name, + ), + initargs=[], + ) + for p in md.params[start:] + ] + ) + reads.extend( + [ + _ParamTraits.checkedRead( + p.ipdltype, + ExprAddrOf(p.var()), + msgexpr, + ExprAddrOf(itervar), + errfn, + "'%s'" % p.ipdltype.name(), + sentinelKey=p.name, + errfnSentinel=errfnSent, + actor=ExprVar.THIS, + ) + for p in md.params[start:] + ] + ) + + stmts.extend( + ( + [ + StmtDecl( + Decl(_iterType(ptr=False), self.itervar.name), initargs=[msgvar] + ) + ] + + decls + + [Whitespace.NL] + + reads + + [self.endRead(msgvar, itervar)] + ) + ) return stmts @@ -4688,139 +5343,213 @@ def deserializeAsyncReply(self, md, side, errfn, errfnSent): itervar = self.itervar msgexpr = ExprAddrOf(msgvar) isctor = md.decl.type.isCtor() - resolve = ExprVar('resolve__') - reason = ExprVar('reason__') - desresolve = [StmtDecl(Decl(Type.BOOL, resolve.name), init=ExprLiteral.FALSE), - _ParamTraits.checkedRead(None, ExprAddrOf(resolve), msgexpr, - ExprAddrOf(itervar), - errfn, "'%s'" % resolve.name, - sentinelKey=resolve.name, errfnSentinel=errfnSent, - actor=ExprVar.THIS)] - desrej = [StmtDecl(Decl(_ResponseRejectReason.Type(), reason.name), initargs=[]), - _ParamTraits.checkedRead(None, ExprAddrOf(reason), msgexpr, - 
ExprAddrOf(itervar), - errfn, "'%s'" % reason.name, - sentinelKey=reason.name, errfnSentinel=errfnSent, - actor=ExprVar.THIS), - self.endRead(msgvar, itervar)] - prologue = ([ - self.logMessage(md, msgexpr, 'Received ', - receiving=True), + resolve = ExprVar("resolve__") + reason = ExprVar("reason__") + desresolve = [ + StmtDecl(Decl(Type.BOOL, resolve.name), init=ExprLiteral.FALSE), + _ParamTraits.checkedRead( + None, + ExprAddrOf(resolve), + msgexpr, + ExprAddrOf(itervar), + errfn, + "'%s'" % resolve.name, + sentinelKey=resolve.name, + errfnSentinel=errfnSent, + actor=ExprVar.THIS, + ), + ] + desrej = [ + StmtDecl(Decl(_ResponseRejectReason.Type(), reason.name), initargs=[]), + _ParamTraits.checkedRead( + None, + ExprAddrOf(reason), + msgexpr, + ExprAddrOf(itervar), + errfn, + "'%s'" % reason.name, + sentinelKey=reason.name, + errfnSentinel=errfnSent, + actor=ExprVar.THIS, + ), + self.endRead(msgvar, itervar), + ] + prologue = [ + self.logMessage(md, msgexpr, "Received ", receiving=True), self.profilerLabel(md), - Whitespace.NL - ]) + Whitespace.NL, + ] if not md.returns: return prologue - prologue.extend([StmtDecl(Decl(_iterType(ptr=False), itervar.name), - initargs=[msgvar])] - + desresolve) + prologue.extend( + [StmtDecl(Decl(_iterType(ptr=False), itervar.name), initargs=[msgvar])] + + desresolve + ) start, decls, reads = 0, [], [] if isctor: # return the raw actor handle so that its ID can be used # to construct the "real" actor handlevar = self.handlevar - handletype = Type('ActorHandle') + handletype = Type("ActorHandle") decls = [StmtDecl(Decl(handletype, handlevar.name), initargs=[])] - reads = [_ParamTraits.checkedRead(None, ExprAddrOf(handlevar), msgexpr, - ExprAddrOf(itervar), - errfn, "'%s'" % handletype.name, - sentinelKey='actor', errfnSentinel=errfnSent, - actor=ExprVar.THIS)] + reads = [ + _ParamTraits.checkedRead( + None, + ExprAddrOf(handlevar), + msgexpr, + ExprAddrOf(itervar), + errfn, + "'%s'" % handletype.name, + sentinelKey="actor", + errfnSentinel=errfnSent, + actor=ExprVar.THIS, + ) + ] start = 1 stmts = ( - decls + [StmtDecl(Decl(p.bareType(side), p.var().name), initargs=[]) - for p in md.returns] + decls + + [ + StmtDecl(Decl(p.bareType(side), p.var().name), initargs=[]) + for p in md.returns + ] + [Whitespace.NL] - + reads + [_ParamTraits.checkedRead(p.ipdltype, ExprAddrOf(p.var()), - msgexpr, ExprAddrOf(itervar), - errfn, "'%s'" % p.ipdltype.name(), - sentinelKey=p.name, errfnSentinel=errfnSent, - actor=ExprVar.THIS) - for p in md.returns[start:]] - + [self.endRead(msgvar, itervar)]) + + reads + + [ + _ParamTraits.checkedRead( + p.ipdltype, + ExprAddrOf(p.var()), + msgexpr, + ExprAddrOf(itervar), + errfn, + "'%s'" % p.ipdltype.name(), + sentinelKey=p.name, + errfnSentinel=errfnSent, + actor=ExprVar.THIS, + ) + for p in md.returns[start:] + ] + + [self.endRead(msgvar, itervar)] + ) return resolve, reason, prologue, desrej, stmts - def deserializeReply(self, md, replyexpr, side, errfn, errfnSentinel, actor=None, decls=False): - stmts = [Whitespace.NL, - self.logMessage(md, replyexpr, - 'Received reply ', actor, receiving=True)] + def deserializeReply( + self, md, replyexpr, side, errfn, errfnSentinel, actor=None, decls=False + ): + stmts = [ + Whitespace.NL, + self.logMessage(md, replyexpr, "Received reply ", actor, receiving=True), + ] if 0 == len(md.returns): return stmts itervar = self.itervar declstmts = [] if decls: - declstmts = [StmtDecl(Decl(p.bareType(side), p.var().name), initargs=[]) - for p in md.returns] + declstmts = [ + 
StmtDecl(Decl(p.bareType(side), p.var().name), initargs=[]) + for p in md.returns + ] stmts.extend( - [Whitespace.NL, - StmtDecl(Decl(_iterType(ptr=False), itervar.name), - initargs=[self.replyvar])] + [ + Whitespace.NL, + StmtDecl( + Decl(_iterType(ptr=False), itervar.name), initargs=[self.replyvar] + ), + ] + declstmts + [Whitespace.NL] - + [_ParamTraits.checkedRead(r.ipdltype, r.var(), - ExprAddrOf(self.replyvar), - ExprAddrOf(self.itervar), - errfn, "'%s'" % r.ipdltype.name(), - sentinelKey=r.name, errfnSentinel=errfnSentinel, - actor=ExprVar.THIS) - for r in md.returns] - + [self.endRead(self.replyvar, itervar)]) + + [ + _ParamTraits.checkedRead( + r.ipdltype, + r.var(), + ExprAddrOf(self.replyvar), + ExprAddrOf(self.itervar), + errfn, + "'%s'" % r.ipdltype.name(), + sentinelKey=r.name, + errfnSentinel=errfnSentinel, + actor=ExprVar.THIS, + ) + for r in md.returns + ] + + [self.endRead(self.replyvar, itervar)] + ) return stmts def sendAsync(self, md, msgexpr, actor=None): - sendok = ExprVar('sendok__') - resolvefn = ExprVar('aResolve') - rejectfn = ExprVar('aReject') + sendok = ExprVar("sendok__") + resolvefn = ExprVar("aResolve") + rejectfn = ExprVar("aReject") - stmts = [Whitespace.NL, - self.logMessage(md, msgexpr, 'Sending ', actor), - self.profilerLabel(md)] + stmts = [ + Whitespace.NL, + self.logMessage(md, msgexpr, "Sending ", actor), + self.profilerLabel(md), + ] stmts.append(Whitespace.NL) # Generate the actual call expression. - send = ExprVar('ChannelSend') + send = ExprVar("ChannelSend") if actor is not None: - send = ExprSelect(actor, '->', send.name) + send = ExprSelect(actor, "->", send.name) if md.returns: - stmts.append(StmtExpr(ExprCall(send, args=[msgexpr, - ExprMove(resolvefn), - ExprMove(rejectfn)]))) + stmts.append( + StmtExpr( + ExprCall( + send, args=[msgexpr, ExprMove(resolvefn), ExprMove(rejectfn)] + ) + ) + ) retvar = None else: - stmts.append(StmtDecl(Decl(Type.BOOL, sendok.name), - init=ExprCall(send, args=[msgexpr]))) + stmts.append( + StmtDecl( + Decl(Type.BOOL, sendok.name), init=ExprCall(send, args=[msgexpr]) + ) + ) retvar = sendok return (retvar, stmts) def sendBlocking(self, md, msgexpr, replyexpr, actor=None): - send = ExprVar('ChannelSend') + send = ExprVar("ChannelSend") if md.decl.type.isInterrupt(): - send = ExprVar('ChannelCall') + send = ExprVar("ChannelCall") if actor is not None: - send = ExprSelect(actor, '->', send.name) + send = ExprSelect(actor, "->", send.name) - sendok = ExprVar('sendok__') + sendok = ExprVar("sendok__") return ( sendok, - ([Whitespace.NL, - self.logMessage(md, msgexpr, 'Sending ', actor), - self.profilerLabel(md)] - + [Whitespace.NL, - StmtDecl(Decl(Type.BOOL, sendok.name), init=ExprLiteral.FALSE), - StmtBlock([ - StmtExpr(ExprAssn(sendok, ExprCall( - send, args=[msgexpr, ExprAddrOf(replyexpr)] - ))), - ]) - ]) + ( + [ + Whitespace.NL, + self.logMessage(md, msgexpr, "Sending ", actor), + self.profilerLabel(md), + ] + + [ + Whitespace.NL, + StmtDecl(Decl(Type.BOOL, sendok.name), init=ExprLiteral.FALSE), + StmtBlock( + [ + StmtExpr( + ExprAssn( + sendok, + ExprCall( + send, args=[msgexpr, ExprAddrOf(replyexpr)] + ), + ) + ), + ] + ), + ] + ), ) def sendAsyncWithPromise(self, md): @@ -4833,31 +5562,34 @@ def sendAsyncWithPromise(self, md): resolvetype = md.returns[0].bareType(self.side) resolve = ExprCode( - ''' + """ [promise__](${resolvetype}&& aValue) { promise__->Resolve(std::move(aValue), __func__); } - ''', - resolvetype=resolvetype) + """, + resolvetype=resolvetype, + ) reject = ExprCode( - ''' + """ 
[promise__](ResponseRejectReason&& aReason) { promise__->Reject(std::move(aReason), __func__); } - ''', - resolvetype=resolvetype) + """, + resolvetype=resolvetype, + ) args = [ExprMove(p.var()) for p in md.params] + [resolve, reject] stmt = StmtCode( - ''' + """ RefPtr<${promise}> promise__ = new ${promise}(__func__); promise__->UseDirectTaskDispatch(__func__); ${send}($,{args}); return promise__; - ''', + """, promise=promise, send=md.sendMethod(), - args=args) + args=args, + ) return [stmt] def callAllocActor(self, md, retsems, side): @@ -4868,30 +5600,37 @@ def callAllocActor(self, md, retsems, side): callalloc = self.thisCall( _allocMethod(md.decl.type.constructedType(), side), - args=md.makeCxxArgs(retsems=retsems, retcallsems='out', - implicit=False)) + args=md.makeCxxArgs(retsems=retsems, retcallsems="out", implicit=False), + ) - return StmtDecl(Decl(actortype, md.actorDecl().var().name), - init=callalloc) + return StmtDecl(Decl(actortype, md.actorDecl().var().name), init=callalloc) def invokeRecvHandler(self, md, implicit=True): - retsems = 'in' + retsems = "in" if md.decl.type.isAsync() and md.returns: - retsems = 'resolver' - failif = StmtIf(ExprNot(self.thisCall( - md.recvMethod(), - md.makeCxxArgs( - paramsems='move', - retsems=retsems, - retcallsems='out', - implicit=implicit + retsems = "resolver" + failif = StmtIf( + ExprNot( + self.thisCall( + md.recvMethod(), + md.makeCxxArgs( + paramsems="move", + retsems=retsems, + retcallsems="out", + implicit=implicit, + ), + ) ) - ))) - failif.addifstmts([ - _protocolErrorBreakpoint('Handler returned error code!'), - Whitespace('// Error handled in mozilla::ipc::IPCResult\n', indent=True), - StmtReturn(_Result.ProcessingError) - ]) + ) + failif.addifstmts( + [ + _protocolErrorBreakpoint("Handler returned error code!"), + Whitespace( + "// Error handled in mozilla::ipc::IPCResult\n", indent=True + ), + StmtReturn(_Result.ProcessingError), + ] + ) return [failif] def makeDtorMethodDecl(self, md): @@ -4899,26 +5638,34 @@ def makeDtorMethodDecl(self, md): decl.methodspec = MethodSpec.STATIC return decl - def makeSendMethodDecl(self, md, promise=False, paramsems='in'): + def makeSendMethodDecl(self, md, promise=False, paramsems="in"): implicit = md.decl.type.hasImplicitActorParam() if md.decl.type.isAsync() and md.returns: if promise: - returnsems = 'promise' + returnsems = "promise" rettype = _refptr(Type(md.promiseName())) else: - returnsems = 'callback' + returnsems = "callback" rettype = Type.VOID else: assert not promise - returnsems = 'out' + returnsems = "out" rettype = Type.BOOL decl = MethodDecl( md.sendMethod(), - params=md.makeCxxParams(paramsems, returnsems=returnsems, - side=self.side, implicit=implicit, direction='send'), - warn_unused=((self.side == 'parent' and returnsems != 'callback') or - (md.decl.type.isCtor() and not md.decl.type.isAsync())), - ret=rettype) + params=md.makeCxxParams( + paramsems, + returnsems=returnsems, + side=self.side, + implicit=implicit, + direction="send", + ), + warn_unused=( + (self.side == "parent" and returnsems != "callback") + or (md.decl.type.isCtor() and not md.decl.type.isAsync()) + ), + ret=rettype, + ) if md.decl.type.isCtor(): decl.ret = md.actorDecl().bareType(self.side) return decl @@ -4926,7 +5673,7 @@ def makeSendMethodDecl(self, md, promise=False, paramsems='in'): def logMessage(self, md, msgptr, pfx, actor=None, receiving=False): actorname = _actorName(self.protocol.name, self.side) return StmtCode( - ''' + """ if (mozilla::ipc::LoggingEnabledFor(${actorname})) { 
mozilla::ipc::LogMessageForProtocol( ${actorname}, @@ -4935,44 +5682,48 @@ def logMessage(self, md, msgptr, pfx, actor=None, receiving=False): ${msgptr}->type(), mozilla::ipc::MessageDirection::${direction}); } - ''', + """, actorname=ExprLiteral.String(actorname), otherpid=self.protocol.callOtherPid(actor), pfx=ExprLiteral.String(pfx), msgptr=msgptr, - direction='eReceiving' if receiving else 'eSending') + direction="eReceiving" if receiving else "eSending", + ) def profilerLabel(self, md): return StmtCode( - ''' + """ AUTO_PROFILER_LABEL("${name}::${msgname}", OTHER); - ''', + """, name=self.protocol.name, - msgname=md.prettyMsgName()) + msgname=md.prettyMsgName(), + ) def saveActorId(self, md): - idvar = ExprVar('id__') + idvar = ExprVar("id__") if md.decl.type.hasReply(): # only save the ID if we're actually going to use it, to # avoid unused-variable warnings - saveIdStmts = [StmtDecl(Decl(_actorIdType(), idvar.name), - self.protocol.routingId())] + saveIdStmts = [ + StmtDecl(Decl(_actorIdType(), idvar.name), self.protocol.routingId()) + ] else: saveIdStmts = [] return idvar, saveIdStmts def endRead(self, msgexpr, iterexpr): return StmtCode( - ''' + """ ${msg}.EndRead(${iter}, ${msg}.type()); - ''', + """, msg=msgexpr, - iter=iterexpr) + iter=iterexpr, + ) class _GenerateProtocolParentCode(_GenerateProtocolActorCode): def __init__(self): - _GenerateProtocolActorCode.__init__(self, 'parent') + _GenerateProtocolActorCode.__init__(self, "parent") def sendsMessage(self, md): return not md.decl.type.isIn() @@ -4983,7 +5734,7 @@ def receivesMessage(self, md): class _GenerateProtocolChildCode(_GenerateProtocolActorCode): def __init__(self): - _GenerateProtocolActorCode.__init__(self, 'child') + _GenerateProtocolActorCode.__init__(self, "child") def sendsMessage(self, md): return not md.decl.type.isOut() @@ -4996,10 +5747,11 @@ def receivesMessage(self, md): # Utility passes ## + def _splitClassDeclDefn(cls): """Destructively split |cls| methods into declarations and -definitions (if |not methodDecl.force_inline|). Return classDecl, -methodDefns.""" + definitions (if |not methodDecl.force_inline|). Return classDecl, + methodDefns.""" defns = Block() for i, stmt in enumerate(cls.stmts): diff --git a/ipc/ipdl/ipdl/parser.py b/ipc/ipdl/ipdl/parser.py index e625c40456fe4a..9ffe9e5bed0c4f 100644 --- a/ipc/ipdl/ipdl/parser.py +++ b/ipc/ipdl/ipdl/parser.py @@ -13,18 +13,19 @@ class ParseError(Exception): def __init__(self, loc, fmt, *args): self.loc = loc - self.error = ('%s%s: error: %s' % ( - Parser.includeStackString(), loc, fmt)) % args + self.error = ( + "%s%s: error: %s" % (Parser.includeStackString(), loc, fmt) + ) % args def __str__(self): return self.error def _safeLinenoValue(t): - lineno, value = 0, '???' - if hasattr(t, 'lineno'): + lineno, value = 0, "???" 
+ if hasattr(t, "lineno"): lineno = t.lineno - if hasattr(t, 'value'): + if hasattr(t, "value"): value = t.value return lineno, value @@ -49,7 +50,7 @@ def __init__(self, type, name, debug=False): self.debug = debug self.filename = None self.includedirs = None - self.loc = None # not always up to date + self.loc = None # not always up to date self.lexer = None self.parser = None self.tu = TranslationUnit(type, name) @@ -61,10 +62,11 @@ def parse(self, input, filename, includedirs): if self.tu.name in Parser.parsed: priorTU = Parser.parsed[self.tu.name].tu if priorTU.filename != filename: - _error(Loc(filename), - "Trying to load `%s' from a file when we'd already seen it in file `%s'" % ( - self.tu.name, - priorTU.filename)) + _error( + Loc(filename), + "Trying to load `%s' from a file when we'd already seen it in file `%s'" + % (self.tu.name, priorTU.filename), + ) return priorTU @@ -79,17 +81,16 @@ def parse(self, input, filename, includedirs): Parser.current = self try: - ast = self.parser.parse(input=input, lexer=self.lexer, - debug=self.debug) + ast = self.parser.parse(input=input, lexer=self.lexer, debug=self.debug) finally: Parser.current = Parser.parseStack.pop() return ast def resolveIncludePath(self, filepath): - '''Return the absolute path from which the possibly partial -|filepath| should be read, or |None| if |filepath| cannot be located.''' - for incdir in self.includedirs + ['']: + """Return the absolute path from which the possibly partial + |filepath| should be read, or |None| if |filepath| cannot be located.""" + for incdir in self.includedirs + [""]: realpath = os.path.join(incdir, filepath) if os.path.isfile(realpath): return os.path.abspath(realpath) @@ -102,10 +103,12 @@ def resolveIncludePath(self, filepath): # which can be printed above a proper error message or warning @staticmethod def includeStackString(): - s = '' + s = "" for parse in Parser.parseStack[1:]: s += " in file included from `%s', line %d:\n" % ( - parse.loc.filename, parse.loc.lineno) + parse.loc.filename, + parse.loc.lineno, + ) return s @@ -115,63 +118,68 @@ def locFromTok(p, num): # ----------------------------------------------------------------------------- -reserved = set(( - 'async', - 'both', - 'child', - 'class', - 'comparable', - 'compress', - 'compressall', - 'from', - 'include', - 'intr', - 'manager', - 'manages', - 'namespace', - 'nested', - 'nullable', - 'or', - 'parent', - 'prio', - 'protocol', - 'refcounted', - 'moveonly', - 'returns', - 'struct', - 'sync', - 'tainted', - 'union', - 'UniquePtr', - 'upto', - 'using', - 'verify')) +reserved = set( + ( + "async", + "both", + "child", + "class", + "comparable", + "compress", + "compressall", + "from", + "include", + "intr", + "manager", + "manages", + "namespace", + "nested", + "nullable", + "or", + "parent", + "prio", + "protocol", + "refcounted", + "moveonly", + "returns", + "struct", + "sync", + "tainted", + "union", + "UniquePtr", + "upto", + "using", + "verify", + ) +) tokens = [ - 'COLONCOLON', 'ID', 'STRING', + "COLONCOLON", + "ID", + "STRING", ] + [r.upper() for r in reserved] -t_COLONCOLON = '::' +t_COLONCOLON = "::" -literals = '(){}[]<>;:,?' -t_ignore = ' \f\t\v' +literals = "(){}[]<>;:,?" 
+t_ignore = " \f\t\v" def t_linecomment(t): - r'//[^\n]*' + r"//[^\n]*" def t_multilinecomment(t): - r'/\*(\n|.)*?\*/' - t.lexer.lineno += t.value.count('\n') + r"/\*(\n|.)*?\*/" + t.lexer.lineno += t.value.count("\n") def t_NL(t): - r'(?:\r\n|\n|\n)+' + r"(?:\r\n|\n|\n)+" t.lexer.lineno += len(t.value) def t_ID(t): - r'[a-zA-Z_][a-zA-Z0-9_]*' + r"[a-zA-Z_][a-zA-Z0-9_]*" if t.value in reserved: t.type = t.value.upper() return t @@ -184,8 +192,12 @@ def t_STRING(t): def t_error(t): - _error(Loc(Parser.current.filename, t.lineno), - 'lexically invalid characters `%s', t.value) + _error( + Loc(Parser.current.filename, t.lineno), + "lexically invalid characters `%s", + t.value, + ) + # ----------------------------------------------------------------------------- @@ -214,16 +226,16 @@ def p_TranslationUnit(p): _error(thing.loc, "only one protocol definition per file") tu.protocol = thing else: - assert(0) + assert 0 # The "canonical" namespace of the tu, what it's considered to be # in for the purposes of C++: |#include "foo/bar/TU.h"| if tu.protocol: - assert tu.filetype == 'protocol' + assert tu.filetype == "protocol" tu.namespaces = tu.protocol.namespaces tu.name = tu.protocol.name else: - assert tu.filetype == 'header' + assert tu.filetype == "header" # There's not really a canonical "thing" in headers. So # somewhat arbitrarily use the namespace of the last # interesting thing that was declared. @@ -233,13 +245,14 @@ def p_TranslationUnit(p): p[0] = tu + # -------------------- # Preamble def p_Preamble(p): """Preamble : Preamble PreambleStmt ';' - |""" + |""" if 1 == len(p): p[0] = [] else: @@ -249,8 +262,8 @@ def p_Preamble(p): def p_PreambleStmt(p): """PreambleStmt : CxxIncludeStmt - | IncludeStmt - | UsingStmt""" + | IncludeStmt + | UsingStmt""" p[0] = p[1] @@ -261,22 +274,21 @@ def p_CxxIncludeStmt(p): def p_IncludeStmt(p): """IncludeStmt : INCLUDE PROTOCOL ID - | INCLUDE ID""" + | INCLUDE ID""" loc = locFromTok(p, 1) Parser.current.loc = loc if 4 == len(p): id = p[3] - type = 'protocol' + type = "protocol" else: id = p[2] - type = 'header' + type = "header" inc = Include(loc, type, id) path = Parser.current.resolveIncludePath(inc.file) if path is None: - raise ParseError(loc, "can't locate include file `%s'" % ( - inc.file)) + raise ParseError(loc, "can't locate include file `%s'" % (inc.file)) inc.tu = Parser(type, id).parse(open(path).read(), path, Parser.current.includedirs) p[0] = inc @@ -284,37 +296,40 @@ def p_IncludeStmt(p): def p_UsingKind(p): """UsingKind : CLASS - | STRUCT - |""" + | STRUCT + |""" p[0] = p[1] if 2 == len(p) else None def p_MaybeComparable(p): """MaybeComparable : COMPARABLE - |""" + |""" p[0] = 2 == len(p) def p_MaybeRefcounted(p): """MaybeRefcounted : REFCOUNTED - |""" + |""" p[0] = 2 == len(p) def p_MaybeMoveOnly(p): """MaybeMoveOnly : MOVEONLY - |""" + |""" p[0] = 2 == len(p) def p_UsingStmt(p): """UsingStmt : USING MaybeRefcounted MaybeMoveOnly UsingKind CxxType FROM STRING""" - p[0] = UsingStmt(locFromTok(p, 1), - refcounted=p[2], - moveonly=p[3], - kind=p[4], - cxxTypeSpec=p[5], - cxxHeader=p[7]) + p[0] = UsingStmt( + locFromTok(p, 1), + refcounted=p[2], + moveonly=p[3], + kind=p[4], + cxxTypeSpec=p[5], + cxxHeader=p[7], + ) + # -------------------- # Namespaced stuff @@ -322,7 +337,7 @@ def p_UsingStmt(p): def p_NamespacedStuff(p): """NamespacedStuff : NamespacedStuff NamespaceThing - | NamespaceThing""" + | NamespaceThing""" if 2 == len(p): p[0] = p[1] else: @@ -332,9 +347,9 @@ def p_NamespacedStuff(p): def p_NamespaceThing(p): """NamespaceThing : 
NAMESPACE ID '{' NamespacedStuff '}' - | StructDecl - | UnionDecl - | ProtocolDefn""" + | StructDecl + | UnionDecl + | ProtocolDefn""" if 2 == len(p): p[0] = [p[1]] else: @@ -345,7 +360,7 @@ def p_NamespaceThing(p): def p_StructDecl(p): """StructDecl : MaybeComparable STRUCT ID '{' StructFields '}' ';' - | MaybeComparable STRUCT ID '{' '}' ';'""" + | MaybeComparable STRUCT ID '{' '}' ';'""" if 8 == len(p): p[0] = StructDecl(locFromTok(p, 2), p[3], p[5], p[1]) else: @@ -354,7 +369,7 @@ def p_StructDecl(p): def p_StructFields(p): """StructFields : StructFields StructField ';' - | StructField ';'""" + | StructField ';'""" if 3 == len(p): p[0] = [p[1]] else: @@ -374,7 +389,7 @@ def p_UnionDecl(p): def p_ComponentTypes(p): """ComponentTypes : ComponentTypes Type ';' - | Type ';'""" + | Type ';'""" if 3 == len(p): p[0] = [p[1]] else: @@ -393,9 +408,11 @@ def p_ProtocolDefn(p): protocol.refcounted = p[2] p[0] = protocol - if Parser.current.type == 'header': - _error(protocol.loc, - 'can\'t define a protocol in a header. Do it in a protocol spec instead.') + if Parser.current.type == "header": + _error( + protocol.loc, + "can't define a protocol in a header. Do it in a protocol spec instead.", + ) def p_ProtocolBody(p): @@ -406,9 +423,10 @@ def p_ProtocolBody(p): # -------------------- # manager/manages stmts + def p_ManagersStmtOpt(p): """ManagersStmtOpt : ManagersStmt ManagesStmtsOpt - | ManagesStmtsOpt""" + | ManagesStmtsOpt""" if 2 == len(p): p[0] = p[1] else: @@ -426,7 +444,7 @@ def p_ManagersStmt(p): def p_ManagerList(p): """ManagerList : ID - | ManagerList OR ID""" + | ManagerList OR ID""" if 2 == len(p): p[0] = [Manager(locFromTok(p, 1), p[1])] else: @@ -436,7 +454,7 @@ def p_ManagerList(p): def p_ManagesStmtsOpt(p): """ManagesStmtsOpt : ManagesStmt ManagesStmtsOpt - | MessageDeclsOpt""" + | MessageDeclsOpt""" if 2 == len(p): p[0] = p[1] else: @@ -452,9 +470,10 @@ def p_ManagesStmt(p): # -------------------- # Message decls + def p_MessageDeclsOpt(p): """MessageDeclsOpt : MessageDeclThing MessageDeclsOpt - | """ + |""" if 1 == len(p): # we fill in |loc| in the Protocol rule p[0] = Protocol(None) @@ -465,7 +484,7 @@ def p_MessageDeclsOpt(p): def p_MessageDeclThing(p): """MessageDeclThing : MessageDirectionLabel ':' MessageDecl ';' - | MessageDecl ';'""" + | MessageDecl ';'""" if 3 == len(p): p[0] = p[1] else: @@ -474,13 +493,13 @@ def p_MessageDeclThing(p): def p_MessageDirectionLabel(p): """MessageDirectionLabel : PARENT - | CHILD - | BOTH""" - if p[1] == 'parent': + | CHILD + | BOTH""" + if p[1] == "parent": Parser.current.direction = IN - elif p[1] == 'child': + elif p[1] == "child": Parser.current.direction = OUT - elif p[1] == 'both': + elif p[1] == "both": Parser.current.direction = INOUT else: assert 0 @@ -494,7 +513,7 @@ def p_MessageDecl(p): msg.sendSemantics = p[1][2] if Parser.current.direction is None: - _error(msg.loc, 'missing message direction') + _error(msg.loc, "missing message direction") msg.direction = Parser.current.direction p[0] = msg @@ -520,7 +539,7 @@ def p_MessageInParams(p): def p_MessageOutParams(p): """MessageOutParams : RETURNS '(' ParamList ')' - | """ + |""" if 1 == len(p): p[0] = [] else: @@ -529,8 +548,8 @@ def p_MessageOutParams(p): def p_OptionalMessageModifiers(p): """OptionalMessageModifiers : OptionalMessageModifiers MessageModifier - | MessageModifier - | """ + | MessageModifier + |""" if 1 == len(p): p[0] = [] elif 2 == len(p): @@ -541,9 +560,9 @@ def p_OptionalMessageModifiers(p): def p_MessageModifier(p): - """ MessageModifier : MessageVerify - | 
MessageCompress - | MessageTainted """ + """MessageModifier : MessageVerify + | MessageCompress + | MessageTainted""" p[0] = p[1] @@ -554,9 +573,10 @@ def p_MessageVerify(p): def p_MessageCompress(p): """MessageCompress : COMPRESS - | COMPRESSALL""" + | COMPRESSALL""" p[0] = p[1] + def p_MessageTainted(p): """MessageTainted : TAINTED""" p[0] = p[1] @@ -566,36 +586,35 @@ def p_MessageTainted(p): # Minor stuff def p_Nested(p): """Nested : ID""" - kinds = {'not': 1, - 'inside_sync': 2, - 'inside_cpow': 3} + kinds = {"not": 1, "inside_sync": 2, "inside_cpow": 3} if p[1] not in kinds: - _error(locFromTok(p, 1), "Expected not, inside_sync, or inside_cpow for nested()") + _error( + locFromTok(p, 1), "Expected not, inside_sync, or inside_cpow for nested()" + ) - p[0] = {'nested': kinds[p[1]]} + p[0] = {"nested": kinds[p[1]]} def p_Priority(p): """Priority : ID""" - kinds = {'normal': 1, - 'input': 2, - 'high': 3, - 'mediumhigh': 4} + kinds = {"normal": 1, "input": 2, "high": 3, "mediumhigh": 4} if p[1] not in kinds: - _error(locFromTok(p, 1), "Expected normal, input, high or mediumhigh for prio()") + _error( + locFromTok(p, 1), "Expected normal, input, high or mediumhigh for prio()" + ) - p[0] = {'prio': kinds[p[1]]} + p[0] = {"prio": kinds[p[1]]} def p_SendQualifier(p): """SendQualifier : NESTED '(' Nested ')' - | PRIO '(' Priority ')'""" + | PRIO '(' Priority ')'""" p[0] = p[3] def p_SendQualifierList(p): """SendQualifierList : SendQualifier SendQualifierList - | """ + |""" if len(p) > 1: p[0] = p[1] p[0].update(p[2]) @@ -605,30 +624,30 @@ def p_SendQualifierList(p): def p_SendSemanticsQual(p): """SendSemanticsQual : SendQualifierList ASYNC - | SendQualifierList SYNC - | INTR""" + | SendQualifierList SYNC + | INTR""" quals = {} if len(p) == 3: quals = p[1] mtype = p[2] else: - mtype = 'intr' + mtype = "intr" - if mtype == 'async': + if mtype == "async": mtype = ASYNC - elif mtype == 'sync': + elif mtype == "sync": mtype = SYNC - elif mtype == 'intr': + elif mtype == "intr": mtype = INTR else: assert 0 - p[0] = [quals.get('nested', NOT_NESTED), quals.get('prio', NORMAL_PRIORITY), mtype] + p[0] = [quals.get("nested", NOT_NESTED), quals.get("prio", NORMAL_PRIORITY), mtype] def p_OptionalProtocolSendSemanticsQual(p): """OptionalProtocolSendSemanticsQual : ProtocolSendSemanticsQual - | """ + |""" if 2 == len(p): p[0] = p[1] else: @@ -637,22 +656,22 @@ def p_OptionalProtocolSendSemanticsQual(p): def p_ProtocolSendSemanticsQual(p): """ProtocolSendSemanticsQual : ASYNC - | SYNC - | NESTED '(' UPTO Nested ')' ASYNC - | NESTED '(' UPTO Nested ')' SYNC - | INTR""" - if p[1] == 'nested': + | SYNC + | NESTED '(' UPTO Nested ')' ASYNC + | NESTED '(' UPTO Nested ')' SYNC + | INTR""" + if p[1] == "nested": mtype = p[6] nested = p[4] else: mtype = p[1] nested = NOT_NESTED - if mtype == 'async': + if mtype == "async": mtype = ASYNC - elif mtype == 'sync': + elif mtype == "sync": mtype = SYNC - elif mtype == 'intr': + elif mtype == "intr": mtype = INTR else: assert 0 @@ -662,8 +681,8 @@ def p_ProtocolSendSemanticsQual(p): def p_ParamList(p): """ParamList : ParamList ',' Param - | Param - | """ + | Param + |""" if 1 == len(p): p[0] = [] elif 2 == len(p): @@ -687,9 +706,9 @@ def p_Type(p): def p_BasicType(p): """BasicType : CxxID - | CxxID '[' ']' - | CxxID '?' - | CxxUniquePtrInst""" + | CxxID '[' ']' + | CxxID '?' 
+ | CxxUniquePtrInst""" # ID == CxxType; we forbid qnames here, # in favor of the |using| declaration if not isinstance(p[1], TypeSpec): @@ -709,8 +728,9 @@ def p_BasicType(p): def p_MaybeNullable(p): """MaybeNullable : NULLABLE - | """ - p[0] = (2 == len(p)) + |""" + p[0] = 2 == len(p) + # -------------------- # C++ stuff @@ -718,7 +738,7 @@ def p_MaybeNullable(p): def p_CxxType(p): """CxxType : QualifiedID - | CxxID""" + | CxxID""" if isinstance(p[1], QualifiedId): p[0] = TypeSpec(p[1].loc, p[1]) else: @@ -728,7 +748,7 @@ def p_CxxType(p): def p_QualifiedID(p): """QualifiedID : QualifiedID COLONCOLON CxxID - | CxxID COLONCOLON CxxID""" + | CxxID COLONCOLON CxxID""" if isinstance(p[1], QualifiedId): loc, id = p[3] p[1].qualify(id) @@ -741,7 +761,7 @@ def p_QualifiedID(p): def p_CxxID(p): """CxxID : ID - | CxxTemplateInst""" + | CxxTemplateInst""" if isinstance(p[1], tuple): p[0] = p[1] else: @@ -750,7 +770,7 @@ def p_CxxID(p): def p_CxxTemplateInst(p): """CxxTemplateInst : ID '<' ID '>'""" - p[0] = (locFromTok(p, 1), str(p[1]) + '<' + str(p[3]) + '>') + p[0] = (locFromTok(p, 1), str(p[1]) + "<" + str(p[3]) + ">") def p_CxxUniquePtrInst(p): @@ -760,5 +780,4 @@ def p_CxxUniquePtrInst(p): def p_error(t): lineno, value = _safeLinenoValue(t) - _error(Loc(Parser.current.filename, lineno), - "bad syntax near `%s'", value) + _error(Loc(Parser.current.filename, lineno), "bad syntax near `%s'", value) diff --git a/ipc/ipdl/ipdl/type.py b/ipc/ipdl/ipdl/type.py index 6fc187fc5543d9..652d0b3bbc2b3a 100644 --- a/ipc/ipdl/ipdl/type.py +++ b/ipc/ipdl/ipdl/type.py @@ -16,7 +16,7 @@ import ipdl.builtin as builtin from ipdl.util import hash_str -_DELETE_MSG = '__delete__' +_DELETE_MSG = "__delete__" class TypeVisitor: @@ -24,8 +24,9 @@ def __init__(self): self.visited = set() def defaultVisit(self, node, *args): - raise Exception("INTERNAL ERROR: no visitor for node type `%s'" % - (node.__class__.__name__)) + raise Exception( + "INTERNAL ERROR: no visitor for node type `%s'" % (node.__class__.__name__) + ) def visitVoidType(self, v, *args): pass @@ -98,8 +99,7 @@ def __cmp__(self, o): return cmp(self.fullname(), o.fullname()) def __eq__(self, o): - return (self.__class__ == o.__class__ - and self.fullname() == o.fullname()) + return self.__class__ == o.__class__ and self.fullname() == o.fullname() def __hash__(self): return hash_str(self.fullname()) @@ -107,10 +107,12 @@ def __hash__(self): # Is this a C++ type? def isCxx(self): return False + # Is this an IPDL type? def isIPDL(self): return False + # Is this type neither compound nor an array? 
def isAtom(self): @@ -132,9 +134,9 @@ def fullname(self): raise NotImplementedError() def accept(self, visitor, *args): - visit = getattr(visitor, 'visit' + self.__class__.__name__, None) + visit = getattr(visitor, "visit" + self.__class__.__name__, None) if visit is None: - return getattr(visitor, 'defaultVisit')(self, *args) + return getattr(visitor, "defaultVisit")(self, *args) return visit(self, *args) @@ -148,9 +150,11 @@ def isIPDL(self): def isAtom(self): return True - def name(self): return 'void' + def name(self): + return "void" - def fullname(self): return 'void' + def fullname(self): + return "void" VOID = VoidType() @@ -184,70 +188,93 @@ def name(self): def fullname(self): return str(self.qname) + # -------------------- class IPDLType(Type): - def isIPDL(self): return True + def isIPDL(self): + return True - def isMessage(self): return False + def isMessage(self): + return False - def isProtocol(self): return False + def isProtocol(self): + return False - def isActor(self): return False + def isActor(self): + return False - def isStruct(self): return False + def isStruct(self): + return False - def isUnion(self): return False + def isUnion(self): + return False - def isArray(self): return False + def isArray(self): + return False - def isMaybe(self): return False + def isMaybe(self): + return False - def isAtom(self): return True + def isAtom(self): + return True - def isCompound(self): return False + def isCompound(self): + return False - def isShmem(self): return False + def isShmem(self): + return False - def isByteBuf(self): return False + def isByteBuf(self): + return False - def isFD(self): return False + def isFD(self): + return False - def isEndpoint(self): return False + def isEndpoint(self): + return False - def isManagedEndpoint(self): return False + def isManagedEndpoint(self): + return False - def isAsync(self): return self.sendSemantics == ASYNC + def isAsync(self): + return self.sendSemantics == ASYNC - def isSync(self): return self.sendSemantics == SYNC + def isSync(self): + return self.sendSemantics == SYNC - def isInterrupt(self): return self.sendSemantics is INTR + def isInterrupt(self): + return self.sendSemantics is INTR - def hasReply(self): return (self.isSync() or self.isInterrupt()) + def hasReply(self): + return self.isSync() or self.isInterrupt() - def hasBaseType(self): return False + def hasBaseType(self): + return False @classmethod def convertsTo(cls, lesser, greater): def _unwrap(nr): if isinstance(nr, dict): - return _unwrap(nr['nested']) + return _unwrap(nr["nested"]) elif isinstance(nr, int): return nr else: - raise ValueError('Got unexpected nestedRange value: %s' % nr) + raise ValueError("Got unexpected nestedRange value: %s" % nr) lnr0, gnr0, lnr1, gnr1 = ( - _unwrap(lesser.nestedRange[0]), _unwrap(greater.nestedRange[0]), - _unwrap(lesser.nestedRange[1]), _unwrap(greater.nestedRange[1])) - if (lnr0 < gnr0 or lnr1 > gnr1): + _unwrap(lesser.nestedRange[0]), + _unwrap(greater.nestedRange[0]), + _unwrap(lesser.nestedRange[1]), + _unwrap(greater.nestedRange[1]), + ) + if lnr0 < gnr0 or lnr1 > gnr1: return False # Protocols that use intr semantics are not allowed to use # message nesting. 
- if (greater.isInterrupt() and - lesser.nestedRange != (NOT_NESTED, NOT_NESTED)): + if greater.isInterrupt() and lesser.nestedRange != (NOT_NESTED, NOT_NESTED): return False if lesser.isAsync(): @@ -264,9 +291,19 @@ def needsMoreJuiceThan(self, o): class MessageType(IPDLType): - def __init__(self, nested, prio, sendSemantics, direction, - ctor=False, dtor=False, cdtype=None, compress=False, - tainted=False, verify=False): + def __init__( + self, + nested, + prio, + sendSemantics, + direction, + ctor=False, + dtor=False, + cdtype=None, + compress=False, + tainted=False, + verify=False, + ): assert not (ctor and dtor) assert not (ctor or dtor) or cdtype is not None @@ -284,21 +321,29 @@ def __init__(self, nested, prio, sendSemantics, direction, self.tainted = tainted self.verify = verify - def isMessage(self): return True + def isMessage(self): + return True - def isCtor(self): return self.ctor + def isCtor(self): + return self.ctor - def isDtor(self): return self.dtor + def isDtor(self): + return self.dtor - def constructedType(self): return self.cdtype + def constructedType(self): + return self.cdtype - def isIn(self): return self.direction is IN + def isIn(self): + return self.direction is IN - def isOut(self): return self.direction is OUT + def isOut(self): + return self.direction is OUT - def isInout(self): return self.direction is INOUT + def isInout(self): + return self.direction is INOUT - def hasReply(self): return len(self.returns) or IPDLType.hasReply(self) + def hasReply(self): + return len(self.returns) or IPDLType.hasReply(self) def hasImplicitActorParam(self): return self.isCtor() or self.isDtor() @@ -309,7 +354,7 @@ def __init__(self, qname, nested, sendSemantics, refcounted): self.qname = qname self.nestedRange = (NOT_NESTED, nested) self.sendSemantics = sendSemantics - self.managers = [] # ProtocolType + self.managers = [] # ProtocolType self.manages = [] self.hasDelete = False self.refcounted = refcounted @@ -393,8 +438,8 @@ def fullname(self): class _CompoundType(IPDLType): def __init__(self): - self.defined = False # bool - self.mutualRec = set() # set(_CompoundType | ArrayType) + self.defined = False # bool + self.mutualRec = set() # set(_CompoundType | ArrayType) def isAtom(self): return False @@ -406,9 +451,9 @@ def itercomponents(self): raise Exception('"pure virtual" method') def mutuallyRecursiveWith(self, t, exploring=None): - '''|self| is mutually recursive with |t| iff |self| and |t| -are in a cycle in the type graph rooted at |self|. This function -looks for such a cycle and returns True if found.''' + """|self| is mutually recursive with |t| iff |self| and |t| + are in a cycle in the type graph rooted at |self|. 
This function + looks for such a cycle and returns True if found.""" if exploring is None: exploring = set() @@ -438,71 +483,88 @@ class StructType(_CompoundType): def __init__(self, qname, fields): _CompoundType.__init__(self) self.qname = qname - self.fields = fields # [ Type ] + self.fields = fields # [ Type ] - def isStruct(self): return True + def isStruct(self): + return True def itercomponents(self): for f in self.fields: yield f - def name(self): return self.qname.baseid + def name(self): + return self.qname.baseid - def fullname(self): return str(self.qname) + def fullname(self): + return str(self.qname) class UnionType(_CompoundType): def __init__(self, qname, components): _CompoundType.__init__(self) self.qname = qname - self.components = components # [ Type ] + self.components = components # [ Type ] - def isUnion(self): return True + def isUnion(self): + return True def itercomponents(self): for c in self.components: yield c - def name(self): return self.qname.baseid + def name(self): + return self.qname.baseid - def fullname(self): return str(self.qname) + def fullname(self): + return str(self.qname) class ArrayType(IPDLType): def __init__(self, basetype): self.basetype = basetype - def isAtom(self): return False + def isAtom(self): + return False - def isArray(self): return True + def isArray(self): + return True - def hasBaseType(self): return True + def hasBaseType(self): + return True - def name(self): return self.basetype.name() + '[]' + def name(self): + return self.basetype.name() + "[]" - def fullname(self): return self.basetype.fullname() + '[]' + def fullname(self): + return self.basetype.fullname() + "[]" class MaybeType(IPDLType): def __init__(self, basetype): self.basetype = basetype - def isAtom(self): return False + def isAtom(self): + return False - def isMaybe(self): return True + def isMaybe(self): + return True - def hasBaseType(self): return True + def hasBaseType(self): + return True - def name(self): return self.basetype.name() + '?' + def name(self): + return self.basetype.name() + "?" - def fullname(self): return self.basetype.fullname() + '?' + def fullname(self): + return self.basetype.fullname() + "?" 
class ShmemType(IPDLType): def __init__(self, qname): self.qname = qname - def isShmem(self): return True + def isShmem(self): + return True def name(self): return self.qname.baseid @@ -515,7 +577,8 @@ class ByteBufType(IPDLType): def __init__(self, qname): self.qname = qname - def isByteBuf(self): return True + def isByteBuf(self): + return True def name(self): return self.qname.baseid @@ -528,7 +591,8 @@ class FDType(IPDLType): def __init__(self, qname): self.qname = qname - def isFD(self): return True + def isFD(self): + return True def name(self): return self.qname.baseid @@ -542,7 +606,8 @@ def __init__(self, qname, actor): self.qname = qname self.actor = actor - def isEndpoint(self): return True + def isEndpoint(self): + return True def name(self): return self.qname.baseid @@ -556,7 +621,8 @@ def __init__(self, qname, actor): self.qname = qname self.actor = actor - def isManagedEndpoint(self): return True + def isManagedEndpoint(self): + return True def name(self): return self.qname.baseid @@ -569,17 +635,20 @@ class UniquePtrType(IPDLType): def __init__(self, basetype): self.basetype = basetype - def isAtom(self): return False + def isAtom(self): + return False - def isUniquePtr(self): return True + def isUniquePtr(self): + return True - def hasBaseType(self): return True + def hasBaseType(self): + return True def name(self): - return 'UniquePtr<' + self.basetype.name() + '>' + return "UniquePtr<" + self.basetype.name() + ">" def fullname(self): - return 'mozilla::UniquePtr<' + self.basetype.fullname() + '>' + return "mozilla::UniquePtr<" + self.basetype.fullname() + ">" def iteractortypes(t, visited=None): @@ -604,11 +673,14 @@ def iteractortypes(t, visited=None): def hasshmem(type): """Return true iff |type| is shmem or has it buried within.""" + class found(BaseException): pass class findShmem(TypeVisitor): - def visitShmemType(self, s): raise found() + def visitShmemType(self, s): + raise found() + try: type.accept(findShmem()) except found: @@ -617,16 +689,16 @@ def visitShmemType(self, s): raise found() # -------------------- -_builtinloc = Loc('', 0) +_builtinloc = Loc("", 0) def makeBuiltinUsing(tname): - quals = tname.split('::') + quals = tname.split("::") base = quals.pop() quals = quals[0:] - return UsingStmt(_builtinloc, - TypeSpec(_builtinloc, - QualifiedId(_builtinloc, base, quals))) + return UsingStmt( + _builtinloc, TypeSpec(_builtinloc, QualifiedId(_builtinloc, base, quals)) + ) builtinUsing = [makeBuiltinUsing(t) for t in builtin.Types] @@ -639,7 +711,8 @@ def errormsg(loc, fmt, *args): loc = Loc.NONE else: loc = loc.loc - return '%s: error: %s' % (str(loc), fmt % args) + return "%s: error: %s" % (str(loc), fmt % args) + # -------------------- @@ -647,7 +720,7 @@ def errormsg(loc, fmt, *args): class SymbolTable: def __init__(self, errors): self.errors = errors - self.scopes = [{}] # stack({}) + self.scopes = [{}] # stack({}) self.currentScope = self.scopes[0] def enterScope(self): @@ -684,10 +757,14 @@ def declare(self, decl): def tryadd(name): olddecl = self.lookup(name) if olddecl is not None: - self.errors.append(errormsg( - decl.loc, - "redeclaration of symbol `%s', first declared at %s", - name, olddecl.loc)) + self.errors.append( + errormsg( + decl.loc, + "redeclaration of symbol `%s', first declared at %s", + name, + olddecl.loc, + ) + ) return self.currentScope[name] = decl decl.scope = self.currentScope @@ -701,16 +778,16 @@ def tryadd(name): class TypeCheck: - '''This pass sets the .decl attribute of AST nodes for which that is relevant; -a decl says 
where, with what type, and under what name(s) a node was -declared. + """This pass sets the .decl attribute of AST nodes for which that is relevant; + a decl says where, with what type, and under what name(s) a node was + declared. -With this information, it type checks the AST.''' + With this information, it type checks the AST.""" def __init__(self): # NB: no IPDL compile will EVER print a warning. A program has # one of two attributes: it is either well typed, or not well typed. - self.errors = [] # [ string ] + self.errors = [] # [ string ] def check(self, tu, errout=sys.stderr): def runpass(tcheckpass): @@ -764,7 +841,7 @@ def declare(self, loc, type, shortname=None, fullname=None, progname=None): def visitTranslationUnit(self, tu): # all TranslationUnits declare symbols in global scope - if hasattr(tu, 'visited'): + if hasattr(tu, "visited"): return tu.visited = True savedSymtab = self.symtab @@ -777,14 +854,18 @@ def visitTranslationUnit(self, tu): # for everyone's sanity, enforce that the filename and tu name # match basefilename = os.path.basename(tu.filename) - expectedfilename = '%s.ipdl' % (tu.name) + expectedfilename = "%s.ipdl" % (tu.name) if not tu.protocol: # header - expectedfilename += 'h' + expectedfilename += "h" if basefilename != expectedfilename: - self.error(tu.loc, - "expected file for translation unit `%s' to be named `%s'; instead it's named `%s'", # NOQA: E501 - tu.name, expectedfilename, basefilename) + self.error( + tu.loc, + "expected file for translation unit `%s' to be named `%s'; instead it's named `%s'", # NOQA: E501 + tu.name, + expectedfilename, + basefilename, + ) if tu.protocol: assert tu.name == tu.protocol.name @@ -802,35 +883,54 @@ def visitTranslationUnit(self, tu): loc=p.loc, type=ProtocolType(qname, p.nested, p.sendSemantics, p.refcounted), shortname=p.name, - fullname=None if 0 == len(qname.quals) else fullname) + fullname=None if 0 == len(qname.quals) else fullname, + ) p.parentEndpointDecl = self.declare( loc=p.loc, - type=EndpointType(QualifiedId(p.loc, 'Endpoint<' + - fullname + 'Parent>', ['mozilla', 'ipc']), - ActorType(p.decl.type)), - shortname='Endpoint<' + p.name + 'Parent>') + type=EndpointType( + QualifiedId( + p.loc, "Endpoint<" + fullname + "Parent>", ["mozilla", "ipc"] + ), + ActorType(p.decl.type), + ), + shortname="Endpoint<" + p.name + "Parent>", + ) p.childEndpointDecl = self.declare( loc=p.loc, - type=EndpointType(QualifiedId(p.loc, 'Endpoint<' + - fullname + 'Child>', ['mozilla', 'ipc']), - ActorType(p.decl.type)), - shortname='Endpoint<' + p.name + 'Child>') + type=EndpointType( + QualifiedId( + p.loc, "Endpoint<" + fullname + "Child>", ["mozilla", "ipc"] + ), + ActorType(p.decl.type), + ), + shortname="Endpoint<" + p.name + "Child>", + ) p.parentManagedEndpointDecl = self.declare( loc=p.loc, - type=ManagedEndpointType(QualifiedId(p.loc, 'ManagedEndpoint<' + - fullname + 'Parent>', - ['mozilla', 'ipc']), - ActorType(p.decl.type)), - shortname='ManagedEndpoint<' + p.name + 'Parent>') + type=ManagedEndpointType( + QualifiedId( + p.loc, + "ManagedEndpoint<" + fullname + "Parent>", + ["mozilla", "ipc"], + ), + ActorType(p.decl.type), + ), + shortname="ManagedEndpoint<" + p.name + "Parent>", + ) p.childManagedEndpointDecl = self.declare( loc=p.loc, - type=ManagedEndpointType(QualifiedId(p.loc, 'ManagedEndpoint<' + - fullname + 'Child>', - ['mozilla', 'ipc']), - ActorType(p.decl.type)), - shortname='ManagedEndpoint<' + p.name + 'Child>') + type=ManagedEndpointType( + QualifiedId( + p.loc, + "ManagedEndpoint<" + fullname + 
"Child>", + ["mozilla", "ipc"], + ), + ActorType(p.decl.type), + ), + shortname="ManagedEndpoint<" + p.name + "Child>", + ) # XXX ugh, this sucks. but we need this information to compute # what friend decls we need in generated C++ @@ -862,7 +962,7 @@ def visitTranslationUnit(self, tu): self.symtab = savedSymtab def declareStructOrUnion(self, su): - if hasattr(su, 'decl'): + if hasattr(su, "decl"): self.symtab.declare(su.decl) return @@ -877,23 +977,22 @@ def declareStructOrUnion(self, su): elif isinstance(su, UnionDecl): sutype = UnionType(qname, []) else: - assert 0 and 'unknown type' + assert 0 and "unknown type" # XXX more suckage. this time for pickling structs/unions # declared in headers. sutype._ast = su su.decl = self.declare( - loc=su.loc, - type=sutype, - shortname=su.name, - fullname=fullname) + loc=su.loc, type=sutype, shortname=su.name, fullname=fullname + ) def visitInclude(self, inc): if inc.tu is None: self.error( inc.loc, - "(type checking here will be unreliable because of an earlier error)") + "(type checking here will be unreliable because of an earlier error)", + ) return inc.tu.accept(self) if inc.tu.protocol: @@ -912,7 +1011,7 @@ def visitInclude(self, inc): def visitStructDecl(self, sd): # If we've already processed this struct, don't do it again. - if hasattr(sd, 'visited'): + if hasattr(sd, "visited"): return stype = sd.decl.type @@ -923,15 +1022,21 @@ def visitStructDecl(self, sd): for f in sd.fields: ftypedecl = self.symtab.lookup(str(f.typespec)) if ftypedecl is None: - self.error(f.loc, "field `%s' of struct `%s' has unknown type `%s'", - f.name, sd.name, str(f.typespec)) + self.error( + f.loc, + "field `%s' of struct `%s' has unknown type `%s'", + f.name, + sd.name, + str(f.typespec), + ) continue f.decl = self.declare( loc=f.loc, type=self._canonicalType(ftypedecl.type, f.typespec), shortname=f.name, - fullname=None) + fullname=None, + ) stype.fields.append(f.decl.type) self.symtab.exitScope() @@ -946,8 +1051,9 @@ def visitUnionDecl(self, ud): for c in ud.components: cdecl = self.symtab.lookup(str(c)) if cdecl is None: - self.error(c.loc, "unknown component type `%s' of union `%s'", - str(c), ud.name) + self.error( + c.loc, "unknown component type `%s' of union `%s'", str(c), ud.name + ) continue utype.components.append(self._canonicalType(cdecl.type, c)) @@ -958,29 +1064,38 @@ def visitUsingStmt(self, using): # there is nothing to typedef. With UniquePtrs, basenames # are generic so typedefs would be illegal. 
fullname = None - if fullname == 'mozilla::ipc::Shmem': + if fullname == "mozilla::ipc::Shmem": ipdltype = ShmemType(using.type.spec) - elif fullname == 'mozilla::ipc::ByteBuf': + elif fullname == "mozilla::ipc::ByteBuf": ipdltype = ByteBufType(using.type.spec) - elif fullname == 'mozilla::ipc::FileDescriptor': + elif fullname == "mozilla::ipc::FileDescriptor": ipdltype = FDType(using.type.spec) else: - ipdltype = ImportedCxxType(using.type.spec, using.isRefcounted(), using.isMoveonly()) + ipdltype = ImportedCxxType( + using.type.spec, using.isRefcounted(), using.isMoveonly() + ) existingType = self.symtab.lookup(ipdltype.fullname()) if existingType and existingType.fullname == ipdltype.fullname(): if ipdltype.isRefcounted() != existingType.type.isRefcounted(): - self.error(using.loc, "inconsistent refcounted status of type `%s`", - str(using.type)) + self.error( + using.loc, + "inconsistent refcounted status of type `%s`", + str(using.type), + ) if ipdltype.isMoveonly() != existingType.type.isMoveonly(): - self.error(using.loc, "inconsistent moveonly status of type `%s`", - str(using.type)) + self.error( + using.loc, + "inconsistent moveonly status of type `%s`", + str(using.type), + ) using.decl = existingType return using.decl = self.declare( loc=using.loc, type=ipdltype, shortname=using.type.basename(), - fullname=fullname) + fullname=fullname, + ) def visitProtocol(self, p): # protocol scope @@ -989,8 +1104,7 @@ def visitProtocol(self, p): seenmgrs = set() for mgr in p.managers: if mgr.name in seenmgrs: - self.error(mgr.loc, "manager `%s' appears multiple times", - mgr.name) + self.error(mgr.loc, "manager `%s' appears multiple times", mgr.name) continue seenmgrs.add(mgr.name) @@ -1002,21 +1116,21 @@ def visitProtocol(self, p): managed.accept(self) if not (p.managers or p.messageDecls or p.managesStmts): - self.error(p.loc, - "top-level protocol `%s' cannot be empty", - p.name) + self.error(p.loc, "top-level protocol `%s' cannot be empty", p.name) - setattr(self, 'currentProtocolDecl', p.decl) + setattr(self, "currentProtocolDecl", p.decl) for msg in p.messageDecls: msg.accept(self) del self.currentProtocolDecl - p.decl.type.hasDelete = (not not self.symtab.lookup(_DELETE_MSG)) + p.decl.type.hasDelete = not not self.symtab.lookup(_DELETE_MSG) if not (p.decl.type.hasDelete or p.decl.type.isToplevel()): self.error( p.loc, "destructor declaration `%s(...)' required for managed protocol `%s'", - _DELETE_MSG, p.name) + _DELETE_MSG, + p.name, + ) # FIXME/cjones declare all the little C++ thingies that will # be generated. 
they're not relevant to IPDL itself, but @@ -1038,12 +1152,17 @@ def visitManager(self, mgr): self.error( loc, "protocol `%s' referenced as |manager| of `%s' has not been declared", - mgrname, pname) + mgrname, + pname, + ) elif not isinstance(mgrdecl.type, ProtocolType): self.error( loc, "entity `%s' referenced as |manager| of `%s' is not of `protocol' type; instead it is of type `%s'", # NOQA: E501 - mgrname, pname, mgrdecl.type.typename()) + mgrname, + pname, + mgrdecl.type.typename(), + ) else: mgr.decl = mgrdecl pdecl.type.addManager(mgrdecl.type) @@ -1057,14 +1176,20 @@ def visitManagesStmt(self, mgs): loc = mgs.loc if mgsdecl is None: - self.error(loc, - "protocol `%s', managed by `%s', has not been declared", - mgsname, pname) + self.error( + loc, + "protocol `%s', managed by `%s', has not been declared", + mgsname, + pname, + ) elif not isinstance(mgsdecl.type, ProtocolType): self.error( loc, "%s declares itself managing a non-`protocol' entity `%s' of type `%s'", - pname, mgsname, mgsdecl.type.typename()) + pname, + mgsname, + mgsdecl.type.typename(), + ) else: mgs.decl = mgsdecl pdecl.type.manages.append(mgsdecl.type) @@ -1080,12 +1205,16 @@ def visitMessageDecl(self, md): decl = self.symtab.lookup(msgname) if decl is not None and decl.type.isProtocol(): # probably a ctor. we'll check validity later. - msgname += 'Constructor' + msgname += "Constructor" isctor = True cdtype = decl.type elif decl is not None: - self.error(loc, "message name `%s' already declared as `%s'", - msgname, decl.type.typename()) + self.error( + loc, + "message name `%s' already declared as `%s'", + msgname, + decl.type.typename(), + ) # if we error here, no big deal; move on to find more if _DELETE_MSG == msgname: @@ -1095,9 +1224,18 @@ def visitMessageDecl(self, md): # enter message scope self.symtab.enterScope() - msgtype = MessageType(md.nested, md.prio, md.sendSemantics, md.direction, - ctor=isctor, dtor=isdtor, cdtype=cdtype, - compress=md.compress, tainted=md.tainted, verify=md.verify) + msgtype = MessageType( + md.nested, + md.prio, + md.sendSemantics, + md.direction, + ctor=isctor, + dtor=isdtor, + cdtype=cdtype, + compress=md.compress, + tainted=md.tainted, + verify=md.verify, + ) # replace inparam Param nodes with proper Decls def paramToDecl(param): @@ -1109,13 +1247,13 @@ def paramToDecl(param): self.error( ploc, "argument typename `%s' of message `%s' has not been declared", - ptname, msgname) + ptname, + msgname, + ) ptype = VOID else: ptype = self._canonicalType(ptdecl.type, param.typespec) - return self.declare(loc=ploc, - type=ptype, - progname=param.name) + return self.declare(loc=ploc, type=ptype, progname=param.name) for i, inparam in enumerate(md.inParams): pdecl = paramToDecl(inparam) @@ -1128,10 +1266,7 @@ def paramToDecl(param): self.symtab.exitScope() - md.decl = self.declare( - loc=loc, - type=msgtype, - progname=msgname) + md.decl = self.declare(loc=loc, type=msgtype, progname=msgname) md.protocolDecl = self.currentProtocolDecl md.decl._md = md @@ -1139,14 +1274,12 @@ def _canonicalType(self, itype, typespec): loc = typespec.loc if itype.isIPDL(): if itype.isProtocol(): - itype = ActorType(itype, - nullable=typespec.nullable) + itype = ActorType(itype, nullable=typespec.nullable) if typespec.nullable and not (itype.isIPDL() and itype.isActor()): self.error( - loc, - "`nullable' qualifier for type `%s' makes no sense", - itype.name()) + loc, "`nullable' qualifier for type `%s' makes no sense", itype.name() + ) if typespec.array: itype = ArrayType(itype) @@ -1162,6 +1295,7 @@ def 
_canonicalType(self, itype, typespec): # ----------------------------------------------------------------------------- + def checkcycles(p, stack=None): cycles = [] @@ -1189,12 +1323,11 @@ def formatcycles(cycles): def fullyDefined(t, exploring=None): - '''The rules for "full definition" of a type are - defined(atom) := true - defined(array basetype) := defined(basetype) - defined(struct f1 f2...) := defined(f1) and defined(f2) and ... - defined(union c1 c2 ...) := defined(c1) or defined(c2) or ... -''' + """The rules for "full definition" of a type are + defined(atom) := true + defined(array basetype) := defined(basetype) + defined(struct f1 f2...) := defined(f1) and defined(f2) and ... + defined(union c1 c2 ...) := defined(c1) or defined(c2) or ...""" if exploring is None: exploring = set() @@ -1243,13 +1376,11 @@ def visitInclude(self, inc): def visitStructDecl(self, sd): if not fullyDefined(sd.decl.type): - self.error(sd.decl.loc, - "struct `%s' is only partially defined", sd.name) + self.error(sd.decl.loc, "struct `%s' is only partially defined", sd.name) def visitUnionDecl(self, ud): if not fullyDefined(ud.decl.type): - self.error(ud.decl.loc, - "union `%s' is only partially defined", ud.name) + self.error(ud.decl.loc, "union `%s' is only partially defined", ud.name) def visitProtocol(self, p): self.ptype = p.decl.type @@ -1262,7 +1393,9 @@ def visitProtocol(self, p): self.error( p.decl.loc, "protocol `%s' requires more powerful send semantics than its manager `%s' provides", # NOQA: E501 - pname, mgrtype.name()) + pname, + mgrtype.name(), + ) if ptype.isToplevel(): cycles = checkcycles(p.decl.type) @@ -1270,13 +1403,13 @@ def visitProtocol(self, p): self.error( p.decl.loc, "cycle(s) detected in manager/manages hierarchy: %s", - formatcycles(cycles)) + formatcycles(cycles), + ) if 1 == len(ptype.managers) and ptype is ptype.manager(): self.error( - p.decl.loc, - "top-level protocol `%s' cannot manage itself", - p.name) + p.decl.loc, "top-level protocol `%s' cannot manage itself", p.name + ) return Visitor.visitProtocol(self, p) @@ -1297,7 +1430,9 @@ def visitManagesStmt(self, mgs): self.error( loc, "|manages| declaration in protocol `%s' does not match any |manager| declaration in protocol `%s'", # NOQA: E501 - pname, mgsname) + pname, + mgsname, + ) def visitManager(self, mgr): pdecl = mgr.of.decl @@ -1316,7 +1451,9 @@ def visitManager(self, mgr): self.error( loc, "|manager| declaration in protocol `%s' does not match any |manages| declaration in protocol `%s'", # NOQA: E501 - pname, mgrname) + pname, + mgrname, + ) def visitMessageDecl(self, md): mtype, mname = md.decl.type, md.decl.progname @@ -1328,44 +1465,59 @@ def visitMessageDecl(self, md): self.error( loc, "inside_sync nested messages must be sync (here, message `%s' in protocol `%s')", - mname, pname) + mname, + pname, + ) if mtype.nested == INSIDE_CPOW_NESTED and (mtype.isOut() or mtype.isInout()): self.error( loc, "inside_cpow nested parent-to-child messages are verboten (here, message `%s' in protocol `%s')", # NOQA: E501 - mname, pname) + mname, + pname, + ) # We allow inside_sync messages that are themselves sync to be sent from the # parent. Normal and inside_cpow nested messages that are sync can only come from # the child. 
- if mtype.isSync() and mtype.nested == NOT_NESTED and (mtype.isOut() or mtype.isInout()): + if ( + mtype.isSync() + and mtype.nested == NOT_NESTED + and (mtype.isOut() or mtype.isInout()) + ): self.error( loc, "sync parent-to-child messages are verboten (here, message `%s' in protocol `%s')", - mname, pname) + mname, + pname, + ) if mtype.needsMoreJuiceThan(ptype): self.error( loc, "message `%s' requires more powerful send semantics than its protocol `%s' provides", # NOQA: E501 - mname, pname) + mname, + pname, + ) if (mtype.isCtor() or mtype.isDtor()) and mtype.isAsync() and mtype.returns: - self.error(loc, - "asynchronous ctor/dtor message `%s' declares return values", - mname) + self.error( + loc, "asynchronous ctor/dtor message `%s' declares return values", mname + ) - if (mtype.compress and - (not mtype.isAsync() or mtype.isCtor() or mtype.isDtor())): + if mtype.compress and (not mtype.isAsync() or mtype.isCtor() or mtype.isDtor()): if mtype.isCtor() or mtype.isDtor(): message_type = "constructor" if mtype.isCtor() else "destructor" - error_message = ("%s messages can't use compression (here, in protocol `%s')" % - (message_type, pname)) + error_message = ( + "%s messages can't use compression (here, in protocol `%s')" + % (message_type, pname) + ) else: - error_message = ("message `%s' in protocol `%s' requests compression but is not async" % # NOQA: E501 - (mname, pname)) + error_message = ( + "message `%s' in protocol `%s' requests compression but is not async" + % (mname, pname) # NOQA: E501 + ) self.error(loc, error_message) @@ -1373,4 +1525,6 @@ def visitMessageDecl(self, md): self.error( loc, "ctor for protocol `%s', which is not managed by protocol `%s'", - mname[:-len('constructor')], pname) + mname[: -len("constructor")], + pname, + ) diff --git a/ipc/ipdl/ipdl/util.py b/ipc/ipdl/ipdl/util.py index be50e5fc6dc01e..60d9c904e254d5 100644 --- a/ipc/ipdl/ipdl/util.py +++ b/ipc/ipdl/ipdl/util.py @@ -9,4 +9,4 @@ # this instead. def hash_str(s): assert isinstance(s, str) - return zlib.adler32(s.encode('utf-8')) + return zlib.adler32(s.encode("utf-8")) diff --git a/ipc/ipdl/moz.build b/ipc/ipdl/moz.build index 64c9acdcf913c8..b5f01c5267825c 100644 --- a/ipc/ipdl/moz.build +++ b/ipc/ipdl/moz.build @@ -4,18 +4,18 @@ # License, v. 2.0. If a copy of the MPL was not distributed with this # file, You can obtain one at http://mozilla.org/MPL/2.0/. -DIRS += ['test'] +DIRS += ["test"] -include('/ipc/chromium/chromium-config.mozbuild') +include("/ipc/chromium/chromium-config.mozbuild") # Generated by ipdl.py -SOURCES += ['!IPCMessageTypeName.cpp'] +SOURCES += ["!IPCMessageTypeName.cpp"] -FINAL_LIBRARY = 'xul' +FINAL_LIBRARY = "xul" # We #include some things in the dom/plugins/ directory that rely on # toolkit libraries. -CXXFLAGS += CONFIG['TK_CFLAGS'] +CXXFLAGS += CONFIG["TK_CFLAGS"] # Add libFuzzer configuration directives -include('/tools/fuzzing/libfuzzer-config.mozbuild') +include("/tools/fuzzing/libfuzzer-config.mozbuild") diff --git a/ipc/ipdl/test/cxx/app/moz.build b/ipc/ipdl/test/cxx/app/moz.build index 35aaabf583a0a6..909360062a1362 100644 --- a/ipc/ipdl/test/cxx/app/moz.build +++ b/ipc/ipdl/test/cxx/app/moz.build @@ -4,17 +4,17 @@ # License, v. 2.0. If a copy of the MPL was not distributed with this # file, You can obtain one at http://mozilla.org/MPL/2.0/. 
-GeckoProgram('ipdlunittest', linkage='dependent') +GeckoProgram("ipdlunittest", linkage="dependent") SOURCES += [ - 'TestIPDL.cpp', + "TestIPDL.cpp", ] -include('/ipc/chromium/chromium-config.mozbuild') +include("/ipc/chromium/chromium-config.mozbuild") LOCAL_INCLUDES += [ - '/toolkit/xre', - '/xpcom/base', + "/toolkit/xre", + "/xpcom/base", ] -if CONFIG['CC_TYPE'] == 'clang-cl': - WIN32_EXE_LDFLAGS += ['-ENTRY:wmainCRTStartup'] +if CONFIG["CC_TYPE"] == "clang-cl": + WIN32_EXE_LDFLAGS += ["-ENTRY:wmainCRTStartup"] diff --git a/ipc/ipdl/test/cxx/genIPDLUnitTests.py b/ipc/ipdl/test/cxx/genIPDLUnitTests.py index 0958d0e691b4df..3636ec041683d1 100644 --- a/ipc/ipdl/test/cxx/genIPDLUnitTests.py +++ b/ipc/ipdl/test/cxx/genIPDLUnitTests.py @@ -9,7 +9,8 @@ def usage(): - print(""" + print( + """ %s template_file -t unit_tests... -e extra_protocols... TEMPLATE_FILE is used to generate to generate the unit-tester .cpp @@ -17,47 +18,64 @@ def usage(): EXTRA_PROTOCOLS are top-level protocols for subprocesses that can be spawned in tests but are not unit tests in and of themselves -""" % (sys.argv[0]), file=sys.stderr) +""" + % (sys.argv[0]), + file=sys.stderr, + ) sys.exit(1) def main(argv): template = argv[1] - if argv[2] != '-t': + if argv[2] != "-t": usage() i = 3 unittests = [] - while argv[i] != '-e': + while argv[i] != "-e": unittests.append(argv[i]) i += 1 - extras = argv[(i+1):] + extras = argv[(i + 1) :] - includes = '\n'.join([ - '#include "%s.h"' % (t) for t in unittests]) + includes = "\n".join(['#include "%s.h"' % (t) for t in unittests]) - enum_values = '\n'.join([ - ' %s,' % (t) for t in unittests+extras]) + enum_values = "\n".join([" %s," % (t) for t in unittests + extras]) last_enum = unittests[-1] - string_to_enums = '\n'.join([ - ''' else if (!strcmp(aString, "%s")) - return %s;''' % (t, t) for t in unittests+extras]) - - enum_to_strings = '\n'.join([ - ''' case %s: - return "%s";''' % (t, t) for t in unittests+extras]) - - parent_delete_cases = '\n'.join([ - ''' case %s: { + string_to_enums = "\n".join( + [ + """ else if (!strcmp(aString, "%s")) + return %s;""" + % (t, t) + for t in unittests + extras + ] + ) + + enum_to_strings = "\n".join( + [ + """ case %s: + return "%s";""" + % (t, t) + for t in unittests + extras + ] + ) + + parent_delete_cases = "\n".join( + [ + """ case %s: { delete reinterpret_cast<%sParent*>(gParentActor); return; } -''' % (t, t) for t in unittests]) - - parent_enabled_cases_proc = '\n'.join([ - ''' case %s: { +""" + % (t, t) + for t in unittests + ] + ) + + parent_enabled_cases_proc = "\n".join( + [ + """ case %s: { if (!%sParent::RunTestInProcesses()) { passed("N/A to proc"); DeferredParentShutdown(); @@ -65,20 +83,30 @@ def main(argv): } break; } -''' % (t, t) for t in unittests]) - - parent_main_cases_proc = '\n'.join([ - ''' case %s: { +""" + % (t, t) + for t in unittests + ] + ) + + parent_main_cases_proc = "\n".join( + [ + """ case %s: { %sParent** parent = reinterpret_cast<%sParent**>(&gParentActor); *parent = new %sParent(); (*parent)->Open(transport, child); return (*parent)->Main(); } -''' % (t, t, t, t) for t in unittests]) - - parent_enabled_cases_thread = '\n'.join([ - ''' case %s: { +""" + % (t, t, t, t) + for t in unittests + ] + ) + + parent_enabled_cases_thread = "\n".join( + [ + """ case %s: { if (!%sParent::RunTestInThreads()) { passed("N/A to threads"); DeferredParentShutdown(); @@ -86,10 +114,15 @@ def main(argv): } break; } -''' % (t, t) for t in unittests]) - - parent_main_cases_thread = '\n'.join([ - ''' case %s: { +""" + % (t, t) 
+ for t in unittests + ] + ) + + parent_main_cases_thread = "\n".join( + [ + """ case %s: { %sParent** parent = reinterpret_cast<%sParent**>(&gParentActor); *parent = new %sParent(); @@ -105,30 +138,45 @@ def main(argv): (*parent)->Open(childChannel, childMessageLoop, parentSide); return (*parent)->Main(); } -''' % (t, t, t, t, t, t, t) for t in unittests]) - - child_delete_cases = '\n'.join([ - ''' case %s: { +""" + % (t, t, t, t, t, t, t) + for t in unittests + ] + ) + + child_delete_cases = "\n".join( + [ + """ case %s: { delete reinterpret_cast<%sChild*>(gChildActor); return; } -''' % (t, t) for t in unittests+extras]) - - child_init_cases = '\n'.join([ - ''' case %s: { +""" + % (t, t) + for t in unittests + extras + ] + ) + + child_init_cases = "\n".join( + [ + """ case %s: { %sChild** child = reinterpret_cast<%sChild**>(&gChildActor); *child = new %sChild(); (*child)->Open(transport, parentPid, worker); return; } -''' % (t, t, t, t) for t in unittests+extras]) +""" + % (t, t, t, t) + for t in unittests + extras + ] + ) - templatefile = open(template, 'r', encoding='utf-8') + templatefile = open(template, "r", encoding="utf-8") sys.stdout.write( string.Template(templatefile.read()).substitute( INCLUDES=includes, - ENUM_VALUES=enum_values, LAST_ENUM=last_enum, + ENUM_VALUES=enum_values, + LAST_ENUM=last_enum, STRING_TO_ENUMS=string_to_enums, ENUM_TO_STRINGS=enum_to_strings, PARENT_DELETE_CASES=parent_delete_cases, @@ -137,9 +185,11 @@ def main(argv): PARENT_ENABLED_CASES_THREAD=parent_enabled_cases_thread, PARENT_MAIN_CASES_THREAD=parent_main_cases_thread, CHILD_DELETE_CASES=child_delete_cases, - CHILD_INIT_CASES=child_init_cases)) + CHILD_INIT_CASES=child_init_cases, + ) + ) templatefile.close() -if __name__ == '__main__': +if __name__ == "__main__": main(sys.argv) diff --git a/ipc/ipdl/test/cxx/moz.build b/ipc/ipdl/test/cxx/moz.build index 3cf0934b3f1f46..591fa269a953ce 100644 --- a/ipc/ipdl/test/cxx/moz.build +++ b/ipc/ipdl/test/cxx/moz.build @@ -4,170 +4,170 @@ # License, v. 2.0. If a copy of the MPL was not distributed with this # file, You can obtain one at http://mozilla.org/MPL/2.0/. 
-DIRS += ['app'] +DIRS += ["app"] EXPORTS.mozilla._ipdltest += [ - 'IPDLUnitTestProcessChild.h', - 'IPDLUnitTests.h', - 'IPDLUnitTestTypes.h', - 'IPDLUnitTestUtils.h', - 'TestActorPunning.h', - 'TestAsyncReturns.h', - 'TestBadActor.h', - 'TestCancel.h', - 'TestCrashCleanup.h', - 'TestDataStructures.h', - 'TestDemon.h', - 'TestDesc.h', - 'TestEndpointBridgeMain.h', - 'TestEndpointOpens.h', - 'TestFailedCtor.h', - 'TestHangs.h', - 'TestHighestPrio.h', - 'TestInterruptErrorCleanup.h', - 'TestInterruptRaces.h', - 'TestInterruptShutdownRace.h', - 'TestJSON.h', - 'TestLatency.h', - 'TestManyChildAllocs.h', - 'TestMultiMgrs.h', - 'TestNestedLoops.h', - 'TestOffMainThreadPainting.h', - 'TestRaceDeadlock.h', - 'TestRaceDeferral.h', - 'TestRacyInterruptReplies.h', - 'TestRacyReentry.h', - 'TestRacyUndefer.h', - 'TestRPC.h', - 'TestSanity.h', - 'TestSelfManageRoot.h', - 'TestShmem.h', - 'TestShutdown.h', - 'TestStackHooks.h', - 'TestSyncError.h', - 'TestSyncHang.h', - 'TestSyncWakeup.h', - 'TestUniquePtrIPC.h', - 'TestUrgency.h', - 'TestUrgentHangs.h', + "IPDLUnitTestProcessChild.h", + "IPDLUnitTests.h", + "IPDLUnitTestTypes.h", + "IPDLUnitTestUtils.h", + "TestActorPunning.h", + "TestAsyncReturns.h", + "TestBadActor.h", + "TestCancel.h", + "TestCrashCleanup.h", + "TestDataStructures.h", + "TestDemon.h", + "TestDesc.h", + "TestEndpointBridgeMain.h", + "TestEndpointOpens.h", + "TestFailedCtor.h", + "TestHangs.h", + "TestHighestPrio.h", + "TestInterruptErrorCleanup.h", + "TestInterruptRaces.h", + "TestInterruptShutdownRace.h", + "TestJSON.h", + "TestLatency.h", + "TestManyChildAllocs.h", + "TestMultiMgrs.h", + "TestNestedLoops.h", + "TestOffMainThreadPainting.h", + "TestRaceDeadlock.h", + "TestRaceDeferral.h", + "TestRacyInterruptReplies.h", + "TestRacyReentry.h", + "TestRacyUndefer.h", + "TestRPC.h", + "TestSanity.h", + "TestSelfManageRoot.h", + "TestShmem.h", + "TestShutdown.h", + "TestStackHooks.h", + "TestSyncError.h", + "TestSyncHang.h", + "TestSyncWakeup.h", + "TestUniquePtrIPC.h", + "TestUrgency.h", + "TestUrgentHangs.h", ] SOURCES += [ - 'TestActorPunning.cpp', - 'TestAsyncReturns.cpp', - 'TestBadActor.cpp', - 'TestCancel.cpp', - 'TestCrashCleanup.cpp', - 'TestDataStructures.cpp', - 'TestDemon.cpp', - 'TestDesc.cpp', - 'TestEndpointBridgeMain.cpp', - 'TestEndpointOpens.cpp', - 'TestFailedCtor.cpp', - 'TestHangs.cpp', - 'TestHighestPrio.cpp', - 'TestInterruptErrorCleanup.cpp', - 'TestInterruptRaces.cpp', - 'TestInterruptShutdownRace.cpp', - 'TestJSON.cpp', - 'TestLatency.cpp', - 'TestManyChildAllocs.cpp', - 'TestMultiMgrs.cpp', - 'TestNestedLoops.cpp', - 'TestOffMainThreadPainting.cpp', - 'TestRaceDeadlock.cpp', - 'TestRaceDeferral.cpp', - 'TestRacyInterruptReplies.cpp', - 'TestRacyReentry.cpp', - 'TestRacyUndefer.cpp', - 'TestRPC.cpp', - 'TestSanity.cpp', - 'TestSelfManageRoot.cpp', - 'TestShmem.cpp', - 'TestShutdown.cpp', - 'TestStackHooks.cpp', - 'TestSyncError.cpp', - 'TestSyncHang.cpp', - 'TestSyncWakeup.cpp', - 'TestUniquePtrIPC.cpp', - 'TestUrgency.cpp', - 'TestUrgentHangs.cpp', + "TestActorPunning.cpp", + "TestAsyncReturns.cpp", + "TestBadActor.cpp", + "TestCancel.cpp", + "TestCrashCleanup.cpp", + "TestDataStructures.cpp", + "TestDemon.cpp", + "TestDesc.cpp", + "TestEndpointBridgeMain.cpp", + "TestEndpointOpens.cpp", + "TestFailedCtor.cpp", + "TestHangs.cpp", + "TestHighestPrio.cpp", + "TestInterruptErrorCleanup.cpp", + "TestInterruptRaces.cpp", + "TestInterruptShutdownRace.cpp", + "TestJSON.cpp", + "TestLatency.cpp", + "TestManyChildAllocs.cpp", + "TestMultiMgrs.cpp", + 
"TestNestedLoops.cpp", + "TestOffMainThreadPainting.cpp", + "TestRaceDeadlock.cpp", + "TestRaceDeferral.cpp", + "TestRacyInterruptReplies.cpp", + "TestRacyReentry.cpp", + "TestRacyUndefer.cpp", + "TestRPC.cpp", + "TestSanity.cpp", + "TestSelfManageRoot.cpp", + "TestShmem.cpp", + "TestShutdown.cpp", + "TestStackHooks.cpp", + "TestSyncError.cpp", + "TestSyncHang.cpp", + "TestSyncWakeup.cpp", + "TestUniquePtrIPC.cpp", + "TestUrgency.cpp", + "TestUrgentHangs.cpp", ] SOURCES += [ - '!IPDLUnitTests.cpp', - 'IPDLUnitTestProcessChild.cpp', - 'IPDLUnitTestSubprocess.cpp', + "!IPDLUnitTests.cpp", + "IPDLUnitTestProcessChild.cpp", + "IPDLUnitTestSubprocess.cpp", ] IPDL_SOURCES += [ - 'PTestActorPunning.ipdl', - 'PTestActorPunningPunned.ipdl', - 'PTestActorPunningSub.ipdl', - 'PTestAsyncReturns.ipdl', - 'PTestBadActor.ipdl', - 'PTestBadActorSub.ipdl', - 'PTestCancel.ipdl', - 'PTestCrashCleanup.ipdl', - 'PTestDataStructures.ipdl', - 'PTestDataStructuresCommon.ipdlh', - 'PTestDataStructuresSub.ipdl', - 'PTestDemon.ipdl', - 'PTestDesc.ipdl', - 'PTestDescSub.ipdl', - 'PTestDescSubsub.ipdl', - 'PTestEndpointBridgeMain.ipdl', - 'PTestEndpointBridgeMainSub.ipdl', - 'PTestEndpointBridgeSub.ipdl', - 'PTestEndpointOpens.ipdl', - 'PTestEndpointOpensOpened.ipdl', - 'PTestFailedCtor.ipdl', - 'PTestFailedCtorSub.ipdl', - 'PTestFailedCtorSubsub.ipdl', - 'PTestHandle.ipdl', - 'PTestHangs.ipdl', - 'PTestHighestPrio.ipdl', - 'PTestIndirectProtocolParam.ipdlh', - 'PTestIndirectProtocolParamFirst.ipdl', - 'PTestIndirectProtocolParamManage.ipdl', - 'PTestIndirectProtocolParamSecond.ipdl', - 'PTestInterruptErrorCleanup.ipdl', - 'PTestInterruptRaces.ipdl', - 'PTestInterruptShutdownRace.ipdl', - 'PTestJSON.ipdl', - 'PTestLatency.ipdl', - 'PTestLayoutThread.ipdl', - 'PTestManyChildAllocs.ipdl', - 'PTestManyChildAllocsSub.ipdl', - 'PTestMultiMgrs.ipdl', - 'PTestMultiMgrsBottom.ipdl', - 'PTestMultiMgrsLeft.ipdl', - 'PTestMultiMgrsRight.ipdl', - 'PTestNestedLoops.ipdl', - 'PTestPaintThread.ipdl', - 'PTestPriority.ipdl', - 'PTestRaceDeadlock.ipdl', - 'PTestRaceDeferral.ipdl', - 'PTestRacyInterruptReplies.ipdl', - 'PTestRacyReentry.ipdl', - 'PTestRacyUndefer.ipdl', - 'PTestRPC.ipdl', - 'PTestSanity.ipdl', - 'PTestSelfManage.ipdl', - 'PTestSelfManageRoot.ipdl', - 'PTestShmem.ipdl', - 'PTestShutdown.ipdl', - 'PTestShutdownSub.ipdl', - 'PTestShutdownSubsub.ipdl', - 'PTestStackHooks.ipdl', - 'PTestSyncError.ipdl', - 'PTestSyncHang.ipdl', - 'PTestSyncWakeup.ipdl', - 'PTestUniquePtrIPC.ipdl', - 'PTestUrgency.ipdl', - 'PTestUrgentHangs.ipdl', + "PTestActorPunning.ipdl", + "PTestActorPunningPunned.ipdl", + "PTestActorPunningSub.ipdl", + "PTestAsyncReturns.ipdl", + "PTestBadActor.ipdl", + "PTestBadActorSub.ipdl", + "PTestCancel.ipdl", + "PTestCrashCleanup.ipdl", + "PTestDataStructures.ipdl", + "PTestDataStructuresCommon.ipdlh", + "PTestDataStructuresSub.ipdl", + "PTestDemon.ipdl", + "PTestDesc.ipdl", + "PTestDescSub.ipdl", + "PTestDescSubsub.ipdl", + "PTestEndpointBridgeMain.ipdl", + "PTestEndpointBridgeMainSub.ipdl", + "PTestEndpointBridgeSub.ipdl", + "PTestEndpointOpens.ipdl", + "PTestEndpointOpensOpened.ipdl", + "PTestFailedCtor.ipdl", + "PTestFailedCtorSub.ipdl", + "PTestFailedCtorSubsub.ipdl", + "PTestHandle.ipdl", + "PTestHangs.ipdl", + "PTestHighestPrio.ipdl", + "PTestIndirectProtocolParam.ipdlh", + "PTestIndirectProtocolParamFirst.ipdl", + "PTestIndirectProtocolParamManage.ipdl", + "PTestIndirectProtocolParamSecond.ipdl", + "PTestInterruptErrorCleanup.ipdl", + "PTestInterruptRaces.ipdl", + "PTestInterruptShutdownRace.ipdl", + 
"PTestJSON.ipdl", + "PTestLatency.ipdl", + "PTestLayoutThread.ipdl", + "PTestManyChildAllocs.ipdl", + "PTestManyChildAllocsSub.ipdl", + "PTestMultiMgrs.ipdl", + "PTestMultiMgrsBottom.ipdl", + "PTestMultiMgrsLeft.ipdl", + "PTestMultiMgrsRight.ipdl", + "PTestNestedLoops.ipdl", + "PTestPaintThread.ipdl", + "PTestPriority.ipdl", + "PTestRaceDeadlock.ipdl", + "PTestRaceDeferral.ipdl", + "PTestRacyInterruptReplies.ipdl", + "PTestRacyReentry.ipdl", + "PTestRacyUndefer.ipdl", + "PTestRPC.ipdl", + "PTestSanity.ipdl", + "PTestSelfManage.ipdl", + "PTestSelfManageRoot.ipdl", + "PTestShmem.ipdl", + "PTestShutdown.ipdl", + "PTestShutdownSub.ipdl", + "PTestShutdownSubsub.ipdl", + "PTestStackHooks.ipdl", + "PTestSyncError.ipdl", + "PTestSyncHang.ipdl", + "PTestSyncWakeup.ipdl", + "PTestUniquePtrIPC.ipdl", + "PTestUrgency.ipdl", + "PTestUrgentHangs.ipdl", ] -include('/ipc/chromium/chromium-config.mozbuild') +include("/ipc/chromium/chromium-config.mozbuild") -FINAL_LIBRARY = 'xul' +FINAL_LIBRARY = "xul" diff --git a/ipc/ipdl/test/ipdl/IPDLCompile.py b/ipc/ipdl/test/ipdl/IPDLCompile.py index 0d595783c77d05..af45a70faa31b4 100644 --- a/ipc/ipdl/test/ipdl/IPDLCompile.py +++ b/ipc/ipdl/test/ipdl/IPDLCompile.py @@ -9,7 +9,7 @@ class IPDLCompile: - def __init__(self, specfilename, ipdlargv=['python', 'ipdl.py']): + def __init__(self, specfilename, ipdlargv=["python", "ipdl.py"]): self.argv = copy.deepcopy(ipdlargv) self.specfilename = specfilename self.stdout = None @@ -17,20 +17,20 @@ def __init__(self, specfilename, ipdlargv=['python', 'ipdl.py']): self.returncode = None def run(self): - '''Run |self.specfilename| through the IPDL compiler.''' + """Run |self.specfilename| through the IPDL compiler.""" assert self.returncode is None - tmpoutdir = tempfile.mkdtemp(prefix='ipdl_unit_test') + tmpoutdir = tempfile.mkdtemp(prefix="ipdl_unit_test") try: - self.argv.extend([ - '-d', tmpoutdir, - self.specfilename - ]) + self.argv.extend(["-d", tmpoutdir, self.specfilename]) proc = subprocess.Popen( - args=self.argv, stdout=subprocess.PIPE, - stderr=subprocess.PIPE, universal_newlines=True) + args=self.argv, + stdout=subprocess.PIPE, + stderr=subprocess.PIPE, + universal_newlines=True, + ) self.stdout, self.stderr = proc.communicate() self.returncode = proc.returncode @@ -48,30 +48,31 @@ def run(self): proc.kill() def completed(self): - return (self.returncode is not None - and isinstance(self.stdout, str) - and isinstance(self.stderr, str)) + return ( + self.returncode is not None + and isinstance(self.stdout, str) + and isinstance(self.stderr, str) + ) def error(self, expectedError): - '''Return True iff compiling self.specstring resulted in an -IPDL compiler error.''' + """Return True iff compiling self.specstring resulted in an + IPDL compiler error.""" assert self.completed() errorRe = re.compile(re.escape(expectedError)) return None is not re.search(errorRe, self.stderr) def exception(self): - '''Return True iff compiling self.specstring resulted in a Python -exception being raised.''' + """Return True iff compiling self.specstring resulted in a Python + exception being raised.""" assert self.completed() - return None is not re.search(r'Traceback (most recent call last):', - self.stderr) + return None is not re.search(r"Traceback (most recent call last):", self.stderr) def ok(self): - '''Return True iff compiling self.specstring was successful.''' + """Return True iff compiling self.specstring was successful.""" assert self.completed() - return (not self.exception() - and not self.error("error:") - and (0 == 
self.returncode)) + return ( + not self.exception() and not self.error("error:") and (0 == self.returncode) + ) diff --git a/ipc/ipdl/test/ipdl/moz.build b/ipc/ipdl/test/ipdl/moz.build index 28919c271d3326..568f361a54234b 100644 --- a/ipc/ipdl/test/ipdl/moz.build +++ b/ipc/ipdl/test/ipdl/moz.build @@ -3,4 +3,3 @@ # This Source Code Form is subject to the terms of the Mozilla Public # License, v. 2.0. If a copy of the MPL was not distributed with this # file, You can obtain one at http://mozilla.org/MPL/2.0/. - diff --git a/ipc/ipdl/test/ipdl/runtests.py b/ipc/ipdl/test/ipdl/runtests.py index 91c9ca19acf83c..7338c7087a67dc 100644 --- a/ipc/ipdl/test/ipdl/runtests.py +++ b/ipc/ipdl/test/ipdl/runtests.py @@ -6,7 +6,7 @@ class IPDLTestCase(unittest.TestCase): def __init__(self, ipdlargv, filename): - unittest.TestCase.__init__(self, 'test') + unittest.TestCase.__init__(self, "test") self.filename = filename self.compile = IPDLCompile(filename, ipdlargv) @@ -16,25 +16,32 @@ def test(self): self.checkPassed() def mkCustomMsg(self, msg): - return ''' + return """ ### Command: %s ### %s ### stderr: -%s''' % (' '.join(self.compile.argv), msg, self.compile.stderr) +%s""" % ( + " ".join(self.compile.argv), + msg, + self.compile.stderr, + ) def mkFailMsg(self): - return ''' + return """ ### Command: %s ### stderr: -%s''' % (' '.join(self.compile.argv), self.compile.stderr) +%s""" % ( + " ".join(self.compile.argv), + self.compile.stderr, + ) def shortDescription(self): return '%s test of "%s"' % (self.__class__.__name__, self.filename) class OkTestCase(IPDLTestCase): - '''An invocation of the IPDL compiler on a valid specification. -The IPDL compiler should not produce errors or exceptions.''' + """An invocation of the IPDL compiler on a valid specification. + The IPDL compiler should not produce errors or exceptions.""" def __init__(self, ipdlargv, filename): IPDLTestCase.__init__(self, ipdlargv, filename) @@ -44,14 +51,14 @@ def checkPassed(self): class ErrorTestCase(IPDLTestCase): - '''An invocation of the IPDL compiler on an *invalid* specification. -The IPDL compiler *should* produce errors but not exceptions.''' + """An invocation of the IPDL compiler on an *invalid* specification. + The IPDL compiler *should* produce errors but not exceptions.""" def __init__(self, ipdlargv, filename): IPDLTestCase.__init__(self, ipdlargv, filename) # Look for expected errors in the input file. - f = open(filename, 'r') + f = open(filename, "r") self.expectedErrorMessage = [] for l in f: if l.startswith("//error:"): @@ -59,18 +66,24 @@ def __init__(self, ipdlargv, filename): f.close() def checkPassed(self): - self.assertNotEqual(self.expectedErrorMessage, [], - self.mkCustomMsg("Error test should contain at least " + - "one line starting with //error: " + - "that indicates the expected failure.")) + self.assertNotEqual( + self.expectedErrorMessage, + [], + self.mkCustomMsg( + "Error test should contain at least " + + "one line starting with //error: " + + "that indicates the expected failure." + ), + ) for e in self.expectedErrorMessage: - self.assertTrue(self.compile.error(e), - self.mkCustomMsg('Did not see expected error "' + - e + '"')) + self.assertTrue( + self.compile.error(e), + self.mkCustomMsg('Did not see expected error "' + e + '"'), + ) -if __name__ == '__main__': +if __name__ == "__main__": import sys okdir = sys.argv[1] @@ -87,21 +100,17 @@ def checkPassed(self): if errortests: # The extra subdirectory is used for non-failing files we want # to include from failing files. 
- errorIncludes = ['-I', os.path.join(errordir, 'extra'), - '-I', errordir] - errorsuite.addTest(ErrorTestCase(ipdlargv + errorIncludes, - arg)) + errorIncludes = ["-I", os.path.join(errordir, "extra"), "-I", errordir] + errorsuite.addTest(ErrorTestCase(ipdlargv + errorIncludes, arg)) elif oktests: - if 'ERRORTESTS' == arg: + if "ERRORTESTS" == arg: errortests = True continue - oksuite.addTest(OkTestCase(ipdlargv + ['-I', okdir], - arg)) + oksuite.addTest(OkTestCase(ipdlargv + ["-I", okdir], arg)) else: - if 'OKTESTS' == arg: + if "OKTESTS" == arg: oktests = True continue ipdlargv.append(arg) - (unittest.TextTestRunner()).run( - unittest.TestSuite([oksuite, errorsuite])) + (unittest.TextTestRunner()).run(unittest.TestSuite([oksuite, errorsuite])) diff --git a/ipc/ipdl/test/moz.build b/ipc/ipdl/test/moz.build index 5d5caacf863b34..7c0986edb1b241 100644 --- a/ipc/ipdl/test/moz.build +++ b/ipc/ipdl/test/moz.build @@ -6,7 +6,7 @@ # we ignore MOZ_IPDL_TESTS for the IPDL-compiler-only tests, since they're # quick and painless -DIRS += ['ipdl'] +DIRS += ["ipdl"] -if CONFIG['MOZ_IPDL_TESTS']: - DIRS += ['cxx'] +if CONFIG["MOZ_IPDL_TESTS"]: + DIRS += ["cxx"] diff --git a/ipc/moz.build b/ipc/moz.build index f0b1f7f676773d..f9e9996c908202 100644 --- a/ipc/moz.build +++ b/ipc/moz.build @@ -5,18 +5,18 @@ # file, You can obtain one at http://mozilla.org/MPL/2.0/. DIRS += [ - 'app', - 'chromium', - 'glue', - 'ipdl', - 'testshell', + "app", + "chromium", + "glue", + "ipdl", + "testshell", ] -if CONFIG['OS_ARCH'] == 'WINNT': - DIRS += ['mscom'] +if CONFIG["OS_ARCH"] == "WINNT": + DIRS += ["mscom"] TEST_DIRS += [ - 'gtest', + "gtest", ] with Files("**"): diff --git a/ipc/mscom/moz.build b/ipc/mscom/moz.build index 8d428c5f9932f0..78d2dd6d25ffb2 100644 --- a/ipc/mscom/moz.build +++ b/ipc/mscom/moz.build @@ -5,88 +5,88 @@ # file, You can obtain one at http://mozilla.org/MPL/2.0/. 
EXPORTS.mozilla.mscom += [ - 'Aggregation.h', - 'AgileReference.h', - 'ApartmentRegion.h', - 'AsyncInvoker.h', - 'COMPtrHolder.h', - 'EnsureMTA.h', - 'Objref.h', - 'PassthruProxy.h', - 'ProcessRuntime.h', - 'ProfilerMarkers.h', - 'ProxyStream.h', - 'Ptr.h', - 'Utils.h', + "Aggregation.h", + "AgileReference.h", + "ApartmentRegion.h", + "AsyncInvoker.h", + "COMPtrHolder.h", + "EnsureMTA.h", + "Objref.h", + "PassthruProxy.h", + "ProcessRuntime.h", + "ProfilerMarkers.h", + "ProxyStream.h", + "Ptr.h", + "Utils.h", ] DIRS += [ - 'mozglue', + "mozglue", ] SOURCES += [ - 'VTableBuilder.c', + "VTableBuilder.c", ] UNIFIED_SOURCES += [ - 'AgileReference.cpp', - 'EnsureMTA.cpp', - 'Objref.cpp', - 'PassthruProxy.cpp', - 'ProcessRuntime.cpp', - 'ProfilerMarkers.cpp', - 'ProxyStream.cpp', - 'RegistrationAnnotator.cpp', - 'Utils.cpp', + "AgileReference.cpp", + "EnsureMTA.cpp", + "Objref.cpp", + "PassthruProxy.cpp", + "ProcessRuntime.cpp", + "ProfilerMarkers.cpp", + "ProxyStream.cpp", + "RegistrationAnnotator.cpp", + "Utils.cpp", ] -if CONFIG['ACCESSIBILITY']: +if CONFIG["ACCESSIBILITY"]: DIRS += [ - 'oop', + "oop", ] EXPORTS.mozilla.mscom += [ - 'ActivationContext.h', - 'DispatchForwarder.h', - 'FastMarshaler.h', - 'IHandlerProvider.h', - 'Interceptor.h', - 'InterceptorLog.h', - 'MainThreadHandoff.h', - 'MainThreadInvoker.h', - 'Registration.h', - 'SpinEvent.h', - 'StructStream.h', - 'WeakRef.h', + "ActivationContext.h", + "DispatchForwarder.h", + "FastMarshaler.h", + "IHandlerProvider.h", + "Interceptor.h", + "InterceptorLog.h", + "MainThreadHandoff.h", + "MainThreadInvoker.h", + "Registration.h", + "SpinEvent.h", + "StructStream.h", + "WeakRef.h", ] SOURCES += [ - 'Interceptor.cpp', - 'MainThreadHandoff.cpp', - 'Registration.cpp', - 'SpinEvent.cpp', - 'WeakRef.cpp', + "Interceptor.cpp", + "MainThreadHandoff.cpp", + "Registration.cpp", + "SpinEvent.cpp", + "WeakRef.cpp", ] UNIFIED_SOURCES += [ - 'ActivationContext.cpp', - 'DispatchForwarder.cpp', - 'FastMarshaler.cpp', - 'InterceptorLog.cpp', - 'MainThreadInvoker.cpp', - 'StructStream.cpp', + "ActivationContext.cpp", + "DispatchForwarder.cpp", + "FastMarshaler.cpp", + "InterceptorLog.cpp", + "MainThreadInvoker.cpp", + "StructStream.cpp", ] LOCAL_INCLUDES += [ - '/xpcom/base', - '/xpcom/build', + "/xpcom/base", + "/xpcom/build", ] -DEFINES['MOZ_MSCOM_REMARSHAL_NO_HANDLER'] = True +DEFINES["MOZ_MSCOM_REMARSHAL_NO_HANDLER"] = True -include('/ipc/chromium/chromium-config.mozbuild') +include("/ipc/chromium/chromium-config.mozbuild") -FINAL_LIBRARY = 'xul' +FINAL_LIBRARY = "xul" with Files("**"): BUG_COMPONENT = ("Core", "IPC: MSCOM") diff --git a/ipc/mscom/mozglue/moz.build b/ipc/mscom/mozglue/moz.build index a8c497df9b0115..f52a77232450e4 100644 --- a/ipc/mscom/mozglue/moz.build +++ b/ipc/mscom/mozglue/moz.build @@ -4,12 +4,12 @@ # License, v. 2.0. If a copy of the MPL was not distributed with this # file, You can obtain one at http://mozilla.org/MPL/2.0/. -FINAL_LIBRARY = 'mozglue' +FINAL_LIBRARY = "mozglue" EXPORTS.mozilla.mscom += [ - 'ProcessRuntimeShared.h', + "ProcessRuntimeShared.h", ] UNIFIED_SOURCES += [ - 'ProcessRuntimeShared.cpp', + "ProcessRuntimeShared.cpp", ] diff --git a/ipc/mscom/oop/moz.build b/ipc/mscom/oop/moz.build index 8be7842b6e076e..987c899ce7786b 100644 --- a/ipc/mscom/oop/moz.build +++ b/ipc/mscom/oop/moz.build @@ -4,31 +4,31 @@ # License, v. 2.0. If a copy of the MPL was not distributed with this # file, You can obtain one at http://mozilla.org/MPL/2.0/. 
-Library('mscom_oop') +Library("mscom_oop") SOURCES += [ - '../ActivationContext.cpp', - '../Objref.cpp', - '../Registration.cpp', - '../StructStream.cpp', - '../Utils.cpp', + "../ActivationContext.cpp", + "../Objref.cpp", + "../Registration.cpp", + "../StructStream.cpp", + "../Utils.cpp", ] UNIFIED_SOURCES += [ - 'Handler.cpp', - 'Module.cpp', + "Handler.cpp", + "Module.cpp", ] OS_LIBS += [ - 'ole32', - 'oleaut32', - 'shlwapi', + "ole32", + "oleaut32", + "shlwapi", ] -LIBRARY_DEFINES['UNICODE'] = True -LIBRARY_DEFINES['_UNICODE'] = True -LIBRARY_DEFINES['MOZ_NO_MOZALLOC'] = True -LIBRARY_DEFINES['MOZ_MSCOM_REMARSHAL_NO_HANDLER'] = True +LIBRARY_DEFINES["UNICODE"] = True +LIBRARY_DEFINES["_UNICODE"] = True +LIBRARY_DEFINES["MOZ_NO_MOZALLOC"] = True +LIBRARY_DEFINES["MOZ_MSCOM_REMARSHAL_NO_HANDLER"] = True DisableStlWrapping() NO_EXPAND_LIBS = True diff --git a/ipc/pull-chromium.py b/ipc/pull-chromium.py index 812eda789c735b..26205001caf08b 100644 --- a/ipc/pull-chromium.py +++ b/ipc/pull-chromium.py @@ -19,29 +19,37 @@ topsrcdir, chromiumtree, rev = sys.argv[1:] -if not os.path.exists(os.path.join(topsrcdir, 'client.py')): +if not os.path.exists(os.path.join(topsrcdir, "client.py")): print >>sys.stderr, "Incorrect topsrcdir" sys.exit(1) -if not os.path.exists(os.path.join(chromiumtree, 'src/DEPS')): +if not os.path.exists(os.path.join(chromiumtree, "src/DEPS")): print >>sys.stderr, "Incorrect chromium directory, missing DEPS" sys.exit(1) -check_call(['gclient', 'sync', '--force', '--revision=src@%s' % rev], cwd=chromiumtree) +check_call(["gclient", "sync", "--force", "--revision=src@%s" % rev], cwd=chromiumtree) -chromiumsrc = os.path.join(topsrcdir, 'ipc/chromium/src') +chromiumsrc = os.path.join(topsrcdir, "ipc/chromium/src") os.path.exists(chromiumsrc) and rmtree(chromiumsrc) def doexport(svnpath): localpath = os.path.join(chromiumsrc, svnpath) os.makedirs(os.path.dirname(localpath)) - check_call(['svn', 'export', '-r', 'BASE', os.path.join(chromiumtree, 'src', svnpath), - localpath]) - - -doexport('base') -doexport('chrome/common') -doexport('build/build_config.h') -doexport('testing/gtest/include') -doexport('third_party/libevent') + check_call( + [ + "svn", + "export", + "-r", + "BASE", + os.path.join(chromiumtree, "src", svnpath), + localpath, + ] + ) + + +doexport("base") +doexport("chrome/common") +doexport("build/build_config.h") +doexport("testing/gtest/include") +doexport("third_party/libevent") diff --git a/ipc/testshell/moz.build b/ipc/testshell/moz.build index 982178843b3b23..7e33ee4bbfe5eb 100644 --- a/ipc/testshell/moz.build +++ b/ipc/testshell/moz.build @@ -5,36 +5,36 @@ # file, You can obtain one at http://mozilla.org/MPL/2.0/. 
EXPORTS.mozilla.ipc += [ - 'TestShellChild.h', - 'TestShellParent.h', - 'XPCShellEnvironment.h', + "TestShellChild.h", + "TestShellParent.h", + "XPCShellEnvironment.h", ] -XPCSHELL_TESTS_MANIFESTS += ['tests/xpcshell.ini'] +XPCSHELL_TESTS_MANIFESTS += ["tests/xpcshell.ini"] SOURCES += [ - 'TestShellChild.cpp', - 'TestShellParent.cpp', - 'XPCShellEnvironment.cpp', + "TestShellChild.cpp", + "TestShellParent.cpp", + "XPCShellEnvironment.cpp", ] IPDL_SOURCES = [ - 'PTestShell.ipdl', - 'PTestShellCommand.ipdl', + "PTestShell.ipdl", + "PTestShellCommand.ipdl", ] -include('/ipc/chromium/chromium-config.mozbuild') +include("/ipc/chromium/chromium-config.mozbuild") -FINAL_LIBRARY = 'xul' +FINAL_LIBRARY = "xul" # For xpcshell error messages and nsAutoJSString LOCAL_INCLUDES += [ - '/dom/base', - '/js/xpconnect/src', + "/dom/base", + "/js/xpconnect/src", ] -if CONFIG['CC_TYPE'] in ('clang', 'gcc'): - CXXFLAGS += ['-Wno-error=shadow'] +if CONFIG["CC_TYPE"] in ("clang", "gcc"): + CXXFLAGS += ["-Wno-error=shadow"] -with Files("**"): - BUG_COMPONENT = ("Core", "XPConnect") +with Files("**"): + BUG_COMPONENT = ("Core", "XPConnect") diff --git a/js/ductwork/debugger/moz.build b/js/ductwork/debugger/moz.build index 6309c202c23194..31b1fc6744126e 100644 --- a/js/ductwork/debugger/moz.build +++ b/js/ductwork/debugger/moz.build @@ -8,23 +8,23 @@ with Files("**"): BUG_COMPONENT = ("Core", "JavaScript Engine") XPIDL_SOURCES += [ - 'IJSDebugger.idl', + "IJSDebugger.idl", ] -XPIDL_MODULE = 'jsdebugger' +XPIDL_MODULE = "jsdebugger" -XPCSHELL_TESTS_MANIFESTS += ['tests/xpcshell.ini'] +XPCSHELL_TESTS_MANIFESTS += ["tests/xpcshell.ini"] SOURCES += [ - 'JSDebugger.cpp', + "JSDebugger.cpp", ] XPCOM_MANIFESTS += [ - 'components.conf', + "components.conf", ] EXTRA_JS_MODULES += [ - 'jsdebugger.jsm', + "jsdebugger.jsm", ] -FINAL_LIBRARY = 'xul' +FINAL_LIBRARY = "xul" diff --git a/js/ffi.configure b/js/ffi.configure index d84ca9eddc36ad..a2b9b3b8a27a21 100644 --- a/js/ffi.configure +++ b/js/ffi.configure @@ -4,62 +4,62 @@ # License, v. 2.0. If a copy of the MPL was not distributed with this # file, You can obtain one at http://mozilla.org/MPL/2.0/. + @depends(target) def force_system_ffi(target): # Pre-emptively move to system ffi for non-tier one platforms. - if target.cpu not in ('x86', 'x86_64', 'arm', 'aarch64'): + if target.cpu not in ("x86", "x86_64", "arm", "aarch64"): return True -imply_option('--with-system-ffi', force_system_ffi, "target") -option('--with-system-ffi', - help='Use system libffi (located with pkgconfig)') +imply_option("--with-system-ffi", force_system_ffi, "target") + +option("--with-system-ffi", help="Use system libffi (located with pkgconfig)") -use_system_ffi = depends_if('--with-system-ffi')(lambda _: True) +use_system_ffi = depends_if("--with-system-ffi")(lambda _: True) -system_ffi = pkg_check_modules('MOZ_FFI', 'libffi > 3.0.9', - when=use_system_ffi) +system_ffi = pkg_check_modules("MOZ_FFI", "libffi > 3.0.9", when=use_system_ffi) building_ffi = depends(system_ffi)(lambda v: v is None) -set_config('MOZ_SYSTEM_FFI', depends_if(system_ffi)(lambda _: True)) +set_config("MOZ_SYSTEM_FFI", depends_if(system_ffi)(lambda _: True)) # Target selection, based on ffi/configure.ac. @depends(target, when=building_ffi) def ffi_target(target): - if target.cpu not in ('x86', 'x86_64', 'arm', 'aarch64'): - die('Building libffi from the tree is not supported on this platform. 
' - 'Use --with-system-ffi instead.') + if target.cpu not in ("x86", "x86_64", "arm", "aarch64"): + die( + "Building libffi from the tree is not supported on this platform. " + "Use --with-system-ffi instead." + ) - if target.cpu == 'x86_64': - target_dir = 'x86' + if target.cpu == "x86_64": + target_dir = "x86" target_name = { - 'WINNT': 'X86_WIN64', - }.get(target.kernel, 'X86_64') + "WINNT": "X86_WIN64", + }.get(target.kernel, "X86_64") - elif target.cpu == 'x86': - target_dir = 'x86' + elif target.cpu == "x86": + target_dir = "x86" target_name = { - 'WINNT': 'X86_WIN32', - 'Darwin': 'X86_DARWIN', - 'FreeBSD': 'X86_FREEBSD', - 'OpenBSD': 'X86_FREEBSD', - }.get(target.kernel, 'X86') - - elif target.cpu == 'aarch64': - target_dir = 'aarch64' + "WINNT": "X86_WIN32", + "Darwin": "X86_DARWIN", + "FreeBSD": "X86_FREEBSD", + "OpenBSD": "X86_FREEBSD", + }.get(target.kernel, "X86") + + elif target.cpu == "aarch64": + target_dir = "aarch64" target_name = { - 'WINNT': 'ARM_WIN64', - }.get(target.kernel, 'AARCH64') + "WINNT": "ARM_WIN64", + }.get(target.kernel, "AARCH64") + + elif target.cpu == "arm": + target_dir = "arm" + target_name = "ARM" - elif target.cpu == 'arm': - target_dir = 'arm' - target_name = 'ARM' + return namespace(name=target_name, dir=target_dir) - return namespace( - name=target_name, - dir=target_dir - ) -set_config('FFI_TARGET', ffi_target.name) -set_config('FFI_TARGET_DIR', ffi_target.dir) +set_config("FFI_TARGET", ffi_target.name) +set_config("FFI_TARGET_DIR", ffi_target.dir) diff --git a/js/moz.build b/js/moz.build index 0da2f6ce8f4202..77fd41a797ae0d 100644 --- a/js/moz.build +++ b/js/moz.build @@ -1,17 +1,31 @@ -component_engine = ('Core', 'JavaScript Engine') -component_gc = ('Core', 'JavaScript: GC') +component_engine = ("Core", "JavaScript Engine") +component_gc = ("Core", "JavaScript: GC") with Files("**"): BUG_COMPONENT = component_engine -for header in ('GCAnnotations.h', 'GCAPI.h', 'HeapAPI.h', 'RootingAPI.h', 'SliceBudget.h', 'SweepingAPI.h', 'TraceKind.h', 'TracingAPI.h', 'WeakMapPtr.h', 'GCHashTable.h', 'GCPolicyAPI.h', 'GCVariant.h', 'GCVector.h'): - with Files('public/' + header): +for header in ( + "GCAnnotations.h", + "GCAPI.h", + "HeapAPI.h", + "RootingAPI.h", + "SliceBudget.h", + "SweepingAPI.h", + "TraceKind.h", + "TracingAPI.h", + "WeakMapPtr.h", + "GCHashTable.h", + "GCPolicyAPI.h", + "GCVariant.h", + "GCVector.h", +): + with Files("public/" + header): BUG_COMPONENT = component_gc with Files("src/**"): - SCHEDULES.inclusive += ['jittest', 'jsreftest'] + SCHEDULES.inclusive += ["jittest", "jsreftest"] with Files("public/**"): - SCHEDULES.inclusive += ['jittest', 'jsreftest'] + SCHEDULES.inclusive += ["jittest", "jsreftest"] SPHINX_TREES["/js"] = "src/doc" diff --git a/js/moz.configure b/js/moz.configure index 39d8e67da3951b..035be895e4b83d 100644 --- a/js/moz.configure +++ b/js/moz.configure @@ -4,168 +4,201 @@ # License, v. 2.0. If a copy of the MPL was not distributed with this # file, You can obtain one at http://mozilla.org/MPL/2.0/. + @depends(build_project) def js_standalone(build_project): - if build_project == 'js': + if build_project == "js": return True + # Branding # ============================================================== -option('--with-app-name', env='MOZ_APP_NAME', nargs=1, - help='Used for e.g. the binary program file name. If not set, ' - 'defaults to a lowercase form of MOZ_APP_BASENAME.') +option( + "--with-app-name", + env="MOZ_APP_NAME", + nargs=1, + help="Used for e.g. the binary program file name. 
If not set, " + "defaults to a lowercase form of MOZ_APP_BASENAME.", +) + -@depends('--with-app-name', js_standalone, moz_app_basename) +@depends("--with-app-name", js_standalone, moz_app_basename) def moz_app_name(value, js_standalone, moz_app_basename): if value: return value[0] if js_standalone: - return 'js' + return "js" return moz_app_basename.lower() -set_config('MOZ_APP_NAME', moz_app_name) + +set_config("MOZ_APP_NAME", moz_app_name) # SmooshMonkey (new frontend) # ================================================== # Define here in order to use the option from bindgen.configure. -option('--enable-smoosh', default=False, - help='Enable SmooshMonkey (new JS engine frontend)') +option( + "--enable-smoosh", + default=False, + help="Enable SmooshMonkey (new JS engine frontend)", +) + -@depends('--enable-smoosh') +@depends("--enable-smoosh") def enable_smoosh(value): if value: return True -set_config('JS_ENABLE_SMOOSH', enable_smoosh) -set_define('JS_ENABLE_SMOOSH', enable_smoosh) -include('../build/moz.configure/nspr.configure', - when='--enable-compile-environment') -include('../build/moz.configure/rust.configure', - when='--enable-compile-environment') -include('../build/moz.configure/bindgen.configure', - when='--enable-compile-environment') +set_config("JS_ENABLE_SMOOSH", enable_smoosh) +set_define("JS_ENABLE_SMOOSH", enable_smoosh) + +include("../build/moz.configure/nspr.configure", when="--enable-compile-environment") +include("../build/moz.configure/rust.configure", when="--enable-compile-environment") +include("../build/moz.configure/bindgen.configure", when="--enable-compile-environment") -set_config('JS_STANDALONE', js_standalone) -set_define('JS_STANDALONE', js_standalone) -add_old_configure_assignment('JS_STANDALONE', js_standalone) -option('--enable-js-shell', default=js_standalone, - help='{Build|Do not build} the JS shell') +set_config("JS_STANDALONE", js_standalone) +set_define("JS_STANDALONE", js_standalone) +add_old_configure_assignment("JS_STANDALONE", js_standalone) +option( + "--enable-js-shell", default=js_standalone, help="{Build|Do not build} the JS shell" +) -@depends('--enable-js-shell') + +@depends("--enable-js-shell") def js_disable_shell(value): if not value: return True -set_config('JS_DISABLE_SHELL', js_disable_shell) -set_define('JS_64BIT', depends(target)(lambda t: t.bitness == 64 or None)) +set_config("JS_DISABLE_SHELL", js_disable_shell) + +set_define("JS_64BIT", depends(target)(lambda t: t.bitness == 64 or None)) -set_define('JS_PUNBOX64', depends(target)(lambda t: t.bitness == 64 or None)) -set_define('JS_NUNBOX32', depends(target)(lambda t: t.bitness == 32 or None)) +set_define("JS_PUNBOX64", depends(target)(lambda t: t.bitness == 64 or None)) +set_define("JS_NUNBOX32", depends(target)(lambda t: t.bitness == 32 or None)) # SpiderMonkey as a shared library, and how its symbols are exported # ================================================================== -option('--disable-shared-js', when=js_standalone, - help='{Create|Do not create} a shared library') +option( + "--disable-shared-js", + when=js_standalone, + help="{Create|Do not create} a shared library", +) + +option( + "--disable-export-js", + when=js_standalone, + help="{Mark|Do not mark} JS symbols as DLL exported/visible", +) -option('--disable-export-js', when=js_standalone, - help='{Mark|Do not mark} JS symbols as DLL exported/visible') -@depends('--disable-shared-js', '--disable-export-js', when=js_standalone) +@depends("--disable-shared-js", "--disable-export-js", when=js_standalone) 
def shared_js(shared_js, export_js): if shared_js: if not export_js: - die('Must export JS symbols when building a shared library.') + die("Must export JS symbols when building a shared library.") return True -set_config('JS_SHARED_LIBRARY', shared_js) -add_old_configure_assignment('JS_SHARED_LIBRARY', shared_js) -@depends(shared_js, '--disable-export-js', when=js_standalone) +set_config("JS_SHARED_LIBRARY", shared_js) +add_old_configure_assignment("JS_SHARED_LIBRARY", shared_js) + + +@depends(shared_js, "--disable-export-js", when=js_standalone) def exportable_js_api(shared_js, export_js): if not shared_js and export_js: return True -set_define('STATIC_EXPORTABLE_JS_API', exportable_js_api) + +set_define("STATIC_EXPORTABLE_JS_API", exportable_js_api) + @depends(shared_js, exportable_js_api) def static_js_api(shared_js, export_js): if not shared_js and not export_js: return True -set_define('STATIC_JS_API', static_js_api) + +set_define("STATIC_JS_API", static_js_api) + @depends(shared_js) def static_js(value): if not value: return True -set_define('MOZ_STATIC_JS', static_js) +set_define("MOZ_STATIC_JS", static_js) -option(env='NO_RUST_PANIC_HOOK', when=js_standalone, - help='Disable rust panic hook') -set_define('NO_RUST_PANIC_HOOK', True, when='NO_RUST_PANIC_HOOK') +option(env="NO_RUST_PANIC_HOOK", when=js_standalone, help="Disable rust panic hook") + +set_define("NO_RUST_PANIC_HOOK", True, when="NO_RUST_PANIC_HOOK") # JIT support # ======================================================= @depends(target) def jit_default(target): - if target.cpu in ('x86', 'x86_64', 'arm', 'aarch64', 'mips32', 'mips64'): + if target.cpu in ("x86", "x86_64", "arm", "aarch64", "mips32", "mips64"): return True return False -option('--enable-jit', - default=jit_default, - help='{Enable|Disable} use of the JITs') -@deprecated_option('--enable-ion') +option("--enable-jit", default=jit_default, help="{Enable|Disable} use of the JITs") + + +@deprecated_option("--enable-ion") def report_deprecated(value): if value: - die('--enable-ion is deprecated, use --enable-jit instead') + die("--enable-ion is deprecated, use --enable-jit instead") else: - die('--disable-ion is deprecated, use --disable-jit instead') + die("--disable-ion is deprecated, use --disable-jit instead") + # JIT code simulator for cross compiles # ======================================================= -option('--enable-simulator', choices=('arm', 'arm64', 'mips32', 'mips64'), - nargs=1, - help='Enable a JIT code simulator for the specified architecture') +option( + "--enable-simulator", + choices=("arm", "arm64", "mips32", "mips64"), + nargs=1, + help="Enable a JIT code simulator for the specified architecture", +) + -@depends('--enable-jit', '--enable-simulator', target, '--help') +@depends("--enable-jit", "--enable-simulator", target, "--help") def simulator(jit_enabled, simulator_enabled, target, _): if not jit_enabled or not simulator_enabled: return sim_cpu = simulator_enabled[0] - if sim_cpu in ('arm', 'mips32'): - if target.cpu != 'x86': - die('The %s simulator only works on x86.' % sim_cpu) + if sim_cpu in ("arm", "mips32"): + if target.cpu != "x86": + die("The %s simulator only works on x86." % sim_cpu) - if sim_cpu in ('arm64', 'mips64'): - if target.cpu != 'x86_64': - die('The %s simulator only works on x86-64.' % sim_cpu) + if sim_cpu in ("arm64", "mips64"): + if target.cpu != "x86_64": + die("The %s simulator only works on x86-64." 
% sim_cpu) return namespace(**{sim_cpu: True}) -set_config('JS_SIMULATOR', depends_if(simulator)(lambda x: True)) -set_config('JS_SIMULATOR_ARM', simulator.arm) -set_config('JS_SIMULATOR_ARM64', simulator.arm64) -set_config('JS_SIMULATOR_MIPS32', simulator.mips32) -set_config('JS_SIMULATOR_MIPS64', simulator.mips64) -set_define('JS_SIMULATOR', depends_if(simulator)(lambda x: True)) -set_define('JS_SIMULATOR_ARM', simulator.arm) -set_define('JS_SIMULATOR_ARM64', simulator.arm64) -set_define('JS_SIMULATOR_MIPS32', simulator.mips32) -set_define('JS_SIMULATOR_MIPS64', simulator.mips64) - -@depends('--enable-jit', simulator, target) + +set_config("JS_SIMULATOR", depends_if(simulator)(lambda x: True)) +set_config("JS_SIMULATOR_ARM", simulator.arm) +set_config("JS_SIMULATOR_ARM64", simulator.arm64) +set_config("JS_SIMULATOR_MIPS32", simulator.mips32) +set_config("JS_SIMULATOR_MIPS64", simulator.mips64) +set_define("JS_SIMULATOR", depends_if(simulator)(lambda x: True)) +set_define("JS_SIMULATOR_ARM", simulator.arm) +set_define("JS_SIMULATOR_ARM64", simulator.arm64) +set_define("JS_SIMULATOR_MIPS32", simulator.mips32) +set_define("JS_SIMULATOR_MIPS64", simulator.mips64) + + +@depends("--enable-jit", simulator, target) def jit_codegen(jit_enabled, simulator, target): if not jit_enabled: return namespace(none=True) @@ -173,129 +206,156 @@ def jit_codegen(jit_enabled, simulator, target): if simulator: return simulator - if target.cpu == 'aarch64': + if target.cpu == "aarch64": return namespace(arm64=True) - elif target.cpu == 'x86_64': + elif target.cpu == "x86_64": return namespace(x64=True) return namespace(**{str(target.cpu): True}) -set_config('JS_CODEGEN_NONE', jit_codegen.none) -set_config('JS_CODEGEN_ARM', jit_codegen.arm) -set_config('JS_CODEGEN_ARM64', jit_codegen.arm64) -set_config('JS_CODEGEN_MIPS32', jit_codegen.mips32) -set_config('JS_CODEGEN_MIPS64', jit_codegen.mips64) -set_config('JS_CODEGEN_X86', jit_codegen.x86) -set_config('JS_CODEGEN_X64', jit_codegen.x64) -set_define('JS_CODEGEN_NONE', jit_codegen.none) -set_define('JS_CODEGEN_ARM', jit_codegen.arm) -set_define('JS_CODEGEN_ARM64', jit_codegen.arm64) -set_define('JS_CODEGEN_MIPS32', jit_codegen.mips32) -set_define('JS_CODEGEN_MIPS64', jit_codegen.mips64) -set_define('JS_CODEGEN_X86', jit_codegen.x86) -set_define('JS_CODEGEN_X64', jit_codegen.x64) + +set_config("JS_CODEGEN_NONE", jit_codegen.none) +set_config("JS_CODEGEN_ARM", jit_codegen.arm) +set_config("JS_CODEGEN_ARM64", jit_codegen.arm64) +set_config("JS_CODEGEN_MIPS32", jit_codegen.mips32) +set_config("JS_CODEGEN_MIPS64", jit_codegen.mips64) +set_config("JS_CODEGEN_X86", jit_codegen.x86) +set_config("JS_CODEGEN_X64", jit_codegen.x64) +set_define("JS_CODEGEN_NONE", jit_codegen.none) +set_define("JS_CODEGEN_ARM", jit_codegen.arm) +set_define("JS_CODEGEN_ARM64", jit_codegen.arm64) +set_define("JS_CODEGEN_MIPS32", jit_codegen.mips32) +set_define("JS_CODEGEN_MIPS64", jit_codegen.mips64) +set_define("JS_CODEGEN_X86", jit_codegen.x86) +set_define("JS_CODEGEN_X64", jit_codegen.x64) # Profiling # ======================================================= -option('--enable-instruments', env='MOZ_INSTRUMENTS', - help='Enable instruments remote profiling') +option( + "--enable-instruments", + env="MOZ_INSTRUMENTS", + help="Enable instruments remote profiling", +) + -@depends('--enable-instruments', target) +@depends("--enable-instruments", target) def instruments(value, target): - if value and target.os != 'OSX': - die('--enable-instruments cannot be used when targeting %s', - target.os) + 
if value and target.os != "OSX": + die("--enable-instruments cannot be used when targeting %s", target.os) if value: return True -set_config('MOZ_INSTRUMENTS', instruments) -set_define('MOZ_INSTRUMENTS', instruments) -add_old_configure_assignment('MOZ_INSTRUMENTS', instruments) -imply_option('--enable-profiling', instruments, reason='--enable-instruments') -option('--enable-callgrind', env='MOZ_CALLGRIND', - help='Enable callgrind profiling') +set_config("MOZ_INSTRUMENTS", instruments) +set_define("MOZ_INSTRUMENTS", instruments) +add_old_configure_assignment("MOZ_INSTRUMENTS", instruments) +imply_option("--enable-profiling", instruments, reason="--enable-instruments") + +option("--enable-callgrind", env="MOZ_CALLGRIND", help="Enable callgrind profiling") -@depends('--enable-callgrind') + +@depends("--enable-callgrind") def callgrind(value): if value: return True -set_define('MOZ_CALLGRIND', callgrind) -imply_option('--enable-profiling', callgrind) + +set_define("MOZ_CALLGRIND", callgrind) +imply_option("--enable-profiling", callgrind) + @depends(milestone) def enable_profiling(milestone): return milestone.is_nightly -option('--enable-profiling', env='MOZ_PROFILING', default=enable_profiling, - help='{Set|Do not set} compile flags necessary for using sampling ' - 'profilers (e.g. shark, perf)') -@depends('--enable-profiling') +option( + "--enable-profiling", + env="MOZ_PROFILING", + default=enable_profiling, + help="{Set|Do not set} compile flags necessary for using sampling " + "profilers (e.g. shark, perf)", +) + + +@depends("--enable-profiling") def profiling(value): if value: return True -add_old_configure_assignment('MOZ_PROFILING', profiling) -with only_when('--enable-compile-environment'): - imply_option('--enable-frame-pointers', True, when=profiling) +add_old_configure_assignment("MOZ_PROFILING", profiling) + +with only_when("--enable-compile-environment"): + imply_option("--enable-frame-pointers", True, when=profiling) @depends(profiling, target) def imply_vtune(value, target): - ok_cpu = target.cpu in ['x86', 'x86_64'] - ok_kernel = target.kernel == 'WINNT' or \ - (target.kernel == 'Linux' and target.os == 'GNU') + ok_cpu = target.cpu in ["x86", "x86_64"] + ok_kernel = target.kernel == "WINNT" or ( + target.kernel == "Linux" and target.os == "GNU" + ) if value and ok_cpu and ok_kernel: return True -set_config('MOZ_PROFILING', profiling) -set_define('MOZ_PROFILING', profiling) -imply_option('--enable-vtune', imply_vtune, reason='--enable-profiling') +set_config("MOZ_PROFILING", profiling) +set_define("MOZ_PROFILING", profiling) +imply_option("--enable-vtune", imply_vtune, reason="--enable-profiling") + + +option("--enable-vtune", env="MOZ_VTUNE", help="Enable VTune profiling") -option('--enable-vtune', env='MOZ_VTUNE', help='Enable VTune profiling') -@depends('--enable-vtune') +@depends("--enable-vtune") def vtune(value): if value: return True -set_config('MOZ_VTUNE', vtune) -set_define('MOZ_VTUNE', vtune) +set_config("MOZ_VTUNE", vtune) +set_define("MOZ_VTUNE", vtune) -option('--enable-gc-probes', env='JS_GC_PROBES', - help='Turn on probes for allocation and finalization') -@depends('--enable-gc-probes') +option( + "--enable-gc-probes", + env="JS_GC_PROBES", + help="Turn on probes for allocation and finalization", +) + + +@depends("--enable-gc-probes") def gc_probes(value): if value: return True -set_define('JS_GC_PROBES', gc_probes) + +set_define("JS_GC_PROBES", gc_probes) -option('--enable-gczeal', - default=depends(when=moz_debug)(lambda: True), - help='{Enable|Disable} 
zealous GCing') +option( + "--enable-gczeal", + default=depends(when=moz_debug)(lambda: True), + help="{Enable|Disable} zealous GCing", +) -set_define('JS_GC_ZEAL', - depends_if('--enable-gczeal')(lambda _: True)) +set_define("JS_GC_ZEAL", depends_if("--enable-gczeal")(lambda _: True)) # Use a smaller chunk size for GC chunks # ======================================================== # Use large (1MB) chunks by default. This option can be used to give # smaller (currently 256K) chunks. -option('--enable-small-chunk-size', - help='Allocate memory for JS GC things in smaller chunks') +option( + "--enable-small-chunk-size", + help="Allocate memory for JS GC things in smaller chunks", +) -set_define('JS_GC_SMALL_CHUNK_SIZE', - depends(when='--enable-small-chunk-size')(lambda: True)) +set_define( + "JS_GC_SMALL_CHUNK_SIZE", depends(when="--enable-small-chunk-size")(lambda: True) +) # Trace logging. @@ -305,104 +365,111 @@ def default_trace_logging(milestone): return milestone.is_nightly -option('--enable-trace-logging', - default=default_trace_logging, - help='{Enable|Disable} trace logging') +option( + "--enable-trace-logging", + default=default_trace_logging, + help="{Enable|Disable} trace logging", +) -set_config('ENABLE_TRACE_LOGGING', - depends_if('--enable-trace-logging')(lambda x: True)) -set_define('JS_TRACE_LOGGING', - depends_if('--enable-trace-logging')(lambda x: True)) +set_config("ENABLE_TRACE_LOGGING", depends_if("--enable-trace-logging")(lambda x: True)) +set_define("JS_TRACE_LOGGING", depends_if("--enable-trace-logging")(lambda x: True)) # Enable breakpoint for artificial OOMs # ======================================================= -option('--enable-oom-breakpoint', - help='Enable a breakpoint function for artificial OOMs') +option( + "--enable-oom-breakpoint", help="Enable a breakpoint function for artificial OOMs" +) -set_define('JS_OOM_BREAKPOINT', - depends_if('--enable-oom-breakpoint')(lambda _: True)) +set_define("JS_OOM_BREAKPOINT", depends_if("--enable-oom-breakpoint")(lambda _: True)) -option('--enable-perf', env='JS_ION_PERF', - help='Enable Linux perf integration') +option("--enable-perf", env="JS_ION_PERF", help="Enable Linux perf integration") -@depends('--enable-perf') + +@depends("--enable-perf") def ion_perf(value): if value: return True -set_define('JS_ION_PERF', ion_perf) + +set_define("JS_ION_PERF", ion_perf) -option('--enable-jitspew', - default=depends(when=moz_debug)(lambda: True), - help='{Enable|Disable} the Jit spew and IONFLAGS environment ' - 'variable') +option( + "--enable-jitspew", + default=depends(when=moz_debug)(lambda: True), + help="{Enable|Disable} the Jit spew and IONFLAGS environment " "variable", +) -set_define('JS_JITSPEW', - depends_if('--enable-jitspew')(lambda _: True)) -set_config('JS_JITSPEW', - depends_if('--enable-jitspew')(lambda _: True)) +set_define("JS_JITSPEW", depends_if("--enable-jitspew")(lambda _: True)) +set_config("JS_JITSPEW", depends_if("--enable-jitspew")(lambda _: True)) # Also enable the structured spewer -set_define('JS_STRUCTURED_SPEW', - depends_if('--enable-jitspew')(lambda _: True)) -set_config('JS_STRUCTURED_SPEW', - depends_if('--enable-jitspew')(lambda _: True)) +set_define("JS_STRUCTURED_SPEW", depends_if("--enable-jitspew")(lambda _: True)) +set_config("JS_STRUCTURED_SPEW", depends_if("--enable-jitspew")(lambda _: True)) + -@depends('--enable-jit', '--enable-jitspew', simulator, target, moz_debug) +@depends("--enable-jit", "--enable-jitspew", simulator, target, moz_debug) def jit_disasm_arm(jit_enabled, 
spew, simulator, target, debug): if not jit_enabled: return if simulator and (debug or spew): - if getattr(simulator, 'arm', None): + if getattr(simulator, "arm", None): return True - if target.cpu == 'arm' and (debug or spew): + if target.cpu == "arm" and (debug or spew): return True -set_config('JS_DISASM_ARM', jit_disasm_arm) -set_define('JS_DISASM_ARM', jit_disasm_arm) -@depends('--enable-jit', '--enable-jitspew', simulator, target, moz_debug) +set_config("JS_DISASM_ARM", jit_disasm_arm) +set_define("JS_DISASM_ARM", jit_disasm_arm) + + +@depends("--enable-jit", "--enable-jitspew", simulator, target, moz_debug) def jit_disasm_arm64(jit_enabled, spew, simulator, target, debug): if not jit_enabled: return if simulator and (debug or spew): - if getattr(simulator, 'arm64', None): + if getattr(simulator, "arm64", None): return True - if target.cpu == 'aarch64' and (debug or spew): + if target.cpu == "aarch64" and (debug or spew): return True -set_config('JS_DISASM_ARM64', jit_disasm_arm64) -set_define('JS_DISASM_ARM64', jit_disasm_arm64) + +set_config("JS_DISASM_ARM64", jit_disasm_arm64) +set_define("JS_DISASM_ARM64", jit_disasm_arm64) # When enabled, masm will generate assumeUnreachable calls that act as # assertions in the generated code. This option is worth disabling when you # have to track mutated values through the generated code, to avoid constantly # dumping registers on and off the stack. -option('--enable-masm-verbose', - default=depends(when=moz_debug)(lambda: True), - help='{Enable|Disable} MacroAssembler verbosity of generated code.') -set_define('JS_MASM_VERBOSE', - depends_if('--enable-masm-verbose')(lambda _: True)) -set_config('JS_MASM_VERBOSE', - depends_if('--enable-masm-verbose')(lambda _: True)) +option( + "--enable-masm-verbose", + default=depends(when=moz_debug)(lambda: True), + help="{Enable|Disable} MacroAssembler verbosity of generated code.", +) +set_define("JS_MASM_VERBOSE", depends_if("--enable-masm-verbose")(lambda _: True)) +set_config("JS_MASM_VERBOSE", depends_if("--enable-masm-verbose")(lambda _: True)) -option('--enable-more-deterministic', env='JS_MORE_DETERMINISTIC', - help='Enable changes that make the shell more deterministic') +option( + "--enable-more-deterministic", + env="JS_MORE_DETERMINISTIC", + help="Enable changes that make the shell more deterministic", +) -@depends('--enable-more-deterministic') + +@depends("--enable-more-deterministic") def more_deterministic(value): if value: return True -set_define('JS_MORE_DETERMINISTIC', more_deterministic) + +set_define("JS_MORE_DETERMINISTIC", more_deterministic) # CTypes @@ -411,116 +478,143 @@ set_define('JS_MORE_DETERMINISTIC', more_deterministic) def ctypes_default(js_standalone): return not js_standalone -option('--enable-ctypes', - default=ctypes_default, - help='{Enable|Disable} js-ctypes') -build_ctypes = depends_if('--enable-ctypes')(lambda _: True) +option("--enable-ctypes", default=ctypes_default, help="{Enable|Disable} js-ctypes") -set_config('BUILD_CTYPES', build_ctypes) -set_define('BUILD_CTYPES', build_ctypes) +build_ctypes = depends_if("--enable-ctypes")(lambda _: True) -set_config('JS_HAS_CTYPES', build_ctypes) -set_define('JS_HAS_CTYPES', build_ctypes) +set_config("BUILD_CTYPES", build_ctypes) +set_define("BUILD_CTYPES", build_ctypes) -@depends('--enable-ctypes', '--enable-compile-environment') +set_config("JS_HAS_CTYPES", build_ctypes) +set_define("JS_HAS_CTYPES", build_ctypes) + + +@depends("--enable-ctypes", "--enable-compile-environment") def ctypes_and_compile_environment(ctypes, 
compile_environment): return ctypes and compile_environment -include('ffi.configure', when=ctypes_and_compile_environment) + +include("ffi.configure", when=ctypes_and_compile_environment) # Enable pipeline operator # =================================================== -option('--enable-pipeline-operator', default=False, help='Enable pipeline operator') +option("--enable-pipeline-operator", default=False, help="Enable pipeline operator") -@depends('--enable-pipeline-operator') + +@depends("--enable-pipeline-operator") def enable_pipeline_operator(value): if value: return True -set_config('ENABLE_PIPELINE_OPERATOR', enable_pipeline_operator) -set_define('ENABLE_PIPELINE_OPERATOR', enable_pipeline_operator) + +set_config("ENABLE_PIPELINE_OPERATOR", enable_pipeline_operator) +set_define("ENABLE_PIPELINE_OPERATOR", enable_pipeline_operator) # SIMD acceleration for encoding_rs # ============================================================== -option('--enable-rust-simd', env='MOZ_RUST_SIMD', - help='Enable explicit SIMD in Rust code.') +option( + "--enable-rust-simd", env="MOZ_RUST_SIMD", help="Enable explicit SIMD in Rust code." +) + -@depends('--enable-rust-simd', target) +@depends("--enable-rust-simd", target) def rust_simd(value, target): # As of 2019-09-17, the simd-accel feature of encoding_rs has not # been properly set up outside aarch64, armv7, x86 and x86_64. - if target.cpu in ('aarch64', 'arm', 'x86', 'x86_64') and value: + if target.cpu in ("aarch64", "arm", "x86", "x86_64") and value: return True -set_config('MOZ_RUST_SIMD', rust_simd) -set_define('MOZ_RUST_SIMD', rust_simd) + +set_config("MOZ_RUST_SIMD", rust_simd) +set_define("MOZ_RUST_SIMD", rust_simd) # Support for wasm code generation with Cranelift # ============================================================== + @depends(milestone.is_nightly, target) def cranelift_default(is_nightly, target): - if is_nightly and (target.cpu == 'aarch64' or target.cpu == 'x86_64'): + if is_nightly and (target.cpu == "aarch64" or target.cpu == "x86_64"): return True -option('--enable-cranelift', - default=cranelift_default, - help='{Enable|Disable} Cranelift code generator for wasm') -set_config('ENABLE_WASM_CRANELIFT', depends_if('--enable-cranelift')(lambda x: True)) -set_define('ENABLE_WASM_CRANELIFT', depends_if('--enable-cranelift')(lambda x: True)) +option( + "--enable-cranelift", + default=cranelift_default, + help="{Enable|Disable} Cranelift code generator for wasm", +) + +set_config("ENABLE_WASM_CRANELIFT", depends_if("--enable-cranelift")(lambda x: True)) +set_define("ENABLE_WASM_CRANELIFT", depends_if("--enable-cranelift")(lambda x: True)) # Telemetry to measure compile time and generated-code runtime # ============================================================ -option('--enable-spidermonkey-telemetry', - default=milestone.is_nightly, - help='{Enable|Disable} performance telemetry for SpiderMonkey (e.g. compile and run times)') +option( + "--enable-spidermonkey-telemetry", + default=milestone.is_nightly, + help="{Enable|Disable} performance telemetry for SpiderMonkey (e.g. 
compile and run times)", +) -set_define('ENABLE_SPIDERMONKEY_TELEMETRY', depends_if('--enable-spidermonkey-telemetry')(lambda x: True)) +set_define( + "ENABLE_SPIDERMONKEY_TELEMETRY", + depends_if("--enable-spidermonkey-telemetry")(lambda x: True), +) # Support for debugging code generated by wasm backends # ===================================================== -option('--enable-wasm-codegen-debug', - default=depends(when=moz_debug)(lambda: True), - help='{Enable|Disable} debugging for wasm codegen') +option( + "--enable-wasm-codegen-debug", + default=depends(when=moz_debug)(lambda: True), + help="{Enable|Disable} debugging for wasm codegen", +) -set_config('WASM_CODEGEN_DEBUG', depends_if('--enable-wasm-codegen-debug')(lambda x: True)) -set_define('WASM_CODEGEN_DEBUG', depends_if('--enable-wasm-codegen-debug')(lambda x: True)) +set_config( + "WASM_CODEGEN_DEBUG", depends_if("--enable-wasm-codegen-debug")(lambda x: True) +) +set_define( + "WASM_CODEGEN_DEBUG", depends_if("--enable-wasm-codegen-debug")(lambda x: True) +) # Support for WebAssembly reference types. # ===================================================== -option('--disable-wasm-reftypes', - help='Disable WebAssembly reference types') +option("--disable-wasm-reftypes", help="Disable WebAssembly reference types") -@depends('--disable-wasm-reftypes') + +@depends("--disable-wasm-reftypes") def enable_wasm_reftypes(value): if value: return True -set_config('ENABLE_WASM_REFTYPES', enable_wasm_reftypes) -set_define('ENABLE_WASM_REFTYPES', enable_wasm_reftypes) + +set_config("ENABLE_WASM_REFTYPES", enable_wasm_reftypes) +set_define("ENABLE_WASM_REFTYPES", enable_wasm_reftypes) # Support for WebAssembly function-references. # =========================== -@depends(milestone.is_nightly, '--disable-wasm-reftypes') + +@depends(milestone.is_nightly, "--disable-wasm-reftypes") def default_wasm_function_references(is_nightly, reftypes): if is_nightly and reftypes: return True -option('--enable-wasm-function-references', - default=default_wasm_function_references, - help='{Enable|Disable} WebAssembly function-references') -@depends('--enable-wasm-function-references', '--disable-wasm-reftypes') +option( + "--enable-wasm-function-references", + default=default_wasm_function_references, + help="{Enable|Disable} WebAssembly function-references", +) + + +@depends("--enable-wasm-function-references", "--disable-wasm-reftypes") def wasm_function_references(value, reftypes): if not value: return @@ -528,24 +622,30 @@ def wasm_function_references(value, reftypes): if reftypes: return True - die('--enable-wasm-function-references only possible without --disable-wasm-reftypes') + die( + "--enable-wasm-function-references only possible without --disable-wasm-reftypes" + ) + -set_config('ENABLE_WASM_FUNCTION_REFERENCES', wasm_function_references) -set_define('ENABLE_WASM_FUNCTION_REFERENCES', wasm_function_references) +set_config("ENABLE_WASM_FUNCTION_REFERENCES", wasm_function_references) +set_define("ENABLE_WASM_FUNCTION_REFERENCES", wasm_function_references) # Support for WebAssembly GC. 
# =========================== -@depends(milestone.is_nightly, '--enable-wasm-function-references') + +@depends(milestone.is_nightly, "--enable-wasm-function-references") def default_wasm_gc(is_nightly, function_references): if is_nightly and function_references: return True -option('--enable-wasm-gc', - default=default_wasm_gc, - help='{Enable|Disable} WebAssembly GC') -@depends('--enable-wasm-gc', '--enable-wasm-function-references') +option( + "--enable-wasm-gc", default=default_wasm_gc, help="{Enable|Disable} WebAssembly GC" +) + + +@depends("--enable-wasm-gc", "--enable-wasm-function-references") def wasm_gc(value, function_references): if not value: return @@ -553,10 +653,11 @@ def wasm_gc(value, function_references): if function_references: return True - die('--enable-wasm-gc only possible with --enable-wasm-function-references') + die("--enable-wasm-gc only possible with --enable-wasm-function-references") -set_config('ENABLE_WASM_GC', wasm_gc) -set_define('ENABLE_WASM_GC', wasm_gc) + +set_config("ENABLE_WASM_GC", wasm_gc) +set_define("ENABLE_WASM_GC", wasm_gc) # Support for WebAssembly private ref types. @@ -564,33 +665,47 @@ set_define('ENABLE_WASM_GC', wasm_gc) # no typechecking at the JS/wasm boundary # =========================================================================== -@depends(milestone.is_nightly, '--enable-wasm-gc') + +@depends(milestone.is_nightly, "--enable-wasm-gc") def default_wasm_private_reftypes(is_nightly, gc): if gc and is_nightly: return True -option('--enable-wasm-private-reftypes', - default=default_wasm_private_reftypes, - help='{Enable|Disable} WebAssembly private reference types') -set_config('WASM_PRIVATE_REFTYPES', depends_if('--enable-wasm-private-reftypes')(lambda x: True)) -set_define('WASM_PRIVATE_REFTYPES', depends_if('--enable-wasm-private-reftypes')(lambda x: True)) +option( + "--enable-wasm-private-reftypes", + default=default_wasm_private_reftypes, + help="{Enable|Disable} WebAssembly private reference types", +) + +set_config( + "WASM_PRIVATE_REFTYPES", + depends_if("--enable-wasm-private-reftypes")(lambda x: True), +) +set_define( + "WASM_PRIVATE_REFTYPES", + depends_if("--enable-wasm-private-reftypes")(lambda x: True), +) # Support for the WebAssembly multi-value proposal. # Do not remove until Cranelift supports multi-value. # ===================================================== -option('--disable-wasm-multi-value', - help='Disable WebAssembly multi-value blocks and function calls') +option( + "--disable-wasm-multi-value", + help="Disable WebAssembly multi-value blocks and function calls", +) -@depends('--disable-wasm-multi-value') + +@depends("--disable-wasm-multi-value") def enable_wasm_multi_value(value): if value: return True -set_config('ENABLE_WASM_MULTI_VALUE', enable_wasm_multi_value) -set_define('ENABLE_WASM_MULTI_VALUE', enable_wasm_multi_value) + +set_config("ENABLE_WASM_MULTI_VALUE", enable_wasm_multi_value) +set_define("ENABLE_WASM_MULTI_VALUE", enable_wasm_multi_value) # Support for WebAssembly shared memory and atomics. @@ -600,15 +715,19 @@ set_define('ENABLE_WASM_MULTI_VALUE', enable_wasm_multi_value) # and atomics it can go away. 
# ===================================================== -option('--disable-shared-memory', help='Disable JS/WebAssembly shared memory and atomics') +option( + "--disable-shared-memory", help="Disable JS/WebAssembly shared memory and atomics" +) -@depends('--disable-shared-memory') + +@depends("--disable-shared-memory") def enable_shared_memory(value): if value: return True -set_config('ENABLE_SHARED_MEMORY', enable_shared_memory) -set_define('ENABLE_SHARED_MEMORY', enable_shared_memory) + +set_config("ENABLE_SHARED_MEMORY", enable_shared_memory) +set_define("ENABLE_SHARED_MEMORY", enable_shared_memory) # Support for WebAssembly SIMD @@ -618,47 +737,58 @@ set_define('ENABLE_SHARED_MEMORY', enable_shared_memory) # and not properly supported by Cranelift, but can be enabled so as # to be tested with baseline. -@depends('--enable-jit', '--enable-simulator', target) + +@depends("--enable-jit", "--enable-simulator", target) def default_wasm_simd(jit_enabled, simulator, target): if not jit_enabled or simulator: return - if target.cpu in ('x86_64', 'x86'): + if target.cpu in ("x86_64", "x86"): return True -option('--enable-wasm-simd', - default=default_wasm_simd, - help='{Enable|Disable} WebAssembly SIMD') -@depends('--enable-wasm-simd', '--enable-jit', '--enable-simulator', target) +option( + "--enable-wasm-simd", + default=default_wasm_simd, + help="{Enable|Disable} WebAssembly SIMD", +) + + +@depends("--enable-wasm-simd", "--enable-jit", "--enable-simulator", target) def wasm_simd(value, jit_enabled, simulator, target): if not value: return if jit_enabled and not simulator: - if target.cpu in ('x86_64', 'x86', 'aarch64'): + if target.cpu in ("x86_64", "x86", "aarch64"): return True - if jit_enabled and simulator and simulator[0] == 'arm64': + if jit_enabled and simulator and simulator[0] == "arm64": return True - die('--enable-wasm-simd only possible when targeting the x86_64/x86/arm64 jits') + die("--enable-wasm-simd only possible when targeting the x86_64/x86/arm64 jits") + -set_config('ENABLE_WASM_SIMD', wasm_simd) -set_define('ENABLE_WASM_SIMD', wasm_simd) +set_config("ENABLE_WASM_SIMD", wasm_simd) +set_define("ENABLE_WASM_SIMD", wasm_simd) # Experimental SIMD opcodes are Nightly-only by default -@depends(milestone.is_nightly, '--enable-wasm-simd') + +@depends(milestone.is_nightly, "--enable-wasm-simd") def default_wasm_simd_experimental(is_nightly, wasm_simd): if is_nightly and wasm_simd: return True -option('--enable-wasm-simd-experimental', - default=default_wasm_simd_experimental, - help='{Enable|Disable} WebAssembly SIMD experimental opcodes') -@depends('--enable-wasm-simd-experimental', '--enable-wasm-simd') +option( + "--enable-wasm-simd-experimental", + default=default_wasm_simd_experimental, + help="{Enable|Disable} WebAssembly SIMD experimental opcodes", +) + + +@depends("--enable-wasm-simd-experimental", "--enable-wasm-simd") def wasm_simd_experimental(value, wasm_simd): if not value: return @@ -666,134 +796,176 @@ def wasm_simd_experimental(value, wasm_simd): if wasm_simd: return True - die('--enable-wasm-simd-experimental only possible with --enable-wasm-simd') + die("--enable-wasm-simd-experimental only possible with --enable-wasm-simd") -set_config('ENABLE_WASM_SIMD_EXPERIMENTAL', wasm_simd_experimental) -set_define('ENABLE_WASM_SIMD_EXPERIMENTAL', wasm_simd_experimental) + +set_config("ENABLE_WASM_SIMD_EXPERIMENTAL", wasm_simd_experimental) +set_define("ENABLE_WASM_SIMD_EXPERIMENTAL", wasm_simd_experimental) # Options for generating the shell as a script # 
============================================ -option('--with-qemu-exe', nargs=1, help='Use path as an arm emulator on host platforms') -set_config('QEMU_EXE', depends_if('--with-qemu-exe')(lambda x: x)) +option("--with-qemu-exe", nargs=1, help="Use path as an arm emulator on host platforms") +set_config("QEMU_EXE", depends_if("--with-qemu-exe")(lambda x: x)) -option('--with-cross-lib', nargs=1, default=depends(target.alias)(lambda x: '/usr/%s' % x), - help='Use dir as the location for arm libraries') -set_config('CROSS_LIB', depends_if('--with-cross-lib')(lambda x: x)) +option( + "--with-cross-lib", + nargs=1, + default=depends(target.alias)(lambda x: "/usr/%s" % x), + help="Use dir as the location for arm libraries", +) +set_config("CROSS_LIB", depends_if("--with-cross-lib")(lambda x: x)) # Enable static checking using sixgill # ==================================== -option('--with-sixgill', nargs=1, help='Enable static checking of code using sixgill') +option("--with-sixgill", nargs=1, help="Enable static checking of code using sixgill") + -@depends_if('--with-sixgill') -@imports('os') +@depends_if("--with-sixgill") +@imports("os") def sixgill(value): - for f in ('bin/xdbfind', 'gcc/xgill.so', 'scripts/wrap_gcc/g++'): + for f in ("bin/xdbfind", "gcc/xgill.so", "scripts/wrap_gcc/g++"): if not os.path.exists(os.path.join(value[0], f)): - die('The sixgill plugin and binaries are not at the specified path') + die("The sixgill plugin and binaries are not at the specified path") return value[0] -set_config('SIXGILL_PATH', sixgill) + +set_config("SIXGILL_PATH", sixgill) # Support for readline # ===================================================== -@depends('--enable-js-shell', target_is_windows, compile_environment) +@depends("--enable-js-shell", target_is_windows, compile_environment) def editline(js_shell, is_windows, compile_environment): return js_shell and not is_windows and compile_environment -option('--enable-readline', help='Link js shell to system readline library', - when=editline) -has_readline = check_symbol('readline', flags=['-lreadline'], when='--enable-readline', - onerror=lambda: die('No system readline library found')) +option( + "--enable-readline", help="Link js shell to system readline library", when=editline +) + +has_readline = check_symbol( + "readline", + flags=["-lreadline"], + when="--enable-readline", + onerror=lambda: die("No system readline library found"), +) + +set_config("EDITLINE_LIBS", ["-lreadline"], when=has_readline) -set_config('EDITLINE_LIBS', ['-lreadline'], when=has_readline) -@depends('--enable-readline', editline, when=editline) +@depends("--enable-readline", editline, when=editline) def bundled_editline(readline, editline): return editline and not readline -set_config('JS_BUNDLED_EDITLINE', bundled_editline) -set_define('EDITLINE', True, when=editline) +set_config("JS_BUNDLED_EDITLINE", bundled_editline) + +set_define("EDITLINE", True, when=editline) # JIT observers # ============= -option('--with-jitreport-granularity', default='3', choices=('0', '1', '2', '3'), - help='Default granularity at which to report JIT code to external tools ' - '(0 - no info, 1 - code ranges for while functions only, ' - '2 - per-line information, 3 - per-op information)') +option( + "--with-jitreport-granularity", + default="3", + choices=("0", "1", "2", "3"), + help="Default granularity at which to report JIT code to external tools " + "(0 - no info, 1 - code ranges for while functions only, " + "2 - per-line information, 3 - per-op information)", +) 
-set_define('JS_DEFAULT_JITREPORT_GRANULARITY', - depends_if('--with-jitreport-granularity')(lambda value: value[0])) +set_define( + "JS_DEFAULT_JITREPORT_GRANULARITY", + depends_if("--with-jitreport-granularity")(lambda value: value[0]), +) # ECMAScript Internationalization API Support (uses ICU) # ====================================================== -option('--with-system-icu', help='Use system ICU') +option("--with-system-icu", help="Use system ICU") + +system_icu = pkg_check_modules("MOZ_ICU", "icu-i18n >= 67.1", when="--with-system-icu") -system_icu = pkg_check_modules('MOZ_ICU', 'icu-i18n >= 67.1', when='--with-system-icu') +set_config("MOZ_SYSTEM_ICU", True, when=system_icu) +set_define("MOZ_SYSTEM_ICU", True, when=system_icu) -set_config('MOZ_SYSTEM_ICU', True, when=system_icu) -set_define('MOZ_SYSTEM_ICU', True, when=system_icu) +option("--without-intl-api", help="Disable ECMAScript Internationalization API") -option('--without-intl-api', help='Disable ECMAScript Internationalization API') -@depends('--with-intl-api', js_standalone) +@depends("--with-intl-api", js_standalone) def check_intl_api(enabled, js_standalone): if not enabled and not js_standalone: - die('--without-intl-api is not supported') + die("--without-intl-api is not supported") + -set_config('JS_HAS_INTL_API', True, when='--with-intl-api') -set_define('JS_HAS_INTL_API', True, when='--with-intl-api') +set_config("JS_HAS_INTL_API", True, when="--with-intl-api") +set_define("JS_HAS_INTL_API", True, when="--with-intl-api") -@depends(check_build_environment, when='--with-intl-api') -@imports(_from='__builtin__', _import='open') -@imports(_from='__builtin__', _import='ValueError') + +@depends(check_build_environment, when="--with-intl-api") +@imports(_from="__builtin__", _import="open") +@imports(_from="__builtin__", _import="ValueError") def icu_version(build_env): - path = os.path.join(build_env.topsrcdir, 'intl', 'icu', 'source', 'common', - 'unicode', 'uvernum.h') - with open(path, encoding='utf-8') as fh: + path = os.path.join( + build_env.topsrcdir, "intl", "icu", "source", "common", "unicode", "uvernum.h" + ) + with open(path, encoding="utf-8") as fh: for line in fh: - if line.startswith('#define'): + if line.startswith("#define"): define = line.split(None, 3) - if len(define) == 3 and define[1] == 'U_ICU_VERSION_MAJOR_NUM': + if len(define) == 3 and define[1] == "U_ICU_VERSION_MAJOR_NUM": try: return str(int(define[2])) except ValueError: pass - die('Cannot determine ICU version number from uvernum.h header file') + die("Cannot determine ICU version number from uvernum.h header file") + -set_config('MOZ_ICU_VERSION', icu_version) +set_config("MOZ_ICU_VERSION", icu_version) # Source files that use ICU should have control over which parts of the ICU # namespace they want to use. -set_define('U_USING_ICU_NAMESPACE', '0', when='--with-intl-api') +set_define("U_USING_ICU_NAMESPACE", "0", when="--with-intl-api") # We build ICU as a static library. -set_define('U_STATIC_IMPLEMENTATION', True, when=depends(system_icu)(lambda x: not x)) +set_define("U_STATIC_IMPLEMENTATION", True, when=depends(system_icu)(lambda x: not x)) + @depends(yasm, gnu_as, target, compile_environment) def can_build_data_file(yasm, gnu_as, target, compile_environment): - if not compile_environment or (target.kernel == 'WINNT' and target.cpu == 'aarch64'): + if not compile_environment or ( + target.kernel == "WINNT" and target.cpu == "aarch64" + ): return if not yasm and not gnu_as: - die('Building ICU requires either yasm or a GNU assembler. 
If you do not have ' - 'either of those available for this platform you must use --without-intl-api') + die( + "Building ICU requires either yasm or a GNU assembler. If you do not have " + "either of those available for this platform you must use --without-intl-api" + ) + # Initial support for WebAssembly JS-API Type Reflections # ======================================================= + @depends(milestone.is_nightly) def default_wasm_type_reflections(is_nightly): return is_nightly -option('--enable-wasm-type-reflections', - default=default_wasm_type_reflections, - help='{Enable|Disable} type reflection in WASM JS-API') -set_config('ENABLE_WASM_TYPE_REFLECTIONS', depends_if('--enable-wasm-type-reflections')(lambda x: True)) -set_define('ENABLE_WASM_TYPE_REFLECTIONS', depends_if('--enable-wasm-type-reflections')(lambda x: True)) +option( + "--enable-wasm-type-reflections", + default=default_wasm_type_reflections, + help="{Enable|Disable} type reflection in WASM JS-API", +) + +set_config( + "ENABLE_WASM_TYPE_REFLECTIONS", + depends_if("--enable-wasm-type-reflections")(lambda x: True), +) +set_define( + "ENABLE_WASM_TYPE_REFLECTIONS", + depends_if("--enable-wasm-type-reflections")(lambda x: True), +) diff --git a/js/src/build/moz.build b/js/src/build/moz.build index b311e9549fedb5..3f5dce5f73ce46 100644 --- a/js/src/build/moz.build +++ b/js/src/build/moz.build @@ -5,14 +5,14 @@ # file, You can obtain one at http://mozilla.org/MPL/2.0/. CONFIGURE_SUBST_FILES += [ - 'js-config', - 'js.pc', + "js-config", + "js.pc", ] -LIBRARY_DEFINES['EXPORT_JS_API'] = True +LIBRARY_DEFINES["EXPORT_JS_API"] = True -if not CONFIG['JS_STANDALONE']: - LIBRARY_DEFINES['MOZ_HAS_MOZGLUE'] = True +if not CONFIG["JS_STANDALONE"]: + LIBRARY_DEFINES["MOZ_HAS_MOZGLUE"] = True # JavaScript must be built shared, even for static builds, as it is used by # other modules which are always built shared. Failure to do so results in @@ -22,60 +22,63 @@ if not CONFIG['JS_STANDALONE']: # In fact, we now build both a static and a shared library, as the # JS shell would like to link to the static library. -if CONFIG['JS_SHARED_LIBRARY']: - GeckoSharedLibrary('js', linkage=None) - SHARED_LIBRARY_NAME = CONFIG['JS_LIBRARY_NAME'] +if CONFIG["JS_SHARED_LIBRARY"]: + GeckoSharedLibrary("js", linkage=None) + SHARED_LIBRARY_NAME = CONFIG["JS_LIBRARY_NAME"] # Ensure symbol versions of shared library on Linux do not conflict # with those in libxul. 
- if CONFIG['OS_TARGET'] == 'Linux': - GeneratedFile('symverscript', script='/build/gen_symverscript.py', - inputs=['symverscript.in'], - flags=[CONFIG['JS_LIBRARY_NAME'].replace('-', '_')]) - SYMBOLS_FILE = '!symverscript' + if CONFIG["OS_TARGET"] == "Linux": + GeneratedFile( + "symverscript", + script="/build/gen_symverscript.py", + inputs=["symverscript.in"], + flags=[CONFIG["JS_LIBRARY_NAME"].replace("-", "_")], + ) + SYMBOLS_FILE = "!symverscript" else: - Library('js') + Library("js") FORCE_STATIC_LIB = True -STATIC_LIBRARY_NAME = 'js_static' +STATIC_LIBRARY_NAME = "js_static" -if CONFIG['JS_HAS_INTL_API']: +if CONFIG["JS_HAS_INTL_API"]: USE_LIBS += [ - 'icu', + "icu", ] USE_LIBS += [ - 'nspr', - 'zlib', + "nspr", + "zlib", ] -if CONFIG['OS_ARCH'] not in ('WINNT', 'HP-UX'): +if CONFIG["OS_ARCH"] not in ("WINNT", "HP-UX"): OS_LIBS += [ - 'm', + "m", ] -if CONFIG['OS_ARCH'] == 'FreeBSD': +if CONFIG["OS_ARCH"] == "FreeBSD": OS_LIBS += [ - '-pthread', + "-pthread", ] -if CONFIG['OS_ARCH'] == 'Linux': +if CONFIG["OS_ARCH"] == "Linux": OS_LIBS += [ - 'dl', + "dl", ] -if CONFIG['OS_ARCH'] == 'SunOS': +if CONFIG["OS_ARCH"] == "SunOS": OS_LIBS += [ - 'posix4', - 'dl', - 'nsl', - 'socket', + "posix4", + "dl", + "nsl", + "socket", ] -if CONFIG['MOZ_NEEDS_LIBATOMIC']: - OS_LIBS += ['atomic'] +if CONFIG["MOZ_NEEDS_LIBATOMIC"]: + OS_LIBS += ["atomic"] -OS_LIBS += CONFIG['REALTIME_LIBS'] +OS_LIBS += CONFIG["REALTIME_LIBS"] NO_EXPAND_LIBS = True @@ -84,10 +87,12 @@ DIST_INSTALL = True # Run SpiderMonkey style checker after linking the static library. This avoids # running the script for no-op builds. GeneratedFile( - 'spidermonkey_checks', script='/config/run_spidermonkey_checks.py', + "spidermonkey_checks", + script="/config/run_spidermonkey_checks.py", inputs=[ - '!%sjs_static.%s' % (CONFIG['LIB_PREFIX'], CONFIG['LIB_SUFFIX']), - '/config/check_spidermonkey_style.py', - '/config/check_macroassembler_style.py', - '/config/check_js_opcode.py' - ]) + "!%sjs_static.%s" % (CONFIG["LIB_PREFIX"], CONFIG["LIB_SUFFIX"]), + "/config/check_spidermonkey_style.py", + "/config/check_macroassembler_style.py", + "/config/check_js_opcode.py", + ], +) diff --git a/js/src/builtin/embedjs.py b/js/src/builtin/embedjs.py index 3c56b69ce56609..abe9e9344e751c 100644 --- a/js/src/builtin/embedjs.py +++ b/js/src/builtin/embedjs.py @@ -90,33 +90,48 @@ def ToCArray(lines): """ # NOQA: E501 -def embed(cxx, preprocessorOption, cppflags, msgs, sources, c_out, js_out, namespace, env): +def embed( + cxx, preprocessorOption, cppflags, msgs, sources, c_out, js_out, namespace, env +): objdir = os.getcwd() # Use relative pathnames to avoid path translation issues in WSL. 
- combinedSources = '\n'.join([msgs] + ['#include "%(s)s"' % - {'s': mozpath.relpath(source, objdir)} - for source in sources]) - args = cppflags + ['-D%(k)s=%(v)s' % {'k': k, 'v': env[k]} for k in env] + combinedSources = "\n".join( + [msgs] + + [ + '#include "%(s)s"' % {"s": mozpath.relpath(source, objdir)} + for source in sources + ] + ) + args = cppflags + ["-D%(k)s=%(v)s" % {"k": k, "v": env[k]} for k in env] preprocessed = preprocess(cxx, preprocessorOption, combinedSources, args) - processed = '\n'.join([line for line in preprocessed.splitlines() if - (line.strip() and not line.startswith('#'))]) + processed = "\n".join( + [ + line + for line in preprocessed.splitlines() + if (line.strip() and not line.startswith("#")) + ] + ) js_out.write(processed) import zlib + compressed = zlib.compress(ensure_bytes(processed)) data = ToCArray(compressed) - c_out.write(HEADER_TEMPLATE % { - 'sources_type': 'unsigned char', - 'sources_data': data, - 'sources_name': 'compressedSources', - 'compressed_total_length': len(compressed), - 'raw_total_length': len(processed), - 'namespace': namespace - }) + c_out.write( + HEADER_TEMPLATE + % { + "sources_type": "unsigned char", + "sources_data": data, + "sources_name": "compressedSources", + "compressed_total_length": len(compressed), + "raw_total_length": len(processed), + "namespace": namespace, + } + ) def preprocess(cxx, preprocessorOption, source, args=[]): - if (not os.path.exists(cxx[0])): + if not os.path.exists(cxx[0]): binary = cxx[0] cxx[0] = which(binary) if not cxx[0]: @@ -124,17 +139,17 @@ def preprocess(cxx, preprocessorOption, source, args=[]): # Clang seems to complain and not output anything if the extension of the # input is not something it recognizes, so just fake a .cpp here. - tmpIn = 'self-hosting-cpp-input.cpp' - tmpOut = 'self-hosting-preprocessed.pp' + tmpIn = "self-hosting-cpp-input.cpp" + tmpOut = "self-hosting-preprocessed.pp" outputArg = shlex.split(preprocessorOption + tmpOut) - with open(tmpIn, 'wb') as input: + with open(tmpIn, "wb") as input: input.write(ensure_bytes(source)) - print(' '.join(cxx + outputArg + args + [tmpIn])) + print(" ".join(cxx + outputArg + args + [tmpIn])) result = subprocess.Popen(cxx + outputArg + args + [tmpIn]).wait() - if (result != 0): + if result != 0: sys.exit(result) - with open(tmpOut, 'r') as output: + with open(tmpOut, "r") as output: processed = output.read() os.remove(tmpIn) os.remove(tmpOut) @@ -150,37 +165,39 @@ def messages(jsmsg): else: # Make sure that MSG_DEF isn't preceded by whitespace assert not line.strip().startswith("MSG_DEF") - return '\n'.join(defines) + return "\n".join(defines) def get_config_defines(buildconfig): # Collect defines equivalent to ACDEFINES and add MOZ_DEBUG_DEFINES. 
- env = buildconfig.defines['ALLDEFINES'] - for define in buildconfig.substs['MOZ_DEBUG_DEFINES']: + env = buildconfig.defines["ALLDEFINES"] + for define in buildconfig.substs["MOZ_DEBUG_DEFINES"]: env[define] = 1 return env def process_inputs(namespace, c_out, msg_file, inputs): deps = [path for path in inputs if path.endswith(".h") or path.endswith(".h.js")] - sources = [path for path in inputs if path.endswith(".js") and not path.endswith(".h.js")] + sources = [ + path for path in inputs if path.endswith(".js") and not path.endswith(".h.js") + ] assert len(deps) + len(sources) == len(inputs) - cxx = shlex.split(buildconfig.substs['CXX']) - pp_option = buildconfig.substs['PREPROCESS_OPTION'] - cppflags = buildconfig.substs['OS_CPPFLAGS'] - cppflags += shlex.split(buildconfig.substs['WARNINGS_AS_ERRORS']) + cxx = shlex.split(buildconfig.substs["CXX"]) + pp_option = buildconfig.substs["PREPROCESS_OPTION"] + cppflags = buildconfig.substs["OS_CPPFLAGS"] + cppflags += shlex.split(buildconfig.substs["WARNINGS_AS_ERRORS"]) env = get_config_defines(buildconfig) js_path = re.sub(r"\.out\.h$", "", c_out.name) + ".js" msgs = messages(msg_file) - with open(js_path, 'w') as js_out: + with open(js_path, "w") as js_out: embed(cxx, pp_option, cppflags, msgs, sources, c_out, js_out, namespace, env) def generate_selfhosted(c_out, msg_file, *inputs): # Called from moz.build to embed selfhosted JS. - process_inputs('selfhosted', c_out, msg_file, inputs) + process_inputs("selfhosted", c_out, msg_file, inputs) def generate_shellmoduleloader(c_out, msg_file, *inputs): # Called from moz.build to embed shell module loader JS. - process_inputs('moduleloader', c_out, msg_file, inputs) + process_inputs("moduleloader", c_out, msg_file, inputs) diff --git a/js/src/builtin/intl/make_intl_data.py b/js/src/builtin/intl/make_intl_data.py index 44392785dea254..6f59d1cd345cbc 100755 --- a/js/src/builtin/intl/make_intl_data.py +++ b/js/src/builtin/intl/make_intl_data.py @@ -56,8 +56,12 @@ from zipfile import ZipFile if sys.version_info.major == 2: - from itertools import ifilter as filter, ifilterfalse as filterfalse, imap as map,\ - izip_longest as zip_longest + from itertools import ( + ifilter as filter, + ifilterfalse as filterfalse, + imap as map, + izip_longest as zip_longest, + ) from urllib2 import urlopen, Request as UrlRequest from urlparse import urlsplit else: @@ -84,11 +88,11 @@ def writeMappingHeader(println, description, source, url): def writeMappingsVar(println, mapping, name, description, source, url): - """ Writes a variable definition with a mapping table. + """Writes a variable definition with a mapping table. - Writes the contents of dictionary |mapping| through the |println| - function with the given variable name and a comment with description, - fileDate, and URL. + Writes the contents of dictionary |mapping| through the |println| + function with the given variable name and a comment with description, + fileDate, and URL. """ println(u"") writeMappingHeader(println, description, source, url) @@ -98,32 +102,53 @@ def writeMappingsVar(println, mapping, name, description, source, url): println(u"};") -def writeMappingsBinarySearch(println, fn_name, type_name, name, validate_fn, validate_case_fn, - mappings, tag_maxlength, description, source, url): - """ Emit code to perform a binary search on language tag subtags. - - Uses the contents of |mapping|, which can either be a dictionary or set, - to emit a mapping function to find subtag replacements. 
+def writeMappingsBinarySearch( + println, + fn_name, + type_name, + name, + validate_fn, + validate_case_fn, + mappings, + tag_maxlength, + description, + source, + url, +): + """Emit code to perform a binary search on language tag subtags. + + Uses the contents of |mapping|, which can either be a dictionary or set, + to emit a mapping function to find subtag replacements. """ println(u"") writeMappingHeader(println, description, source, url) - println(u""" + println( + u""" bool js::intl::LanguageTag::{0}({1} {2}) {{ MOZ_ASSERT({3}({2}.span())); MOZ_ASSERT({4}({2}.span())); -""".format(fn_name, type_name, name, validate_fn, validate_case_fn).strip()) +""".format( + fn_name, type_name, name, validate_fn, validate_case_fn + ).strip() + ) def write_array(subtags, name, length, fixed): if fixed: - println(u" static const char {}[{}][{}] = {{".format(name, len(subtags), - length + 1)) + println( + u" static const char {}[{}][{}] = {{".format( + name, len(subtags), length + 1 + ) + ) else: println(u" static const char* {}[{}] = {{".format(name, len(subtags))) # Group in pairs of ten to not exceed the 80 line column limit. for entries in grouper(subtags, 10): - entries = (u"\"{}\"".format(tag).rjust(length + 2) - for tag in entries if tag is not None) + entries = ( + u'"{}"'.format(tag).rjust(length + 2) + for tag in entries + if tag is not None + ) println(u" {},".format(u", ".join(entries))) println(u" };") @@ -137,14 +162,24 @@ def write_array(subtags, name, length, fixed): for (length, subtags) in groupby(sorted(mappings_keys, key=len), len): # Omit the length check if the current length is the maximum length. if length != tag_maxlength: - println(u""" + println( + u""" if ({}.length() == {}) {{ -""".format(name, length).rstrip("\n")) +""".format( + name, length + ).rstrip( + "\n" + ) + ) else: trailing_return = False - println(u""" + println( + u""" { -""".rstrip("\n")) +""".rstrip( + "\n" + ) + ) # The subtags need to be sorted for binary search to work. subtags = sorted(subtags) @@ -155,80 +190,129 @@ def equals(subtag): # Don't emit a binary search for short lists. 
if len(subtags) == 1: if type(mappings) == dict: - println(u""" + println( + u""" if ({}) {{ {}.set("{}"); return true; }} return false; -""".format(equals(subtags[0]), name, mappings[subtags[0]]).strip("\n")) +""".format( + equals(subtags[0]), name, mappings[subtags[0]] + ).strip( + "\n" + ) + ) else: - println(u""" + println( + u""" return {}; -""".format(equals(subtags[0])).strip("\n")) +""".format( + equals(subtags[0]) + ).strip( + "\n" + ) + ) elif len(subtags) <= 4: if type(mappings) == dict: for subtag in subtags: - println(u""" + println( + u""" if ({}) {{ {}.set("{}"); return true; }} -""".format(equals(subtag), name, mappings[subtag]).strip("\n")) - - println(u""" +""".format( + equals(subtag), name, mappings[subtag] + ).strip( + "\n" + ) + ) + + println( + u""" return false; -""".strip("\n")) +""".strip( + "\n" + ) + ) else: cond = (equals(subtag) for subtag in subtags) cond = (u" ||\n" + u" " * (4 + len("return "))).join(cond) - println(u""" + println( + u""" return {}; -""".format(cond).strip("\n")) +""".format( + cond + ).strip( + "\n" + ) + ) else: write_array(subtags, name + "s", length, True) if type(mappings) == dict: write_array([mappings[k] for k in subtags], u"aliases", length, False) - println(u""" + println( + u""" if (const char* replacement = SearchReplacement({0}s, aliases, {0})) {{ {0}.set(mozilla::MakeStringSpan(replacement)); return true; }} return false; -""".format(name).rstrip()) +""".format( + name + ).rstrip() + ) else: - println(u""" + println( + u""" return HasReplacement({0}s, {0}); -""".format(name).rstrip()) +""".format( + name + ).rstrip() + ) - println(u""" + println( + u""" } -""".strip("\n")) +""".strip( + "\n" + ) + ) if trailing_return: - println(u""" - return false;""") - - println(u""" -}""".lstrip("\n")) + println( + u""" + return false;""" + ) + + println( + u""" +}""".lstrip( + "\n" + ) + ) -def writeComplexLanguageTagMappings(println, complex_language_mappings, - description, source, url): +def writeComplexLanguageTagMappings( + println, complex_language_mappings, description, source, url +): println(u"") writeMappingHeader(println, description, source, url) - println(u""" + println( + u""" void js::intl::LanguageTag::performComplexLanguageMappings() { MOZ_ASSERT(IsStructurallyValidLanguageTag(language().span())); MOZ_ASSERT(IsCanonicallyCasedLanguageTag(language().span())); -""".lstrip()) +""".lstrip() + ) # Merge duplicate language entries. 
language_aliases = {} - for (deprecated_language, (language, script, region)) in ( - sorted(complex_language_mappings.items(), key=itemgetter(0)) + for (deprecated_language, (language, script, region)) in sorted( + complex_language_mappings.items(), key=itemgetter(0) ): key = (language, script, region) if key not in language_aliases: @@ -237,8 +321,8 @@ def writeComplexLanguageTagMappings(println, complex_language_mappings, language_aliases[key].append(deprecated_language) first_language = True - for (deprecated_language, (language, script, region)) in ( - sorted(complex_language_mappings.items(), key=itemgetter(0)) + for (deprecated_language, (language, script, region)) in sorted( + complex_language_mappings.items(), key=itemgetter(0) ): key = (language, script, region) if deprecated_language in language_aliases[key]: @@ -247,45 +331,82 @@ def writeComplexLanguageTagMappings(println, complex_language_mappings, if_kind = u"if" if first_language else u"else if" first_language = False - cond = (u"language().equalTo(\"{}\")".format(lang) - for lang in [deprecated_language] + language_aliases[key]) + cond = ( + u'language().equalTo("{}")'.format(lang) + for lang in [deprecated_language] + language_aliases[key] + ) cond = (u" ||\n" + u" " * (2 + len(if_kind) + 2)).join(cond) - println(u""" - {} ({}) {{""".format(if_kind, cond).strip("\n")) - - println(u""" - setLanguage("{}");""".format(language).strip("\n")) + println( + u""" + {} ({}) {{""".format( + if_kind, cond + ).strip( + "\n" + ) + ) + + println( + u""" + setLanguage("{}");""".format( + language + ).strip( + "\n" + ) + ) if script is not None: - println(u""" + println( + u""" if (script().missing()) {{ setScript("{}"); - }}""".format(script).strip("\n")) + }}""".format( + script + ).strip( + "\n" + ) + ) if region is not None: - println(u""" + println( + u""" if (region().missing()) {{ setRegion("{}"); - }}""".format(region).strip("\n")) - println(u""" - }""".strip("\n")) - - println(u""" + }}""".format( + region + ).strip( + "\n" + ) + ) + println( + u""" + }""".strip( + "\n" + ) + ) + + println( + u""" } -""".strip("\n")) +""".strip( + "\n" + ) + ) -def writeComplexRegionTagMappings(println, complex_region_mappings, - description, source, url): +def writeComplexRegionTagMappings( + println, complex_region_mappings, description, source, url +): println(u"") writeMappingHeader(println, description, source, url) - println(u""" + println( + u""" void js::intl::LanguageTag::performComplexRegionMappings() { MOZ_ASSERT(IsStructurallyValidLanguageTag(language().span())); MOZ_ASSERT(IsCanonicallyCasedLanguageTag(language().span())); MOZ_ASSERT(IsStructurallyValidRegionTag(region().span())); MOZ_ASSERT(IsCanonicallyCasedRegionTag(region().span())); -""".lstrip()) +""".lstrip() + ) # |non_default_replacements| is a list and hence not hashable. Convert it # to a string to get a proper hashable value. @@ -294,8 +415,8 @@ def hash_key(default, non_default_replacements): # Merge duplicate region entries. 
region_aliases = {} - for (deprecated_region, (default, non_default_replacements)) in ( - sorted(complex_region_mappings.items(), key=itemgetter(0)) + for (deprecated_region, (default, non_default_replacements)) in sorted( + complex_region_mappings.items(), key=itemgetter(0) ): key = hash_key(default, non_default_replacements) if key not in region_aliases: @@ -304,8 +425,8 @@ def hash_key(default, non_default_replacements): region_aliases[key].append(deprecated_region) first_region = True - for (deprecated_region, (default, non_default_replacements)) in ( - sorted(complex_region_mappings.items(), key=itemgetter(0)) + for (deprecated_region, (default, non_default_replacements)) in sorted( + complex_region_mappings.items(), key=itemgetter(0) ): key = hash_key(default, non_default_replacements) if deprecated_region in region_aliases[key]: @@ -314,56 +435,85 @@ def hash_key(default, non_default_replacements): if_kind = u"if" if first_region else u"else if" first_region = False - cond = (u"region().equalTo(\"{}\")".format(region) - for region in [deprecated_region] + region_aliases[key]) + cond = ( + u'region().equalTo("{}")'.format(region) + for region in [deprecated_region] + region_aliases[key] + ) cond = (u" ||\n" + u" " * (2 + len(if_kind) + 2)).join(cond) - println(u""" - {} ({}) {{""".format(if_kind, cond).strip("\n")) + println( + u""" + {} ({}) {{""".format( + if_kind, cond + ).strip( + "\n" + ) + ) - replacement_regions = sorted({region for (_, _, region) in non_default_replacements}) + replacement_regions = sorted( + {region for (_, _, region) in non_default_replacements} + ) first_case = True for replacement_region in replacement_regions: - replacement_language_script = sorted((language, script) - for (language, script, region) in ( - non_default_replacements - ) - if region == replacement_region) + replacement_language_script = sorted( + (language, script) + for (language, script, region) in (non_default_replacements) + if region == replacement_region + ) if_kind = u"if" if first_case else u"else if" first_case = False def compare_tags(language, script): if script is None: - return u"language().equalTo(\"{}\")".format(language) - return u"(language().equalTo(\"{}\") && script().equalTo(\"{}\"))".format( - language, script) - - cond = (compare_tags(language, script) - for (language, script) in replacement_language_script) + return u'language().equalTo("{}")'.format(language) + return u'(language().equalTo("{}") && script().equalTo("{}"))'.format( + language, script + ) + + cond = ( + compare_tags(language, script) + for (language, script) in replacement_language_script + ) cond = (u" ||\n" + u" " * (4 + len(if_kind) + 2)).join(cond) - println(u""" + println( + u""" {} ({}) {{ setRegion("{}"); - }}""".format(if_kind, cond, replacement_region).rstrip().strip("\n")) - - println(u""" + }}""".format( + if_kind, cond, replacement_region + ) + .rstrip() + .strip("\n") + ) + + println( + u""" else {{ setRegion("{}"); }} - }}""".format(default).rstrip().strip("\n")) - - println(u""" + }}""".format( + default + ) + .rstrip() + .strip("\n") + ) + + println( + u""" } -""".strip("\n")) +""".strip( + "\n" + ) + ) -def writeVariantTagMappings(println, variant_mappings, description, source, - url): +def writeVariantTagMappings(println, variant_mappings, description, source, url): """ Writes a function definition that maps variant subtags. 
""" - println(u""" + println( + u""" static const char* ToCharPointer(const char* str) { return str; } @@ -376,9 +526,11 @@ def writeVariantTagMappings(println, variant_mappings, description, source, static bool IsLessThan(const T& a, const U& b) { return strcmp(ToCharPointer(a), ToCharPointer(b)) < 0; } -""") +""" + ) writeMappingHeader(println, description, source, url) - println(u""" + println( + u""" bool js::intl::LanguageTag::performVariantMappings(JSContext* cx) { // The variant subtags need to be sorted for binary search. MOZ_ASSERT(std::is_sorted(variants_.begin(), variants_.end(), @@ -405,57 +557,92 @@ def writeVariantTagMappings(println, variant_mappings, description, source, for (size_t i = 0; i < variants_.length(); ) { auto& variant = variants_[i]; MOZ_ASSERT(IsCanonicallyCasedVariantTag(mozilla::MakeStringSpan(variant.get()))); -""".lstrip()) +""".lstrip() + ) first_variant = True - for (deprecated_variant, (type, replacement)) in ( - sorted(variant_mappings.items(), key=itemgetter(0)) + for (deprecated_variant, (type, replacement)) in sorted( + variant_mappings.items(), key=itemgetter(0) ): if_kind = u"if" if first_variant else u"else if" first_variant = False - println(u""" + println( + u""" {} (strcmp(variant.get(), "{}") == 0) {{ variants_.erase(variants_.begin() + i); -""".format(if_kind, deprecated_variant).strip("\n")) +""".format( + if_kind, deprecated_variant + ).strip( + "\n" + ) + ) if type == "language": - println(u""" + println( + u""" setLanguage("{}"); -""".format(replacement).strip("\n")) +""".format( + replacement + ).strip( + "\n" + ) + ) elif type == "region": - println(u""" + println( + u""" setRegion("{}"); -""".format(replacement).strip("\n")) +""".format( + replacement + ).strip( + "\n" + ) + ) else: assert type == "variant" - println(u""" + println( + u""" if (!insertVariantSortedIfNotPresent("{}")) {{ return false; }} -""".format(replacement).strip("\n")) - - println(u""" +""".format( + replacement + ).strip( + "\n" + ) + ) + + println( + u""" } -""".strip("\n")) +""".strip( + "\n" + ) + ) - println(u""" + println( + u""" else { i++; } } return true; } -""".strip("\n")) +""".strip( + "\n" + ) + ) -def writeGrandfatheredMappingsFunction(println, grandfathered_mappings, - description, source, url): +def writeGrandfatheredMappingsFunction( + println, grandfathered_mappings, description, source, url +): """ Writes a function definition that maps grandfathered language tags. """ println(u"") writeMappingHeader(println, description, source, url) - println(u"""\ + println( + u"""\ bool js::intl::LanguageTag::updateGrandfatheredMappings(JSContext* cx) { // We're mapping regular grandfathered tags to non-grandfathered form here. // Other tags remain unchanged. @@ -496,7 +683,8 @@ def writeGrandfatheredMappingsFunction(println, grandfathered_mappings, auto variantEqualTo = [this](const char* variant) { return strcmp(variants()[0].get(), variant) == 0; - };""") + };""" + ) # From Unicode BCP 47 locale identifier . # @@ -524,7 +712,9 @@ def writeGrandfatheredMappingsFunction(println, grandfathered_mappings, # pu_extensions = sep [xX] (sep alphanum{1,8})+ (?:-(?Px(-[a-z0-9]{1,8})+))? 
$ - """, re.IGNORECASE | re.VERBOSE) + """, + re.IGNORECASE | re.VERBOSE, + ) is_first = True @@ -533,19 +723,20 @@ def writeGrandfatheredMappingsFunction(println, grandfathered_mappings, assert tag_match is not None tag_language = tag_match.group("language") - assert tag_match.group("script") is None, ( - "{} does not contain a script subtag".format(tag)) - assert tag_match.group("region") is None, ( - "{} does not contain a region subtag".format(tag)) + assert ( + tag_match.group("script") is None + ), "{} does not contain a script subtag".format(tag) + assert ( + tag_match.group("region") is None + ), "{} does not contain a region subtag".format(tag) tag_variants = tag_match.group("variants") - assert tag_variants is not None, ( - "{} contains a variant subtag".format(tag)) - assert tag_match.group("privateuse") is None, ( - "{} does not contain a privateuse subtag".format(tag)) + assert tag_variants is not None, "{} contains a variant subtag".format(tag) + assert ( + tag_match.group("privateuse") is None + ), "{} does not contain a privateuse subtag".format(tag) tag_variant = tag_variants[1:] - assert "-" not in tag_variant, ( - "{} contains only a single variant".format(tag)) + assert "-" not in tag_variant, "{} contains only a single variant".format(tag) modern_match = re_unicode_locale_id.match(modern) assert modern_match is not None @@ -556,70 +747,113 @@ def writeGrandfatheredMappingsFunction(println, grandfathered_mappings, modern_variants = modern_match.group("variants") modern_privateuse = modern_match.group("privateuse") - println(u""" + println( + u""" // {} -> {} -""".format(tag, modern).rstrip()) +""".format( + tag, modern + ).rstrip() + ) - println(u""" + println( + u""" {}if (language().equalTo("{}") && variantEqualTo("{}")) {{ - """.format("" if is_first else "else ", - tag_language, - tag_variant).rstrip().strip("\n")) + """.format( + "" if is_first else "else ", tag_language, tag_variant + ) + .rstrip() + .strip("\n") + ) is_first = False - println(u""" + println( + u""" setLanguage("{}"); - """.format(modern_language).rstrip().strip("\n")) + """.format( + modern_language + ) + .rstrip() + .strip("\n") + ) if modern_script is not None: - println(u""" + println( + u""" setScript("{}"); - """.format(modern_script).rstrip().strip("\n")) + """.format( + modern_script + ) + .rstrip() + .strip("\n") + ) if modern_region is not None: - println(u""" + println( + u""" setRegion("{}"); - """.format(modern_region).rstrip().strip("\n")) - - assert modern_variants is None, ( - "all regular grandfathered tags' modern forms do not contain variant subtags") - - println(u""" + """.format( + modern_region + ) + .rstrip() + .strip("\n") + ) + + assert ( + modern_variants is None + ), "all regular grandfathered tags' modern forms do not contain variant subtags" + + println( + u""" clearVariants(); - """.rstrip().strip("\n")) + """.rstrip().strip( + "\n" + ) + ) if modern_privateuse is not None: - println(u""" + println( + u""" auto privateuse = DuplicateString(cx, "{}"); if (!privateuse) {{ return false; }} setPrivateuse(std::move(privateuse)); - """.format(modern_privateuse).rstrip().rstrip("\n")) - - println(u""" + """.format( + modern_privateuse + ) + .rstrip() + .rstrip("\n") + ) + + println( + u""" return true; - }""".rstrip().strip("\n")) + }""".rstrip().strip( + "\n" + ) + ) - println(u""" + println( + u""" return true; -}""") +}""" + ) def readSupplementalData(core_file): - """ Reads CLDR Supplemental Data and extracts information for Intl.js. 
- - Information extracted: - - grandfatheredMappings: mappings from grandfathered tags to preferred - complete language tags - - languageMappings: mappings from language subtags to preferred subtags - - complexLanguageMappings: mappings from language subtags with complex rules - - regionMappings: mappings from region subtags to preferred subtags - - complexRegionMappings: mappings from region subtags with complex rules - - variantMappings: mappings from variant subtags to preferred subtags - - likelySubtags: likely subtags used for generating test data only - Returns these mappings as dictionaries. + """Reads CLDR Supplemental Data and extracts information for Intl.js. + + Information extracted: + - grandfatheredMappings: mappings from grandfathered tags to preferred + complete language tags + - languageMappings: mappings from language subtags to preferred subtags + - complexLanguageMappings: mappings from language subtags with complex rules + - regionMappings: mappings from region subtags to preferred subtags + - complexRegionMappings: mappings from region subtags with complex rules + - variantMappings: mappings from variant subtags to preferred subtags + - likelySubtags: likely subtags used for generating test data only + Returns these mappings as dictionaries. """ import xml.etree.ElementTree as ET @@ -643,7 +877,9 @@ def readSupplementalData(core_file): # unicode_variant_subtag = (alphanum{5,8} | digit alphanum{3}) (?P(-([a-z0-9]{5,8}|[0-9][a-z0-9]{3}))+)? $ - """, re.IGNORECASE | re.VERBOSE) + """, + re.IGNORECASE | re.VERBOSE, + ) re_unicode_language_subtag = re.compile( r""" @@ -651,7 +887,9 @@ def readSupplementalData(core_file): # unicode_language_subtag = alpha{2,3} | alpha{5,8} ([a-z]{2,3}|[a-z]{5,8}) $ - """, re.IGNORECASE | re.VERBOSE) + """, + re.IGNORECASE | re.VERBOSE, + ) re_unicode_region_subtag = re.compile( r""" @@ -659,7 +897,9 @@ def readSupplementalData(core_file): # unicode_region_subtag = (alpha{2} | digit{3}) ([a-z]{2}|[0-9]{3}) $ - """, re.IGNORECASE | re.VERBOSE) + """, + re.IGNORECASE | re.VERBOSE, + ) re_unicode_variant_subtag = re.compile( r""" @@ -667,7 +907,9 @@ def readSupplementalData(core_file): # unicode_variant_subtag = (alphanum{5,8} | digit alphanum{3}) ([a-z0-9]{5,8}|(?:[0-9][a-z0-9]{3})) $ - """, re.IGNORECASE | re.VERBOSE) + """, + re.IGNORECASE | re.VERBOSE, + ) # The fixed list of BCP 47 grandfathered language tags. grandfathered_tags = ( @@ -700,8 +942,9 @@ def readSupplementalData(core_file): ) # The list of grandfathered tags which are valid Unicode BCP 47 locale identifiers. - unicode_bcp47_grandfathered_tags = {tag for tag in grandfathered_tags - if re_unicode_language_id.match(tag)} + unicode_bcp47_grandfathered_tags = { + tag for tag in grandfathered_tags if re_unicode_language_id.match(tag) + } # Dictionary of simple language subtag mappings, e.g. "in" -> "id". language_mappings = {} @@ -737,9 +980,11 @@ def bcp47_canonical(language, script, region): # Canonical case for language subtags is lower case. # Canonical case for script subtags is title case. # Canonical case for region subtags is upper case. 
- return (language.lower() if language else None, - script.title() if script else None, - region.upper() if region else None) + return ( + language.lower() if language else None, + script.title() if script else None, + region.upper() if region else None, + ) tree = ET.parse(core_file.open("common/supplemental/supplementalMetadata.xml")) @@ -764,14 +1009,18 @@ def bcp47_canonical(language, script, region): language_mappings[type] = replacement.lower() else: replacement_match = re_unicode_language_id.match(replacement) - assert replacement_match is not None, ( - "{} invalid Unicode BCP 47 locale identifier".format(replacement)) - assert replacement_match.group("variants") is None, ( - "{}: unexpected variant subtags in {}".format(type, replacement)) - - complex_language_mappings[type] = bcp47_canonical(replacement_match.group("language"), - replacement_match.group("script"), - replacement_match.group("region")) + assert ( + replacement_match is not None + ), "{} invalid Unicode BCP 47 locale identifier".format(replacement) + assert ( + replacement_match.group("variants") is None + ), "{}: unexpected variant subtags in {}".format(type, replacement) + + complex_language_mappings[type] = bcp47_canonical( + replacement_match.group("language"), + replacement_match.group("script"), + replacement_match.group("region"), + ) for territory_alias in tree.iterfind(".//territoryAlias"): type = territory_alias.get("type") @@ -799,8 +1048,9 @@ def bcp47_canonical(language, script, region): type = variant_alias.get("type") replacement = variant_alias.get("replacement") - assert re_unicode_variant_subtag.match(type) is not None, ( - "{} invalid variant subtag".format(type)) + assert ( + re_unicode_variant_subtag.match(type) is not None + ), "{} invalid variant subtag".format(type) # Normalize the case, because some variants are in upper case. type = type.lower() @@ -809,17 +1059,22 @@ def bcp47_canonical(language, script, region): # Language and region subtags are case normalized, variant subtags can # be in any case. 
- if re_unicode_language_subtag.match(replacement) is not None and replacement.islower(): + if ( + re_unicode_language_subtag.match(replacement) is not None + and replacement.islower() + ): variant_mappings[type] = ("language", replacement) elif re_unicode_region_subtag.match(replacement) is not None: - assert replacement.isupper() or replacement.isdigit(), ( - "{} invalid variant subtag replacement".format(replacement)) + assert ( + replacement.isupper() or replacement.isdigit() + ), "{} invalid variant subtag replacement".format(replacement) variant_mappings[type] = ("region", replacement) else: - assert re_unicode_variant_subtag.match(replacement) is not None, ( - "{} invalid variant subtag replacement".format(replacement)) + assert ( + re_unicode_variant_subtag.match(replacement) is not None + ), "{} invalid variant subtag replacement".format(replacement) variant_mappings[type] = ("variant", replacement.lower()) tree = ET.parse(core_file.open("common/supplemental/likelySubtags.xml")) @@ -829,25 +1084,33 @@ def bcp47_canonical(language, script, region): for likely_subtag in tree.iterfind(".//likelySubtag"): from_tag = bcp47_id(likely_subtag.get("from")) from_match = re_unicode_language_id.match(from_tag) - assert from_match is not None, ( - "{} invalid Unicode BCP 47 locale identifier".format(from_tag)) - assert from_match.group("variants") is None, ( - "unexpected variant subtags in {}".format(from_tag)) + assert ( + from_match is not None + ), "{} invalid Unicode BCP 47 locale identifier".format(from_tag) + assert ( + from_match.group("variants") is None + ), "unexpected variant subtags in {}".format(from_tag) to_tag = bcp47_id(likely_subtag.get("to")) to_match = re_unicode_language_id.match(to_tag) - assert to_match is not None, ( - "{} invalid Unicode BCP 47 locale identifier".format(to_tag)) - assert to_match.group("variants") is None, ( - "unexpected variant subtags in {}".format(to_tag)) - - from_canonical = bcp47_canonical(from_match.group("language"), - from_match.group("script"), - from_match.group("region")) - - to_canonical = bcp47_canonical(to_match.group("language"), - to_match.group("script"), - to_match.group("region")) + assert ( + to_match is not None + ), "{} invalid Unicode BCP 47 locale identifier".format(to_tag) + assert ( + to_match.group("variants") is None + ), "unexpected variant subtags in {}".format(to_tag) + + from_canonical = bcp47_canonical( + from_match.group("language"), + from_match.group("script"), + from_match.group("region"), + ) + + to_canonical = bcp47_canonical( + to_match.group("language"), + to_match.group("script"), + to_match.group("region"), + ) likely_subtags[from_canonical] = to_canonical @@ -856,41 +1119,53 @@ def bcp47_canonical(language, script, region): for (deprecated_region, replacements) in complex_region_mappings.items(): # Find all likely subtag entries which don't already contain a region # subtag and whose target region is in the list of replacement regions. - region_likely_subtags = [(from_language, from_script, to_region) - for ((from_language, from_script, from_region), - (_, _, to_region)) in likely_subtags.items() - if from_region is None and to_region in replacements] + region_likely_subtags = [ + (from_language, from_script, to_region) + for ( + (from_language, from_script, from_region), + (_, _, to_region), + ) in likely_subtags.items() + if from_region is None and to_region in replacements + ] # The first replacement entry is the default region. 
default = replacements[0] # Find all likely subtag entries whose region matches the default region. - default_replacements = {(language, script) - for (language, script, region) in region_likely_subtags - if region == default} + default_replacements = { + (language, script) + for (language, script, region) in region_likely_subtags + if region == default + } # And finally find those entries which don't use the default region. # These are the entries we're actually interested in, because those need # to be handled specially when selecting the correct preferred region. - non_default_replacements = [(language, script, region) - for (language, script, region) in region_likely_subtags - if (language, script) not in default_replacements] + non_default_replacements = [ + (language, script, region) + for (language, script, region) in region_likely_subtags + if (language, script) not in default_replacements + ] # If there are no non-default replacements, we can handle the region as # part of the simple region mapping. if non_default_replacements: - complex_region_mappings_final[deprecated_region] = (default, non_default_replacements) + complex_region_mappings_final[deprecated_region] = ( + default, + non_default_replacements, + ) else: region_mappings[deprecated_region] = default - return {"grandfatheredMappings": grandfathered_mappings, - "languageMappings": language_mappings, - "complexLanguageMappings": complex_language_mappings, - "regionMappings": region_mappings, - "complexRegionMappings": complex_region_mappings_final, - "variantMappings": variant_mappings, - "likelySubtags": likely_subtags, - } + return { + "grandfatheredMappings": grandfathered_mappings, + "languageMappings": language_mappings, + "complexLanguageMappings": complex_language_mappings, + "regionMappings": region_mappings, + "complexRegionMappings": complex_region_mappings_final, + "variantMappings": variant_mappings, + "likelySubtags": likely_subtags, + } def readUnicodeExtensions(core_file): @@ -909,7 +1184,6 @@ def readUnicodeExtensions(core_file): mapping = { # Unicode BCP 47 U Extension "u": {}, - # Unicode BCP 47 T Extension "t": {}, } @@ -918,8 +1192,9 @@ def readBCP47File(file): tree = ET.parse(file) for keyword in tree.iterfind(".//keyword/key"): extension = keyword.get("extension", "u") - assert extension == "u" or extension == "t", ( - "unknown extension type: {}".format(extension)) + assert ( + extension == "u" or extension == "t" + ), "unknown extension type: {}".format(extension) extension_name = keyword.get("name") @@ -937,13 +1212,19 @@ def readBCP47File(file): # - # - # - - if name in ("CODEPOINTS", "REORDER_CODE", "RG_KEY_VALUE", "SUBDIVISION_CODE", - "PRIVATE_USE"): + if name in ( + "CODEPOINTS", + "REORDER_CODE", + "RG_KEY_VALUE", + "SUBDIVISION_CODE", + "PRIVATE_USE", + ): continue # All other names should match the 'type' production. - assert typeRE.match(name) is not None, ( - "{} matches the 'type' production".format(name)) + assert ( + typeRE.match(name) is not None + ), "{} matches the 'type' production".format(name) # : # @@ -987,8 +1268,10 @@ def readBCP47File(file): continue # See comment above when 'alias' and 'preferred' are both present. - if (preferred is not None and - name in mapping[extension][extension_name]): + if ( + preferred is not None + and name in mapping[extension][extension_name] + ): continue # Skip over entries where 'name' and 'alias' are equal. 
@@ -1000,7 +1283,9 @@ def readBCP47File(file): if name == alias_name: continue - mapping[extension].setdefault(extension_name, {})[alias_name] = name + mapping[extension].setdefault(extension_name, {})[ + alias_name + ] = name def readSupplementalMetadata(file): # Find subdivision and region replacements. @@ -1013,8 +1298,9 @@ def readSupplementalMetadata(file): tree = ET.parse(file) for alias in tree.iterfind(".//subdivisionAlias"): type = alias.get("type") - assert typeRE.match(type) is not None, ( - "{} matches the 'type' production".format(type)) + assert ( + typeRE.match(type) is not None + ), "{} matches the 'type' production".format(type) # Take the first replacement when multiple ones are present. replacement = alias.get("replacement").split(" ")[0].lower() @@ -1036,7 +1322,9 @@ def readSupplementalMetadata(file): if bcpFileRE.match(name): readBCP47File(core_file.open(name)) - readSupplementalMetadata(core_file.open("common/supplemental/supplementalMetadata.xml")) + readSupplementalMetadata( + core_file.open("common/supplemental/supplementalMetadata.xml") + ) return { "unicodeMappings": mapping["u"], @@ -1051,7 +1339,8 @@ def writeCLDRLanguageTagData(println, data, url): println(u"// Version: CLDR-{}".format(data["version"])) println(u"// URL: {}".format(url)) - println(u""" + println( + u""" #include "mozilla/Assertions.h" #include "mozilla/Span.h" #include "mozilla/TextUtils.h" @@ -1149,7 +1438,8 @@ def writeCLDRLanguageTagData(println, data, url): return std::all_of(type.begin(), type.end(), IsAsciiLowercaseAlphanumericOrDash); } #endif -""".rstrip()) +""".rstrip() + ) source = u"CLDR Supplemental Data, version {}".format(data["version"]) grandfathered_mappings = data["grandfatheredMappings"] @@ -1167,42 +1457,89 @@ def writeCLDRLanguageTagData(println, data, url): # unicode_region_subtag = (alpha{2} | digit{3}) ; region_maxlength = 3 - writeMappingsBinarySearch(println, "languageMapping", - "LanguageSubtag&", "language", - "IsStructurallyValidLanguageTag", - "IsCanonicallyCasedLanguageTag", - language_mappings, language_maxlength, - "Mappings from language subtags to preferred values.", source, url) - writeMappingsBinarySearch(println, "complexLanguageMapping", - "const LanguageSubtag&", "language", - "IsStructurallyValidLanguageTag", - "IsCanonicallyCasedLanguageTag", - complex_language_mappings.keys(), language_maxlength, - "Language subtags with complex mappings.", source, url) - writeMappingsBinarySearch(println, "regionMapping", - "RegionSubtag&", "region", - "IsStructurallyValidRegionTag", - "IsCanonicallyCasedRegionTag", - region_mappings, region_maxlength, - "Mappings from region subtags to preferred values.", source, url) - writeMappingsBinarySearch(println, "complexRegionMapping", - "const RegionSubtag&", "region", - "IsStructurallyValidRegionTag", - "IsCanonicallyCasedRegionTag", - complex_region_mappings.keys(), region_maxlength, - "Region subtags with complex mappings.", source, url) - - writeComplexLanguageTagMappings(println, complex_language_mappings, - "Language subtags with complex mappings.", source, url) - writeComplexRegionTagMappings(println, complex_region_mappings, - "Region subtags with complex mappings.", source, url) - - writeVariantTagMappings(println, variant_mappings, - "Mappings from variant subtags to preferred values.", source, url) - - writeGrandfatheredMappingsFunction(println, grandfathered_mappings, - "Canonicalize grandfathered locale identifiers.", source, - url) + writeMappingsBinarySearch( + println, + "languageMapping", + 
"LanguageSubtag&", + "language", + "IsStructurallyValidLanguageTag", + "IsCanonicallyCasedLanguageTag", + language_mappings, + language_maxlength, + "Mappings from language subtags to preferred values.", + source, + url, + ) + writeMappingsBinarySearch( + println, + "complexLanguageMapping", + "const LanguageSubtag&", + "language", + "IsStructurallyValidLanguageTag", + "IsCanonicallyCasedLanguageTag", + complex_language_mappings.keys(), + language_maxlength, + "Language subtags with complex mappings.", + source, + url, + ) + writeMappingsBinarySearch( + println, + "regionMapping", + "RegionSubtag&", + "region", + "IsStructurallyValidRegionTag", + "IsCanonicallyCasedRegionTag", + region_mappings, + region_maxlength, + "Mappings from region subtags to preferred values.", + source, + url, + ) + writeMappingsBinarySearch( + println, + "complexRegionMapping", + "const RegionSubtag&", + "region", + "IsStructurallyValidRegionTag", + "IsCanonicallyCasedRegionTag", + complex_region_mappings.keys(), + region_maxlength, + "Region subtags with complex mappings.", + source, + url, + ) + + writeComplexLanguageTagMappings( + println, + complex_language_mappings, + "Language subtags with complex mappings.", + source, + url, + ) + writeComplexRegionTagMappings( + println, + complex_region_mappings, + "Region subtags with complex mappings.", + source, + url, + ) + + writeVariantTagMappings( + println, + variant_mappings, + "Mappings from variant subtags to preferred values.", + source, + url, + ) + + writeGrandfatheredMappingsFunction( + println, + grandfathered_mappings, + "Canonicalize grandfathered locale identifiers.", + source, + url, + ) writeUnicodeExtensionsMappings(println, unicode_mappings, "Unicode") writeUnicodeExtensionsMappings(println, transform_mappings, "Transform") @@ -1222,9 +1559,9 @@ def writeCLDRLanguageTagLikelySubtagsTest(println, data, url): def bcp47(tag): (language, script, region) = tag - return "{}{}{}".format(language, - "-" + script if script else "", - "-" + region if region else "") + return "{}{}{}".format( + language, "-" + script if script else "", "-" + region if region else "" + ) def canonical(tag): (language, script, region) = tag @@ -1234,17 +1571,20 @@ def canonical(tag): language = language_mappings[language] elif language in complex_language_mappings: (language2, script2, region2) = complex_language_mappings[language] - (language, script, region) = (language2, - script if script else script2, - region if region else region2) + (language, script, region) = ( + language2, + script if script else script2, + region if region else region2, + ) # Map deprecated region subtags. if region in region_mappings: region = region_mappings[region] else: # Assume no complex region mappings are needed for now. - assert region not in complex_region_mappings,\ - "unexpected region with complex mappings: {}".format(region) + assert ( + region not in complex_region_mappings + ), "unexpected region with complex mappings: {}".format(region) return (language, script, region) @@ -1259,20 +1599,24 @@ def addLikelySubtags(tag): region = None # Step 2: Lookup. 
- searches = ((language, script, region), - (language, None, region), - (language, script, None), - (language, None, None), - ("und", script, None)) + searches = ( + (language, script, region), + (language, None, region), + (language, script, None), + (language, None, None), + ("und", script, None), + ) search = next(search for search in searches if search in likely_subtags) (language_s, script_s, region_s) = search (language_m, script_m, region_m) = likely_subtags[search] # Step 3: Return. - return (language if language != language_s else language_m, - script if script != script_s else script_m, - region if region != region_s else region_m) + return ( + language if language != language_s else language_m, + script if script != script_s else script_m, + region if region != region_s else region_m, + ) # https://unicode.org/reports/tr35/#Likely_Subtags def removeLikelySubtags(tag): @@ -1283,7 +1627,11 @@ def removeLikelySubtags(tag): # Step 3: Find a match. (language, script, region) = max - for trial in ((language, None, None), (language, None, region), (language, script, None)): + for trial in ( + (language, None, None), + (language, None, region), + (language, script, None), + ): if addLikelySubtags(trial) == max: return trial @@ -1307,32 +1655,54 @@ def likely_canonical(from_tag, to_tag): return to_canonical # |likely_subtags| contains non-canonicalized tags, so canonicalize it first. - likely_subtags_canonical = {k: likely_canonical(k, v) for (k, v) in likely_subtags.items()} + likely_subtags_canonical = { + k: likely_canonical(k, v) for (k, v) in likely_subtags.items() + } # Add test data for |Intl.Locale.prototype.maximize()|. - writeMappingsVar(println, {bcp47(k): bcp47(v) for (k, v) in likely_subtags_canonical.items()}, - "maxLikelySubtags", "Extracted from likelySubtags.xml.", source, url) + writeMappingsVar( + println, + {bcp47(k): bcp47(v) for (k, v) in likely_subtags_canonical.items()}, + "maxLikelySubtags", + "Extracted from likelySubtags.xml.", + source, + url, + ) # Use the maximalized tags as the input for the remove likely-subtags test. - minimized = {tag: removeLikelySubtags(tag) for tag in likely_subtags_canonical.values()} + minimized = { + tag: removeLikelySubtags(tag) for tag in likely_subtags_canonical.values() + } # Add test data for |Intl.Locale.prototype.minimize()|. 
- writeMappingsVar(println, {bcp47(k): bcp47(v) for (k, v) in minimized.items()}, - "minLikelySubtags", "Extracted from likelySubtags.xml.", source, url) + writeMappingsVar( + println, + {bcp47(k): bcp47(v) for (k, v) in minimized.items()}, + "minLikelySubtags", + "Extracted from likelySubtags.xml.", + source, + url, + ) - println(u""" + println( + u""" for (let [tag, maximal] of Object.entries(maxLikelySubtags)) { assertEq(new Intl.Locale(tag).maximize().toString(), maximal); -}""") +}""" + ) - println(u""" + println( + u""" for (let [tag, minimal] of Object.entries(minLikelySubtags)) { assertEq(new Intl.Locale(tag).minimize().toString(), minimal); -}""") +}""" + ) - println(u""" + println( + u""" if (typeof reportCompare === "function") - reportCompare(0, 0);""") + reportCompare(0, 0);""" + ) def readCLDRVersionFromICU(): @@ -1403,8 +1773,10 @@ def readFiles(cldr_file): print("Writing Intl test data...") js_src_builtin_intl_dir = os.path.dirname(os.path.abspath(__file__)) - test_file = os.path.join(js_src_builtin_intl_dir, - "../../tests/non262/Intl/Locale/likely-subtags-generated.js") + test_file = os.path.join( + js_src_builtin_intl_dir, + "../../tests/non262/Intl/Locale/likely-subtags-generated.js", + ) with io.open(test_file, mode="w", encoding="utf-8", newline="") as f: println = partial(print, file=f) @@ -1459,7 +1831,9 @@ class TzDataFile(object): """ tzdata source from a file (tar or gzipped). """ def __init__(self, obj): - self.name = lambda: os.path.splitext(os.path.splitext(os.path.basename(obj))[0])[0] + self.name = lambda: os.path.splitext( + os.path.splitext(os.path.basename(obj))[0] + )[0] self.resolve = obj.getmember self.basename = attrgetter("name") self.isfile = tarfile.TarInfo.isfile @@ -1482,13 +1856,16 @@ def validateTimeZones(zones, links): zoneNames = {z.name for z in zones} linkTargets = set(links.values()) if not linkTargets.issubset(zoneNames): - raise RuntimeError("Link targets not found: %s" % linkTargets.difference(zoneNames)) + raise RuntimeError( + "Link targets not found: %s" % linkTargets.difference(zoneNames) + ) def partition(iterable, *predicates): def innerPartition(pred, it): it1, it2 = tee(it) return (filter(pred, it1), filterfalse(pred, it2)) + if len(predicates) == 0: return iterable (left, right) = innerPartition(predicates[0], iterable) @@ -1500,16 +1877,20 @@ def innerPartition(pred, it): def listIANAFiles(tzdataDir): def isTzFile(d, m, f): return m(f) and d.isfile(d.resolve(f)) - return filter(partial(isTzFile, tzdataDir, re.compile("^[a-z0-9]+$").match), - tzdataDir.listdir()) + + return filter( + partial(isTzFile, tzdataDir, re.compile("^[a-z0-9]+$").match), + tzdataDir.listdir(), + ) def readIANAFiles(tzdataDir, files): """ Read all IANA time zone files from the given iterable. """ nameSyntax = "[\w/+\-]+" pZone = re.compile(r"Zone\s+(?P%s)\s+.*" % nameSyntax) - pLink = re.compile(r"Link\s+(?P%s)\s+(?P%s)(?:\s+#.*)?" % - (nameSyntax, nameSyntax)) + pLink = re.compile( + r"Link\s+(?P%s)\s+(?P%s)(?:\s+#.*)?" % (nameSyntax, nameSyntax) + ) def createZone(line, fname): match = pZone.match(line) @@ -1552,7 +1933,9 @@ def readIANATimeZones(tzdataDir, ignoreBackzone, ignoreFactory): # Merge with backzone data. 
if not ignoreBackzone: zones |= backzones - links = {name: target for name, target in links.items() if name not in backzones} + links = { + name: target for name, target in links.items() if name not in backzones + } links.update(backlinks) validateTimeZones(zones, links) @@ -1561,15 +1944,17 @@ def readIANATimeZones(tzdataDir, ignoreBackzone, ignoreFactory): def readICUResourceFile(filename): - """ Read an ICU resource file. + """Read an ICU resource file. - Yields (, , ) for each table. + Yields (, , ) for each table. """ numberValue = r"-?\d+" stringValue = r'".+?"' - def asVector(val): return r"%s(?:\s*,\s*%s)*" % (val, val) + def asVector(val): + return r"%s(?:\s*,\s*%s)*" % (val, val) + numberVector = asVector(numberValue) stringVector = asVector(stringValue) @@ -1612,7 +1997,9 @@ def line(*args): tables = [] - def currentTable(): return "|".join(tables) + def currentTable(): + return "|".join(tables) + values = [] for line in flines(filename, "utf-8-sig"): line = line.strip() @@ -1653,22 +2040,23 @@ def currentTable(): return "|".join(tables) def readICUTimeZonesFromTimezoneTypes(icuTzDir): - """ Read the ICU time zone information from `icuTzDir`/timezoneTypes.txt - and returns the tuple (zones, links). + """Read the ICU time zone information from `icuTzDir`/timezoneTypes.txt + and returns the tuple (zones, links). """ typeMapTimeZoneKey = "timezoneTypes:table(nofallback)|typeMap|timezone|" typeAliasTimeZoneKey = "timezoneTypes:table(nofallback)|typeAlias|timezone|" - def toTimeZone(name): return Zone(name.replace(":", "/")) + def toTimeZone(name): + return Zone(name.replace(":", "/")) zones = set() links = dict() for name, value in readICUResourceFile(os.path.join(icuTzDir, "timezoneTypes.txt")): if name.startswith(typeMapTimeZoneKey): - zones.add(toTimeZone(name[len(typeMapTimeZoneKey):])) + zones.add(toTimeZone(name[len(typeMapTimeZoneKey) :])) if name.startswith(typeAliasTimeZoneKey): - links[toTimeZone(name[len(typeAliasTimeZoneKey):])] = value + links[toTimeZone(name[len(typeAliasTimeZoneKey) :])] = value # Remove the ICU placeholder time zone "Etc/Unknown". zones.remove(Zone("Etc/Unknown")) @@ -1684,8 +2072,8 @@ def toTimeZone(name): return Zone(name.replace(":", "/")) def readICUTimeZonesFromZoneInfo(icuTzDir, ignoreFactory): - """ Read the ICU time zone information from `icuTzDir`/zoneinfo64.txt - and returns the tuple (zones, links). + """Read the ICU time zone information from `icuTzDir`/zoneinfo64.txt + and returns the tuple (zones, links). """ zoneKey = "zoneinfo64:table(nofallback)|Zones:array|:table" linkKey = "zoneinfo64:table(nofallback)|Zones:array|:int" @@ -1727,7 +2115,9 @@ def readICUTimeZonesFromZoneInfo(icuTzDir, ignoreFactory): def readICUTimeZones(icuDir, icuTzDir, ignoreFactory): # zoneinfo64.txt contains the supported time zones by ICU. This data is # generated from tzdata files, it doesn't include "backzone" in stock ICU. - (zoneinfoZones, zoneinfoLinks) = readICUTimeZonesFromZoneInfo(icuTzDir, ignoreFactory) + (zoneinfoZones, zoneinfoLinks) = readICUTimeZonesFromZoneInfo( + icuTzDir, ignoreFactory + ) # timezoneTypes.txt contains the canonicalization information for ICU. This # data is generated from CLDR files. It includes data about time zones from @@ -1735,40 +2125,58 @@ def readICUTimeZones(icuDir, icuTzDir, ignoreFactory): (typesZones, typesLinks) = readICUTimeZonesFromTimezoneTypes(icuTzDir) # Information in zoneinfo64 should be a superset of timezoneTypes. 
- def inZoneInfo64(zone): return zone in zoneinfoZones or zone in zoneinfoLinks + def inZoneInfo64(zone): + return zone in zoneinfoZones or zone in zoneinfoLinks # Remove legacy ICU time zones from zoneinfo64 data. (legacyZones, legacyLinks) = readICULegacyZones(icuDir) zoneinfoZones = {zone for zone in zoneinfoZones if zone not in legacyZones} - zoneinfoLinks = {zone: target for (zone, target) in zoneinfoLinks.items() - if zone not in legacyLinks} + zoneinfoLinks = { + zone: target + for (zone, target) in zoneinfoLinks.items() + if zone not in legacyLinks + } notFoundInZoneInfo64 = [zone for zone in typesZones if not inZoneInfo64(zone)] if notFoundInZoneInfo64: - raise RuntimeError("Missing time zones in zoneinfo64.txt: %s" % notFoundInZoneInfo64) + raise RuntimeError( + "Missing time zones in zoneinfo64.txt: %s" % notFoundInZoneInfo64 + ) - notFoundInZoneInfo64 = [zone for zone in typesLinks.keys() if not inZoneInfo64(zone)] + notFoundInZoneInfo64 = [ + zone for zone in typesLinks.keys() if not inZoneInfo64(zone) + ] if notFoundInZoneInfo64: - raise RuntimeError("Missing time zones in zoneinfo64.txt: %s" % notFoundInZoneInfo64) + raise RuntimeError( + "Missing time zones in zoneinfo64.txt: %s" % notFoundInZoneInfo64 + ) # zoneinfo64.txt only defines the supported time zones by ICU, the canonicalization # rules are defined through timezoneTypes.txt. Merge both to get the actual zones # and links used by ICU. - icuZones = set(chain( - (zone for zone in zoneinfoZones if zone not in typesLinks), - (zone for zone in typesZones) - )) - icuLinks = dict(chain( - ((zone, target) for (zone, target) in zoneinfoLinks.items() if zone not in typesZones), - ((zone, target) for (zone, target) in typesLinks.items()) - )) + icuZones = set( + chain( + (zone for zone in zoneinfoZones if zone not in typesLinks), + (zone for zone in typesZones), + ) + ) + icuLinks = dict( + chain( + ( + (zone, target) + for (zone, target) in zoneinfoLinks.items() + if zone not in typesZones + ), + ((zone, target) for (zone, target) in typesLinks.items()), + ) + ) return (icuZones, icuLinks) def readICULegacyZones(icuDir): - """ Read the ICU legacy time zones from `icuTzDir`/tools/tzcode/icuzones - and returns the tuple (zones, links). + """Read the ICU legacy time zones from `icuTzDir`/tools/tzcode/icuzones + and returns the tuple (zones, links). """ tzdir = TzDataDir(os.path.join(icuDir, "tools/tzcode")) (zones, links) = readIANAFiles(tzdir, ["icuzones"]) @@ -1786,6 +2194,7 @@ def readICULegacyZones(icuDir): def icuTzDataVersion(icuTzDir): """ Read the ICU time zone version from `icuTzDir`/zoneinfo64.txt. """ + def searchInFile(pattern, f): p = re.compile(pattern) for line in flines(f, "utf-8-sig"): @@ -1799,17 +2208,23 @@ def searchInFile(pattern, f): raise RuntimeError("file not found: %s" % zoneinfo) version = searchInFile("^//\s+tz version:\s+([0-9]{4}[a-z])$", zoneinfo) if version is None: - raise RuntimeError("%s does not contain a valid tzdata version string" % zoneinfo) + raise RuntimeError( + "%s does not contain a valid tzdata version string" % zoneinfo + ) return version def findIncorrectICUZones(ianaZones, ianaLinks, icuZones, icuLinks, ignoreBackzone): """ Find incorrect ICU zone entries. 
""" - def isIANATimeZone(zone): return zone in ianaZones or zone in ianaLinks - def isICUTimeZone(zone): return zone in icuZones or zone in icuLinks + def isIANATimeZone(zone): + return zone in ianaZones or zone in ianaLinks + + def isICUTimeZone(zone): + return zone in icuZones or zone in icuLinks - def isICULink(zone): return zone in icuLinks + def isICULink(zone): + return zone in icuLinks # All IANA zones should be present in ICU. missingTimeZones = [zone for zone in ianaZones if not isICUTimeZone(zone)] @@ -1818,14 +2233,18 @@ def isICULink(zone): return zone in icuLinks # zone, this zone is only present in the backzone file. expectedMissing = [] if ignoreBackzone else [Zone("Asia/Hanoi")] if missingTimeZones != expectedMissing: - raise RuntimeError("Not all zones are present in ICU, did you forget " - "to run intl/update-tzdata.sh? %s" % missingTimeZones) + raise RuntimeError( + "Not all zones are present in ICU, did you forget " + "to run intl/update-tzdata.sh? %s" % missingTimeZones + ) # Zones which are only present in ICU? additionalTimeZones = [zone for zone in icuZones if not isIANATimeZone(zone)] if additionalTimeZones: - raise RuntimeError("Additional zones present in ICU, did you forget " - "to run intl/update-tzdata.sh? %s" % additionalTimeZones) + raise RuntimeError( + "Additional zones present in ICU, did you forget " + "to run intl/update-tzdata.sh? %s" % additionalTimeZones + ) # Zones which are marked as links in ICU. result = ((zone, icuLinks[zone]) for zone in ianaZones if isICULink(zone)) @@ -1839,40 +2258,57 @@ def isICULink(zone): return zone in icuLinks def findIncorrectICULinks(ianaZones, ianaLinks, icuZones, icuLinks): """ Find incorrect ICU link entries. """ - def isIANATimeZone(zone): return zone in ianaZones or zone in ianaLinks - def isICUTimeZone(zone): return zone in icuZones or zone in icuLinks + def isIANATimeZone(zone): + return zone in ianaZones or zone in ianaLinks - def isICULink(zone): return zone in icuLinks + def isICUTimeZone(zone): + return zone in icuZones or zone in icuLinks - def isICUZone(zone): return zone in icuZones + def isICULink(zone): + return zone in icuLinks + + def isICUZone(zone): + return zone in icuZones # All links should be present in ICU. missingTimeZones = [zone for zone in ianaLinks.keys() if not isICUTimeZone(zone)] if missingTimeZones: - raise RuntimeError("Not all zones are present in ICU, did you forget " - "to run intl/update-tzdata.sh? %s" % missingTimeZones) + raise RuntimeError( + "Not all zones are present in ICU, did you forget " + "to run intl/update-tzdata.sh? %s" % missingTimeZones + ) # Links which are only present in ICU? additionalTimeZones = [zone for zone in icuLinks.keys() if not isIANATimeZone(zone)] if additionalTimeZones: - raise RuntimeError("Additional links present in ICU, did you forget " - "to run intl/update-tzdata.sh? %s" % additionalTimeZones) + raise RuntimeError( + "Additional links present in ICU, did you forget " + "to run intl/update-tzdata.sh? %s" % additionalTimeZones + ) result = chain( # IANA links which have a different target in ICU. - ((zone, target, icuLinks[zone]) for (zone, target) in ianaLinks.items() - if isICULink(zone) and target != icuLinks[zone]), - + ( + (zone, target, icuLinks[zone]) + for (zone, target) in ianaLinks.items() + if isICULink(zone) and target != icuLinks[zone] + ), # IANA links which are zones in ICU. 
- ((zone, target, zone.name) for (zone, target) in ianaLinks.items() if isICUZone(zone)) + ( + (zone, target, zone.name) + for (zone, target) in ianaLinks.items() + if isICUZone(zone) + ), ) # Remove unnecessary UTC mappings. utcnames = ["Etc/UTC", "Etc/UCT", "Etc/GMT"] - result = ((zone, target, icuTarget) - for (zone, target, icuTarget) in result - if target not in utcnames or icuTarget not in utcnames) + result = ( + (zone, target, icuTarget) + for (zone, target, icuTarget) in result + if target not in utcnames or icuTarget not in utcnames + ) return sorted(result, key=itemgetter(0)) @@ -1881,7 +2317,9 @@ def isICUZone(zone): return zone in icuZones tzdataVersionComment = u"// tzdata version = {0}" -def processTimeZones(tzdataDir, icuDir, icuTzDir, version, ignoreBackzone, ignoreFactory, out): +def processTimeZones( + tzdataDir, icuDir, icuTzDir, version, ignoreBackzone, ignoreFactory, out +): """ Read the time zone info and create a new time zone cpp file. """ print("Processing tzdata mapping...") (ianaZones, ianaLinks) = readIANATimeZones(tzdataDir, ignoreBackzone, ignoreFactory) @@ -1889,7 +2327,8 @@ def processTimeZones(tzdataDir, icuDir, icuTzDir, version, ignoreBackzone, ignor (legacyZones, legacyLinks) = readICULegacyZones(icuDir) incorrectZones = findIncorrectICUZones( - ianaZones, ianaLinks, icuZones, icuLinks, ignoreBackzone) + ianaZones, ianaLinks, icuZones, icuLinks, ignoreBackzone + ) if not incorrectZones: print("<<< No incorrect ICU time zones found, please update Intl.js! >>>") print("<<< Maybe https://ssl.icu-project.org/trac/ticket/12044 was fixed? >>>") @@ -1933,13 +2372,20 @@ def processTimeZones(tzdataDir, icuDir, icuTzDir, version, ignoreBackzone, ignor println(u"") println(u"const LinkAndTarget ianaLinksCanonicalizedDifferentlyByICU[] = {") for (zone, target, icuTarget) in incorrectLinks: - println(u' { "%s", "%s" }, // %s [%s]' % (zone, target, icuTarget, zone.filename)) + println( + u' { "%s", "%s" }, // %s [%s]' + % (zone, target, icuTarget, zone.filename) + ) println(u"};") println(u"") - println(u"// Legacy ICU time zones, these are not valid IANA time zone names. We also") + println( + u"// Legacy ICU time zones, these are not valid IANA time zone names. We also" + ) println(u"// disallow the old and deprecated System V time zones.") - println(u"// https://ssl.icu-project.org/repos/icu/trunk/icu4c/source/tools/tzcode/icuzones") # NOQA: E501 + println( + u"// https://ssl.icu-project.org/repos/icu/trunk/icu4c/source/tools/tzcode/icuzones" + ) # NOQA: E501 println(u"const char* const legacyICUTimeZones[] = {") for zone in chain(sorted(legacyLinks.keys()), sorted(legacyZones)): println(u' "%s",' % zone) @@ -1953,7 +2399,8 @@ def processTimeZones(tzdataDir, icuDir, icuTzDir, version, ignoreBackzone, ignor def updateBackzoneLinks(tzdataDir, links): - def withZone(fn): return lambda zone_target: fn(zone_target[0]) + def withZone(fn): + return lambda zone_target: fn(zone_target[0]) (backzoneZones, backzoneLinks) = readIANAFiles(tzdataDir, ["backzone"]) (stableZones, updatedLinks, updatedZones) = partition( @@ -1964,27 +2411,33 @@ def withZone(fn): return lambda zone_target: fn(zone_target[0]) withZone(lambda zone: zone in backzoneLinks), ) # Keep stable zones and links with updated target. 
- return dict(chain( - stableZones, - map(withZone(lambda zone: (zone, backzoneLinks[zone])), updatedLinks) - )) + return dict( + chain( + stableZones, + map(withZone(lambda zone: (zone, backzoneLinks[zone])), updatedLinks), + ) + ) def generateTzDataLinkTestContent(testDir, version, fileName, description, links): - with io.open(os.path.join(testDir, fileName), mode="w", encoding="utf-8", newline="") as f: + with io.open( + os.path.join(testDir, fileName), mode="w", encoding="utf-8", newline="" + ) as f: println = partial(print, file=f) println(u'// |reftest| skip-if(!this.hasOwnProperty("Intl"))') println(u"") println(generatedFileWarning) println(tzdataVersionComment.format(version)) - println(u""" + println( + u""" const tzMapper = [ x => x, x => x.toUpperCase(), x => x.toLowerCase(), ]; -""") +""" + ) println(description) println(u"const links = {") @@ -1992,7 +2445,8 @@ def generateTzDataLinkTestContent(testDir, version, fileName, description, links println(u' "%s": "%s",' % (zone, target)) println(u"};") - println(u""" + println( + u""" for (let [linkName, target] of Object.entries(links)) { if (target === "Etc/UTC" || target === "Etc/GMT") target = "UTC"; @@ -2003,11 +2457,14 @@ def generateTzDataLinkTestContent(testDir, version, fileName, description, links assertEq(resolvedTimeZone, target, `${linkName} -> ${target}`); } } -""") - println(u""" +""" + ) + println( + u""" if (typeof reportCompare === "function") reportCompare(0, 0, "ok"); -""") +""" + ) def generateTzDataTestBackwardLinks(tzdataDir, version, ignoreBackzone, testDir): @@ -2018,25 +2475,29 @@ def generateTzDataTestBackwardLinks(tzdataDir, version, ignoreBackzone, testDir) links = updateBackzoneLinks(tzdataDir, links) generateTzDataLinkTestContent( - testDir, version, + testDir, + version, "timeZone_backward_links.js", u"// Link names derived from IANA Time Zone Database, backward file.", - links.items() + links.items(), ) def generateTzDataTestNotBackwardLinks(tzdataDir, version, ignoreBackzone, testDir): - tzfiles = filterfalse({"backward", "backzone"}.__contains__, listIANAFiles(tzdataDir)) + tzfiles = filterfalse( + {"backward", "backzone"}.__contains__, listIANAFiles(tzdataDir) + ) (zones, links) = readIANAFiles(tzdataDir, tzfiles) if not ignoreBackzone: links = updateBackzoneLinks(tzdataDir, links) generateTzDataLinkTestContent( - testDir, version, + testDir, + version, "timeZone_notbackward_links.js", u"// Link names derived from IANA Time Zone Database, excluding backward file.", - links.items() + links.items(), ) @@ -2064,11 +2525,15 @@ def generateTzDataTestBackzone(tzdataDir, version, ignoreBackzone, testDir): """ generateTzDataLinkTestContent( - testDir, version, + testDir, + version, "timeZone_backzone.js", comment + u"// Backzone zones derived from IANA Time Zone Database.", - ((zone, zone if not ignoreBackzone else links[zone]) - for zone in backzones if zone in links) + ( + (zone, zone if not ignoreBackzone else links[zone]) + for zone in backzones + if zone in links + ), ) @@ -2097,18 +2562,23 @@ def generateTzDataTestBackzoneLinks(tzdataDir, version, ignoreBackzone, testDir) """ generateTzDataLinkTestContent( - testDir, version, + testDir, + version, "timeZone_backzone_links.js", comment + u"// Backzone links derived from IANA Time Zone Database.", - ((zone, target if not ignoreBackzone else links[zone]) - for (zone, target) in backlinks.items()) + ( + (zone, target if not ignoreBackzone else links[zone]) + for (zone, target) in backlinks.items() + ), ) def generateTzDataTestVersion(tzdataDir, 
version, testDir): fileName = "timeZone_version.js" - with io.open(os.path.join(testDir, fileName), mode="w", encoding="utf-8", newline="") as f: + with io.open( + os.path.join(testDir, fileName), mode="w", encoding="utf-8", newline="" + ) as f: println = partial(print, file=f) println(u'// |reftest| skip-if(!this.hasOwnProperty("Intl"))') @@ -2117,7 +2587,8 @@ def generateTzDataTestVersion(tzdataDir, version, testDir): println(tzdataVersionComment.format(version)) println(u"""const tzdata = "{0}";""".format(version)) - println(u""" + println( + u""" if (typeof getICUOptions === "undefined") { var getICUOptions = SpecialPowers.Cu.getJSTestingFunctions().getICUOptions; } @@ -2128,7 +2599,8 @@ def generateTzDataTestVersion(tzdataDir, version, testDir): if (typeof reportCompare === "function") reportCompare(0, 0, "ok"); -""") +""" + ) def generateTzDataTests(tzdataDir, version, ignoreBackzone, testDir): @@ -2150,7 +2622,9 @@ def updateTzdata(topsrcdir, args): if not os.path.isdir(icuTzDir): raise RuntimeError("not a directory: %s" % icuTzDir) - dateTimeFormatTestDir = os.path.join(topsrcdir, "js/src/tests/non262/Intl/DateTimeFormat") + dateTimeFormatTestDir = os.path.join( + topsrcdir, "js/src/tests/non262/Intl/DateTimeFormat" + ) if not os.path.isdir(dateTimeFormatTestDir): raise RuntimeError("not a directory: %s" % dateTimeFormatTestDir) @@ -2163,7 +2637,9 @@ def updateTzdata(topsrcdir, args): out = args.out version = icuTzDataVersion(icuTzDir) - url = "https://www.iana.org/time-zones/repository/releases/tzdata%s.tar.gz" % version + url = ( + "https://www.iana.org/time-zones/repository/releases/tzdata%s.tar.gz" % version + ) print("Arguments:") print("\ttzdata version: %s" % version) @@ -2178,14 +2654,31 @@ def updateTzdata(topsrcdir, args): def updateFrom(f): if os.path.isfile(f) and tarfile.is_tarfile(f): with tarfile.open(f, "r:*") as tar: - processTimeZones(TzDataFile(tar), icuDir, icuTzDir, version, - ignoreBackzone, ignoreFactory, out) - generateTzDataTests(TzDataFile(tar), version, - ignoreBackzone, dateTimeFormatTestDir) + processTimeZones( + TzDataFile(tar), + icuDir, + icuTzDir, + version, + ignoreBackzone, + ignoreFactory, + out, + ) + generateTzDataTests( + TzDataFile(tar), version, ignoreBackzone, dateTimeFormatTestDir + ) elif os.path.isdir(f): - processTimeZones(TzDataDir(f), icuDir, icuTzDir, version, - ignoreBackzone, ignoreFactory, out) - generateTzDataTests(TzDataDir(f), version, ignoreBackzone, dateTimeFormatTestDir) + processTimeZones( + TzDataDir(f), + icuDir, + icuTzDir, + version, + ignoreBackzone, + ignoreFactory, + out, + ) + generateTzDataTests( + TzDataDir(f), version, ignoreBackzone, dateTimeFormatTestDir + ) else: raise RuntimeError("unknown format") @@ -2230,16 +2723,20 @@ def writeCurrencyFile(published, currencies, out): println(generatedFileWarning) println(u"// Version: {}".format(published)) - println(u""" + println( + u""" /** * Mapping from currency codes to the number of decimal digits used for them. * Default is 2 digits. * * Spec: ISO 4217 Currency and Funds Code List. 
* http://www.currency-iso.org/en/home/tables/table-a1.html - */""") + */""" + ) println(u"var currencyDigits = {") - for (currency, entries) in groupby(sorted(currencies, key=itemgetter(0)), itemgetter(0)): + for (currency, entries) in groupby( + sorted(currencies, key=itemgetter(0)), itemgetter(0) + ): for (_, minorUnits, currencyName, countryName) in entries: println(u" // {} ({})".format(currencyName, countryName)) println(u" {}: {},".format(currency, minorUnits)) @@ -2277,8 +2774,11 @@ def updateFrom(currencyFile): print("Downloading currency & funds code list...") request = UrlRequest(url) request.add_header( - "User-agent", "Mozilla/5.0 (Mobile; rv:{0}.0) Gecko/{0}.0 Firefox/{0}.0".format( - randint(1, 999))) + "User-agent", + "Mozilla/5.0 (Mobile; rv:{0}.0) Gecko/{0}.0 Firefox/{0}.0".format( + randint(1, 999) + ), + ) with closing(urlopen(request)) as currencyFile: fname = urlsplit(currencyFile.geturl()).path.split("/")[-1] with tempfile.NamedTemporaryFile(suffix=fname) as currencyTmpFile: @@ -2289,7 +2789,8 @@ def updateFrom(currencyFile): def writeUnicodeExtensionsMappings(println, mapping, extension): - println(u""" + println( + u""" template static inline bool Is{0}Key( mozilla::Span key, const char (&str)[Length]) {{ @@ -2306,15 +2807,23 @@ def writeUnicodeExtensionsMappings(println, mapping, extension): return type.size() == (Length - 1) && memcmp(type.data(), str, Length - 1) == 0; }} -""".format(extension).rstrip("\n")) +""".format( + extension + ).rstrip( + "\n" + ) + ) linear_search_max_length = 4 - needs_binary_search = any(len(replacements.items()) > linear_search_max_length - for replacements in mapping.values()) + needs_binary_search = any( + len(replacements.items()) > linear_search_max_length + for replacements in mapping.values() + ) if needs_binary_search: - println(u""" + println( + u""" static int32_t Compare{0}Type(const char* a, mozilla::Span b) {{ MOZ_ASSERT(!std::char_traits::find(b.data(), b.size(), '\\0'), "unexpected null-character in string"); @@ -2348,9 +2857,15 @@ def writeUnicodeExtensionsMappings(println, mapping, extension): }} return nullptr; }} -""".format(extension).rstrip("\n")) - - println(u""" +""".format( + extension + ).rstrip( + "\n" + ) + ) + + println( + u""" /** * Mapping from deprecated BCP 47 {0} extension types to their preferred * values. 
@@ -2365,7 +2880,10 @@ def writeUnicodeExtensionsMappings(println, mapping, extension): MOZ_ASSERT(type.size() > {0}KeyLength); MOZ_ASSERT(IsCanonicallyCased{0}Type(type)); -""".format(extension)) +""".format( + extension + ) + ) def to_hash_key(replacements): return str(sorted(replacements.items())) @@ -2376,8 +2894,11 @@ def write_array(subtags, name, length): println(u" static const char* {}[{}] = {{".format(name, len(subtags))) for entries in grouper(subtags, max_entries): - entries = (u"\"{}\"".format(tag).rjust(length + 2) - for tag in entries if tag is not None) + entries = ( + u'"{}"'.format(tag).rjust(length + 2) + for tag in entries + if tag is not None + ) println(u" {},".format(u", ".join(entries))) println(u" };") @@ -2397,13 +2918,21 @@ def write_array(subtags, name, length): if key in key_aliases[hash_key]: continue - cond = (u"Is{}Key(key, \"{}\")".format(extension, k) - for k in [key] + key_aliases[hash_key]) + cond = ( + u'Is{}Key(key, "{}")'.format(extension, k) + for k in [key] + key_aliases[hash_key] + ) if_kind = u"if" if first_key else u"else if" cond = (u" ||\n" + u" " * (2 + len(if_kind) + 2)).join(cond) - println(u""" - {} ({}) {{""".format(if_kind, cond).strip("\n")) + println( + u""" + {} ({}) {{""".format( + if_kind, cond + ).strip( + "\n" + ) + ) first_key = False replacements = sorted(replacements.items(), key=itemgetter(0)) @@ -2415,57 +2944,77 @@ def write_array(subtags, name, length): write_array(types, "types", max_len) write_array(preferred, "aliases", max_len) - println(u""" + println( + u""" return Search{}Replacement(types, aliases, type); -""".format(extension).strip("\n")) +""".format( + extension + ).strip( + "\n" + ) + ) else: for (type, replacement) in replacements: - println(u""" + println( + u""" if (Is{}Type(type, "{}")) {{ return "{}"; - }}""".format(extension, type, replacement).strip("\n")) - - println(u""" - }""".lstrip("\n")) - - println(u""" + }}""".format( + extension, type, replacement + ).strip( + "\n" + ) + ) + + println( + u""" + }""".lstrip( + "\n" + ) + ) + + println( + u""" return nullptr; } -""".strip("\n")) +""".strip( + "\n" + ) + ) def readICUUnitResourceFile(filepath): - """ Return a set of unit descriptor pairs where the first entry denotes the unit type and the - second entry the unit name. + """Return a set of unit descriptor pairs where the first entry denotes the unit type and the + second entry the unit name. - Example: + Example: - root{ - units{ - compound{ - } - coordinate{ - } - length{ - meter{ - } + root{ + units{ + compound{ + } + coordinate{ + } + length{ + meter{ } } - unitsNarrow:alias{"/LOCALE/unitsShort"} - unitsShort{ - duration{ - day{ - } - day-person:alias{"/LOCALE/unitsShort/duration/day"} + } + unitsNarrow:alias{"/LOCALE/unitsShort"} + unitsShort{ + duration{ + day{ } - length{ - meter{ - } + day-person:alias{"/LOCALE/unitsShort/duration/day"} + } + length{ + meter{ } } } + } - Returns {("length", "meter"), ("duration", "day"), ("duration", "day-person")} + Returns {("length", "meter"), ("duration", "day"), ("duration", "day-person")} """ start_table_re = re.compile(r"^([\w\-%:\"]+)\{$") @@ -2532,23 +3081,27 @@ def readICUUnitResourceFile(filepath): # Add all units for the three display formats "units", "unitsNarrow", and "unitsShort". # But exclude the pseudo-units "compound" and "ccoordinate". 
- return {(unit_type, unit_name if not unit_name.endswith(":alias") else unit_name[:-6]) - for unit_display in ("units", "unitsNarrow", "unitsShort") - if unit_display in unit_table - for (unit_type, unit_names) in unit_table[unit_display].items() - if unit_type != "compound" and unit_type != "coordinate" - for unit_name in unit_names.keys()} + return { + (unit_type, unit_name if not unit_name.endswith(":alias") else unit_name[:-6]) + for unit_display in ("units", "unitsNarrow", "unitsShort") + if unit_display in unit_table + for (unit_type, unit_names) in unit_table[unit_display].items() + if unit_type != "compound" and unit_type != "coordinate" + for unit_name in unit_names.keys() + } def computeSupportedUnits(all_units, sanctioned_units): - """ Given the set of all possible ICU unit identifiers and the set of sanctioned unit - identifiers, compute the set of effectively supported ICU unit identifiers. + """Given the set of all possible ICU unit identifiers and the set of sanctioned unit + identifiers, compute the set of effectively supported ICU unit identifiers. """ def find_match(unit): - unit_match = [(unit_type, unit_name) - for (unit_type, unit_name) in all_units - if unit_name == unit] + unit_match = [ + (unit_type, unit_name) + for (unit_type, unit_name) in all_units + if unit_name == unit + ] if unit_match: assert len(unit_match) == 1 return unit_match[0] @@ -2562,10 +3115,11 @@ def compound_unit_identifiers(): supported_simple_units = {find_match(unit) for unit in sanctioned_units} assert None not in supported_simple_units - supported_compound_units = {unit_match - for unit_match in (find_match(unit) - for unit in compound_unit_identifiers()) - if unit_match} + supported_compound_units = { + unit_match + for unit_match in (find_match(unit) for unit in compound_unit_identifiers()) + if unit_match + } return supported_simple_units | supported_compound_units @@ -2575,9 +3129,11 @@ def readICUDataFilterForUnits(data_filter_file): data_filter = json.load(f) # Find the rule set for the "unit_tree". - unit_tree_rules = [entry["rules"] - for entry in data_filter["resourceFilters"] - if entry["categories"] == ["unit_tree"]] + unit_tree_rules = [ + entry["rules"] + for entry in data_filter["resourceFilters"] + if entry["categories"] == ["unit_tree"] + ] assert len(unit_tree_rules) == 1 # Compute the list of included units from that rule set. 
The regular expression must match @@ -2592,36 +3148,50 @@ def writeSanctionedSimpleUnitIdentifiersFiles(all_units, sanctioned_units): js_src_builtin_intl_dir = os.path.dirname(os.path.abspath(__file__)) def find_unit_type(unit): - result = [unit_type for (unit_type, unit_name) in all_units if unit_name == unit] + result = [ + unit_type for (unit_type, unit_name) in all_units if unit_name == unit + ] assert result and len(result) == 1 return result[0] - sanctioned_js_file = os.path.join(js_src_builtin_intl_dir, - "SanctionedSimpleUnitIdentifiersGenerated.js") + sanctioned_js_file = os.path.join( + js_src_builtin_intl_dir, "SanctionedSimpleUnitIdentifiersGenerated.js" + ) with io.open(sanctioned_js_file, mode="w", encoding="utf-8", newline="") as f: println = partial(print, file=f) - sanctioned_units_object = json.dumps({unit: True for unit in sorted(sanctioned_units)}, - sort_keys=True, indent=4, separators=(',', ': ')) + sanctioned_units_object = json.dumps( + {unit: True for unit in sorted(sanctioned_units)}, + sort_keys=True, + indent=4, + separators=(",", ": "), + ) println(generatedFileWarning) - println(u""" + println( + u""" /** * The list of currently supported simple unit identifiers. * * Intl.NumberFormat Unified API Proposal - */""") + */""" + ) - println(u"var sanctionedSimpleUnitIdentifiers = {};".format(sanctioned_units_object)) + println( + u"var sanctionedSimpleUnitIdentifiers = {};".format(sanctioned_units_object) + ) - sanctioned_cpp_file = os.path.join(js_src_builtin_intl_dir, "MeasureUnitGenerated.h") + sanctioned_cpp_file = os.path.join( + js_src_builtin_intl_dir, "MeasureUnitGenerated.h" + ) with io.open(sanctioned_cpp_file, mode="w", encoding="utf-8", newline="") as f: println = partial(print, file=f) println(generatedFileWarning) - println(u""" + println( + u""" struct MeasureUnit { const char* const type; const char* const name; @@ -2633,14 +3203,19 @@ def find_unit_type(unit): * The list must be kept in alphabetical order of |name|. */ inline constexpr MeasureUnit simpleMeasureUnits[] = { - // clang-format off""") + // clang-format off""" + ) for unit_name in sorted(sanctioned_units): println(u' {{"{}", "{}"}},'.format(find_unit_type(unit_name), unit_name)) - println(u""" + println( + u""" // clang-format on -};""".lstrip("\n")) +};""".lstrip( + "\n" + ) + ) writeUnitTestFiles(all_units, sanctioned_units) @@ -2649,7 +3224,9 @@ def writeUnitTestFiles(all_units, sanctioned_units): """ Generate test files for unit number formatters. 
""" js_src_builtin_intl_dir = os.path.dirname(os.path.abspath(__file__)) - test_dir = os.path.join(js_src_builtin_intl_dir, "../../tests/non262/Intl/NumberFormat") + test_dir = os.path.join( + js_src_builtin_intl_dir, "../../tests/non262/Intl/NumberFormat" + ) def write_test(file_name, test_content, indent=4): file_path = os.path.join(test_dir, file_name) @@ -2661,18 +3238,31 @@ def write_test(file_name, test_content, indent=4): println(generatedFileWarning) println(u"") - sanctioned_units_array = json.dumps([unit for unit in sorted(sanctioned_units)], - indent=indent, separators=(',', ': ')) + sanctioned_units_array = json.dumps( + [unit for unit in sorted(sanctioned_units)], + indent=indent, + separators=(",", ": "), + ) - println(u"const sanctionedSimpleUnitIdentifiers = {};".format(sanctioned_units_array)) + println( + u"const sanctionedSimpleUnitIdentifiers = {};".format( + sanctioned_units_array + ) + ) println(test_content) - println(u""" + println( + u""" if (typeof reportCompare === "function") -{}reportCompare(true, true);""".format(" " * indent)) - - write_test("unit-compound-combinations.js", u""" +{}reportCompare(true, true);""".format( + " " * indent + ) + ) + + write_test( + "unit-compound-combinations.js", + u""" // Test all simple unit identifier combinations are allowed. for (const numerator of sanctionedSimpleUnitIdentifiers) { @@ -2682,14 +3272,21 @@ def write_test(file_name, test_content, indent=4): assertEq(nf.format(1), nf.formatToParts(1).map(p => p.value).join("")); } -}""") +}""", + ) - all_units_array = json.dumps(["-".join(unit) for unit in sorted(all_units)], - indent=4, separators=(',', ': ')) + all_units_array = json.dumps( + ["-".join(unit) for unit in sorted(all_units)], indent=4, separators=(",", ": ") + ) - write_test("unit-well-formed.js", u""" + write_test( + "unit-well-formed.js", + u""" const allUnits = {}; -""".format(all_units_array) + u""" +""".format( + all_units_array + ) + + u""" // Test only sanctioned unit identifiers are allowed. for (const typeAndUnit of allUnits) { @@ -2711,9 +3308,12 @@ def write_test(file_name, test_content, indent=4): assertThrowsInstanceOf(() => new Intl.NumberFormat("en", {style: "unit", unit}), RangeError, `Missing error for "${typeAndUnit}"`); } -}""") +}""", + ) - write_test("unit-formatToParts-has-unit-field.js", u""" + write_test( + "unit-formatToParts-has-unit-field.js", + u""" // Test only English and Chinese to keep the overall runtime reasonable. // // Chinese is included because it contains more than one "unit" element for @@ -2748,14 +3348,18 @@ def write_test(file_name, test_content, indent=4): } } } -}""", indent=2) +}""", + indent=2, + ) def updateUnits(topsrcdir, args): icu_path = os.path.join(topsrcdir, "intl", "icu") icu_unit_path = os.path.join(icu_path, "source", "data", "unit") - with io.open("SanctionedSimpleUnitIdentifiers.yaml", mode="r", encoding="utf-8") as f: + with io.open( + "SanctionedSimpleUnitIdentifiers.yaml", mode="r", encoding="utf-8" + ) as f: sanctioned_units = yaml.safe_load(f) # Read all possible ICU unit identifiers from the "unit/root.txt" resource. @@ -2771,6 +3375,7 @@ def updateUnits(topsrcdir, args): # Both sets must match to avoid resource loading errors at runtime. 
if supported_units != filtered_units: + def units_to_string(units): return ", ".join("/".join(u) for u in units) @@ -2788,28 +3393,28 @@ def units_to_string(units): def readICUNumberingSystemsResourceFile(filepath): - """ Returns a dictionary of numbering systems where the key denotes the numbering system name - and the value a dictionary with additional numbering system data. + """Returns a dictionary of numbering systems where the key denotes the numbering system name + and the value a dictionary with additional numbering system data. - Example: + Example: - numberingSystems:table(nofallback){ - numberingSystems{ - latn{ - algorithmic:int{0} - desc{"0123456789"} - radix:int{10} - } - roman{ - algorithmic:int{1} - desc{"%roman-upper"} - radix:int{10} - } + numberingSystems:table(nofallback){ + numberingSystems{ + latn{ + algorithmic:int{0} + desc{"0123456789"} + radix:int{10} + } + roman{ + algorithmic:int{1} + desc{"%roman-upper"} + radix:int{10} } } + } - Returns {"latn": {"digits": "0123456789", "algorithmic": False}, - "roman": {"algorithmic": True}} + Returns {"latn": {"digits": "0123456789", "algorithmic": False}, + "roman": {"algorithmic": True}} """ start_table_re = re.compile(r"^(\w+)(?:\:[\w\(\)]+)?\{$") @@ -2862,7 +3467,9 @@ def readICUNumberingSystemsResourceFile(filepath): match = table_entry_re.match(line) if match: entry_key = match.group(1) - entry_value = match.group(2) if match.group(2) is not None else int(match.group(3)) + entry_value = ( + match.group(2) if match.group(2) is not None else int(match.group(3)) + ) table[entry_key] = entry_value continue @@ -2879,39 +3486,53 @@ def readICUNumberingSystemsResourceFile(filepath): assert all(ns["radix"] == 10 for ns in numbering_systems.values()) # Return the numbering systems. - return {key: {"digits": value["desc"], "algorithmic": False} - if not bool(value["algorithmic"]) - else {"algorithmic": True} - for (key, value) in numbering_systems.items()} + return { + key: {"digits": value["desc"], "algorithmic": False} + if not bool(value["algorithmic"]) + else {"algorithmic": True} + for (key, value) in numbering_systems.items() + } def writeNumberingSystemFiles(numbering_systems): js_src_builtin_intl_dir = os.path.dirname(os.path.abspath(__file__)) - numbering_systems_js_file = os.path.join(js_src_builtin_intl_dir, - "NumberingSystemsGenerated.h") - with io.open(numbering_systems_js_file, mode="w", encoding="utf-8", newline="") as f: + numbering_systems_js_file = os.path.join( + js_src_builtin_intl_dir, "NumberingSystemsGenerated.h" + ) + with io.open( + numbering_systems_js_file, mode="w", encoding="utf-8", newline="" + ) as f: println = partial(print, file=f) println(generatedFileWarning) - println(u""" + println( + u""" /** * The list of numbering systems with simple digit mappings. 
*/ #ifndef builtin_intl_NumberingSystemsGenerated_h #define builtin_intl_NumberingSystemsGenerated_h -""") +""" + ) - simple_numbering_systems = sorted(name - for (name, value) in numbering_systems.items() - if not value["algorithmic"]) + simple_numbering_systems = sorted( + name + for (name, value) in numbering_systems.items() + if not value["algorithmic"] + ) println(u"// clang-format off") println(u"#define NUMBERING_SYSTEMS_WITH_SIMPLE_DIGIT_MAPPINGS \\") - println(u"{}".format(", \\\n".join(u' "{}"'.format(name) - for name in simple_numbering_systems))) + println( + u"{}".format( + ", \\\n".join( + u' "{}"'.format(name) for name in simple_numbering_systems + ) + ) + ) println(u"// clang-format on") println(u"") @@ -2927,17 +3548,23 @@ def writeNumberingSystemFiles(numbering_systems): println(generatedFileWarning) - println(u""" + println( + u""" // source: CLDR file common/bcp47/number.xml; version CLDR {}. // https://github.com/unicode-org/cldr/blob/master/common/bcp47/number.xml // https://github.com/unicode-org/cldr/blob/master/common/supplemental/numberingSystems.xml -""".format(readCLDRVersionFromICU()).rstrip()) - - numbering_systems_object = json.dumps(numbering_systems, - indent=2, - separators=(',', ': '), - sort_keys=True, - ensure_ascii=False) +""".format( + readCLDRVersionFromICU() + ).rstrip() + ) + + numbering_systems_object = json.dumps( + numbering_systems, + indent=2, + separators=(",", ": "), + sort_keys=True, + ensure_ascii=False, + ) println(u"const numberingSystems = {};".format(numbering_systems_object)) @@ -2952,19 +3579,24 @@ def updateNumberingSystems(topsrcdir, args): misc_ns_file = os.path.join(icu_misc_path, "numberingSystems.txt") all_numbering_systems = readICUNumberingSystemsResourceFile(misc_ns_file) - all_numbering_systems_simple_digits = {name for (name, value) in all_numbering_systems.items() - if not value["algorithmic"]} + all_numbering_systems_simple_digits = { + name + for (name, value) in all_numbering_systems.items() + if not value["algorithmic"] + } # Assert ICU includes support for all required numbering systems. If this assertion fails, # something is broken in ICU. - assert all_numbering_systems_simple_digits.issuperset(numbering_systems), ( - "{}".format(numbering_systems.difference(all_numbering_systems_simple_digits))) + assert all_numbering_systems_simple_digits.issuperset( + numbering_systems + ), "{}".format(numbering_systems.difference(all_numbering_systems_simple_digits)) # Assert the spec requires support for all numbering systems with simple digit mappings. If # this assertion fails, file a PR at to include any new # numbering systems. 
- assert all_numbering_systems_simple_digits.issubset(numbering_systems), ( - "{}".format(all_numbering_systems_simple_digits.difference(numbering_systems))) + assert all_numbering_systems_simple_digits.issubset(numbering_systems), "{}".format( + all_numbering_systems_simple_digits.difference(numbering_systems) + ) writeNumberingSystemFiles(all_numbering_systems) @@ -2987,65 +3619,85 @@ def EnsureHttps(v): parser = argparse.ArgumentParser(description="Update intl data.") subparsers = parser.add_subparsers(help="Select update mode") - parser_cldr_tags = subparsers.add_parser("langtags", - help="Update CLDR language tags data") - parser_cldr_tags.add_argument("--version", - metavar="VERSION", - help="CLDR version number") - parser_cldr_tags.add_argument("--url", - metavar="URL", - default="https://unicode.org/Public/cldr//core.zip", - type=EnsureHttps, - help="Download url CLDR data (default: %(default)s)") - parser_cldr_tags.add_argument("--out", - default="LanguageTagGenerated.cpp", - help="Output file (default: %(default)s)") - parser_cldr_tags.add_argument("file", - nargs="?", - help="Local cldr-core.zip file, if omitted uses ") + parser_cldr_tags = subparsers.add_parser( + "langtags", help="Update CLDR language tags data" + ) + parser_cldr_tags.add_argument( + "--version", metavar="VERSION", help="CLDR version number" + ) + parser_cldr_tags.add_argument( + "--url", + metavar="URL", + default="https://unicode.org/Public/cldr//core.zip", + type=EnsureHttps, + help="Download url CLDR data (default: %(default)s)", + ) + parser_cldr_tags.add_argument( + "--out", + default="LanguageTagGenerated.cpp", + help="Output file (default: %(default)s)", + ) + parser_cldr_tags.add_argument( + "file", nargs="?", help="Local cldr-core.zip file, if omitted uses " + ) parser_cldr_tags.set_defaults(func=updateCLDRLangTags) parser_tz = subparsers.add_parser("tzdata", help="Update tzdata") - parser_tz.add_argument("--tz", - help="Local tzdata directory or file, if omitted downloads tzdata " - "distribution from https://www.iana.org/time-zones/") + parser_tz.add_argument( + "--tz", + help="Local tzdata directory or file, if omitted downloads tzdata " + "distribution from https://www.iana.org/time-zones/", + ) # ICU doesn't include the backzone file by default, but we still like to # use the backzone time zone names to avoid user confusion. This does lead # to formatting "historic" dates (pre-1970 era) with the wrong time zone, # but that's probably acceptable for now. - parser_tz.add_argument("--ignore-backzone", - action="store_true", - help="Ignore tzdata's 'backzone' file. Can be enabled to generate more " - "accurate time zone canonicalization reflecting the actual time " - "zones as used by ICU.") - parser_tz.add_argument("--out", - default="TimeZoneDataGenerated.h", - help="Output file (default: %(default)s)") + parser_tz.add_argument( + "--ignore-backzone", + action="store_true", + help="Ignore tzdata's 'backzone' file. 
Can be enabled to generate more " + "accurate time zone canonicalization reflecting the actual time " + "zones as used by ICU.", + ) + parser_tz.add_argument( + "--out", + default="TimeZoneDataGenerated.h", + help="Output file (default: %(default)s)", + ) parser_tz.set_defaults(func=partial(updateTzdata, topsrcdir)) - parser_currency = subparsers.add_parser("currency", help="Update currency digits mapping") - parser_currency.add_argument("--url", - metavar="URL", - default="https://www.currency-iso.org/dam/downloads/lists/list_one.xml", # NOQA: E501 - type=EnsureHttps, - help="Download url for the currency & funds code list (default: " - "%(default)s)") - parser_currency.add_argument("--out", - default="CurrencyDataGenerated.js", - help="Output file (default: %(default)s)") - parser_currency.add_argument("file", - nargs="?", - help="Local currency code list file, if omitted uses ") + parser_currency = subparsers.add_parser( + "currency", help="Update currency digits mapping" + ) + parser_currency.add_argument( + "--url", + metavar="URL", + default="https://www.currency-iso.org/dam/downloads/lists/list_one.xml", # NOQA: E501 + type=EnsureHttps, + help="Download url for the currency & funds code list (default: " + "%(default)s)", + ) + parser_currency.add_argument( + "--out", + default="CurrencyDataGenerated.js", + help="Output file (default: %(default)s)", + ) + parser_currency.add_argument( + "file", nargs="?", help="Local currency code list file, if omitted uses " + ) parser_currency.set_defaults(func=partial(updateCurrency, topsrcdir)) - parser_units = subparsers.add_parser("units", - help="Update sanctioned unit identifiers mapping") + parser_units = subparsers.add_parser( + "units", help="Update sanctioned unit identifiers mapping" + ) parser_units.set_defaults(func=partial(updateUnits, topsrcdir)) - parser_numbering_systems = subparsers.add_parser("numbering", - help="Update numbering systems with simple " - "digit mappings") - parser_numbering_systems.set_defaults(func=partial(updateNumberingSystems, topsrcdir)) + parser_numbering_systems = subparsers.add_parser( + "numbering", help="Update numbering systems with simple " "digit mappings" + ) + parser_numbering_systems.set_defaults( + func=partial(updateNumberingSystems, topsrcdir) + ) args = parser.parse_args() args.func(args) diff --git a/js/src/debugger/moz.build b/js/src/debugger/moz.build index 0e8e3377858b41..c8c162bcbdd282 100644 --- a/js/src/debugger/moz.build +++ b/js/src/debugger/moz.build @@ -10,25 +10,22 @@ # js/src/builtin/Object.cpp had better not smash each other's .o files when # unified sources are disabled. -FINAL_LIBRARY = 'js' +FINAL_LIBRARY = "js" # Includes should be relative to parent path -LOCAL_INCLUDES += [ - '!..', - '..' 
-] +LOCAL_INCLUDES += ["!..", ".."] -include('../js-config.mozbuild') -include('../js-cxxflags.mozbuild') +include("../js-config.mozbuild") +include("../js-cxxflags.mozbuild") UNIFIED_SOURCES = [ - 'Debugger.cpp', - 'DebuggerMemory.cpp', - 'DebugScript.cpp', - 'Environment.cpp', - 'Frame.cpp', - 'NoExecute.cpp', - 'Object.cpp', - 'Script.cpp', - 'Source.cpp', + "Debugger.cpp", + "DebuggerMemory.cpp", + "DebugScript.cpp", + "Environment.cpp", + "Frame.cpp", + "NoExecute.cpp", + "Object.cpp", + "Script.cpp", + "Source.cpp", ] diff --git a/js/src/devtools/automation/autospider.py b/js/src/devtools/automation/autospider.py index 027631090e5306..5d7de891daa7ea 100755 --- a/js/src/devtools/automation/autospider.py +++ b/js/src/devtools/automation/autospider.py @@ -22,17 +22,19 @@ from subprocess import Popen from threading import Timer -Dirs = namedtuple('Dirs', ['scripts', 'js_src', 'source', 'tooltool', 'fetches']) +Dirs = namedtuple("Dirs", ["scripts", "js_src", "source", "tooltool", "fetches"]) def directories(pathmodule, cwd, fixup=lambda s: s): scripts = pathmodule.join(fixup(cwd), fixup(pathmodule.dirname(__file__))) js_src = pathmodule.abspath(pathmodule.join(scripts, "..", "..")) source = pathmodule.abspath(pathmodule.join(js_src, "..", "..")) - tooltool = pathmodule.abspath(env.get('TOOLTOOL_CHECKOUT', - pathmodule.join(source, "..", ".."))) - fetches = pathmodule.abspath(env.get('MOZ_FETCHES_DIR', - pathmodule.join(source, "..", ".."))) + tooltool = pathmodule.abspath( + env.get("TOOLTOOL_CHECKOUT", pathmodule.join(source, "..", "..")) + ) + fetches = pathmodule.abspath( + env.get("MOZ_FETCHES_DIR", pathmodule.join(source, "..", "..")) + ) return Dirs(scripts, js_src, source, tooltool, fetches) @@ -40,108 +42,169 @@ def directories(pathmodule, cwd, fixup=lambda s: s): # paths. So for direct subprocess.* invocation, use normal paths from # DIR, but when running under the shell, use POSIX style paths. DIR = directories(os.path, os.getcwd()) -PDIR = directories(posixpath, os.environ["PWD"], - fixup=lambda s: re.sub(r'^(\w):', r'/\1', s)) -env['CPP_UNIT_TESTS_DIR_JS_SRC'] = DIR.js_src - -AUTOMATION = env.get('AUTOMATION', False) - -parser = argparse.ArgumentParser( - description='Run a spidermonkey shell build job') -parser.add_argument('--verbose', action='store_true', default=AUTOMATION, - help="display additional logging info") -parser.add_argument('--dep', action='store_true', - help='do not clobber the objdir before building') -parser.add_argument('--keep', action='store_true', - help='do not delete the sanitizer output directory (for testing)') -parser.add_argument('--platform', '-p', type=str, metavar='PLATFORM', - default='', help='build platform, including a suffix ("-debug" or "") used ' - 'by buildbot to override the variant\'s "debug" setting. 
The platform can be ' - 'used to specify 32 vs 64 bits.') -parser.add_argument('--timeout', '-t', type=int, metavar='TIMEOUT', - default=12600, - help='kill job after TIMEOUT seconds') -parser.add_argument('--objdir', type=str, metavar='DIR', - default=env.get('OBJDIR', os.path.join(DIR.source, 'obj-spider')), - help='object directory') +PDIR = directories( + posixpath, os.environ["PWD"], fixup=lambda s: re.sub(r"^(\w):", r"/\1", s) +) +env["CPP_UNIT_TESTS_DIR_JS_SRC"] = DIR.js_src + +AUTOMATION = env.get("AUTOMATION", False) + +parser = argparse.ArgumentParser(description="Run a spidermonkey shell build job") +parser.add_argument( + "--verbose", + action="store_true", + default=AUTOMATION, + help="display additional logging info", +) +parser.add_argument( + "--dep", action="store_true", help="do not clobber the objdir before building" +) +parser.add_argument( + "--keep", + action="store_true", + help="do not delete the sanitizer output directory (for testing)", +) +parser.add_argument( + "--platform", + "-p", + type=str, + metavar="PLATFORM", + default="", + help='build platform, including a suffix ("-debug" or "") used ' + 'by buildbot to override the variant\'s "debug" setting. The platform can be ' + "used to specify 32 vs 64 bits.", +) +parser.add_argument( + "--timeout", + "-t", + type=int, + metavar="TIMEOUT", + default=12600, + help="kill job after TIMEOUT seconds", +) +parser.add_argument( + "--objdir", + type=str, + metavar="DIR", + default=env.get("OBJDIR", os.path.join(DIR.source, "obj-spider")), + help="object directory", +) group = parser.add_mutually_exclusive_group() -group.add_argument('--optimize', action='store_true', - help='generate an optimized build. Overrides variant setting.') -group.add_argument('--no-optimize', action='store_false', - dest='optimize', - help='generate a non-optimized build. Overrides variant setting.') +group.add_argument( + "--optimize", + action="store_true", + help="generate an optimized build. Overrides variant setting.", +) +group.add_argument( + "--no-optimize", + action="store_false", + dest="optimize", + help="generate a non-optimized build. Overrides variant setting.", +) group.set_defaults(optimize=None) group = parser.add_mutually_exclusive_group() -group.add_argument('--debug', action='store_true', - help='generate a debug build. Overrides variant setting.') -group.add_argument('--no-debug', action='store_false', - dest='debug', - help='generate a non-debug build. Overrides variant setting.') +group.add_argument( + "--debug", + action="store_true", + help="generate a debug build. Overrides variant setting.", +) +group.add_argument( + "--no-debug", + action="store_false", + dest="debug", + help="generate a non-debug build. 
Overrides variant setting.", +) group.set_defaults(debug=None) group = parser.add_mutually_exclusive_group() -group.add_argument('--jemalloc', action='store_true', - dest='jemalloc', - help='use mozilla\'s jemalloc instead of the default allocator') -group.add_argument('--no-jemalloc', action='store_false', - dest='jemalloc', - help='use the default allocator instead of mozilla\'s jemalloc') +group.add_argument( + "--jemalloc", + action="store_true", + dest="jemalloc", + help="use mozilla's jemalloc instead of the default allocator", +) +group.add_argument( + "--no-jemalloc", + action="store_false", + dest="jemalloc", + help="use the default allocator instead of mozilla's jemalloc", +) group.set_defaults(jemalloc=None) -parser.add_argument('--run-tests', '--tests', type=str, metavar='TESTSUITE', - default='', - help="comma-separated set of test suites to add to the variant's default set") -parser.add_argument('--skip-tests', '--skip', type=str, metavar='TESTSUITE', - default='', - help="comma-separated set of test suites to remove from the variant's default " - "set") -parser.add_argument('--build-only', '--build', - dest='skip_tests', action='store_const', const='all', - help="only do a build, do not run any tests") -parser.add_argument('--noconf', action='store_true', - help="skip running configure when doing a build") -parser.add_argument('--nobuild', action='store_true', - help='Do not do a build. Rerun tests on existing build.') -parser.add_argument('variant', type=str, - help='type of job requested, see variants/ subdir') +parser.add_argument( + "--run-tests", + "--tests", + type=str, + metavar="TESTSUITE", + default="", + help="comma-separated set of test suites to add to the variant's default set", +) +parser.add_argument( + "--skip-tests", + "--skip", + type=str, + metavar="TESTSUITE", + default="", + help="comma-separated set of test suites to remove from the variant's default " + "set", +) +parser.add_argument( + "--build-only", + "--build", + dest="skip_tests", + action="store_const", + const="all", + help="only do a build, do not run any tests", +) +parser.add_argument( + "--noconf", action="store_true", help="skip running configure when doing a build" +) +parser.add_argument( + "--nobuild", + action="store_true", + help="Do not do a build. Rerun tests on existing build.", +) +parser.add_argument( + "variant", type=str, help="type of job requested, see variants/ subdir" +) args = parser.parse_args() -logging.basicConfig(level=logging.INFO, format='%(message)s') +logging.basicConfig(level=logging.INFO, format="%(message)s") OBJDIR = args.objdir OUTDIR = os.path.join(OBJDIR, "out") POBJDIR = posixpath.join(PDIR.source, args.objdir) -MAKE = env.get('MAKE', 'make') -MAKEFLAGS = env.get('MAKEFLAGS', '-j6' + ('' if AUTOMATION else ' -s')) +MAKE = env.get("MAKE", "make") +MAKEFLAGS = env.get("MAKEFLAGS", "-j6" + ("" if AUTOMATION else " -s")) PYTHON = sys.executable -for d in ('scripts', 'js_src', 'source', 'tooltool', 'fetches'): +for d in ("scripts", "js_src", "source", "tooltool", "fetches"): info("DIR.{name} = {dir}".format(name=d, dir=getattr(DIR, d))) def set_vars_from_script(script, vars): - '''Run a shell script, then dump out chosen environment variables. The build - system uses shell scripts to do some configuration that we need to - borrow. On Windows, the script itself must output the variable settings - (in the form "export FOO="), since otherwise there will be - problems with mismatched Windows/POSIX formats. 
- ''' - script_text = 'source %s' % script - if platform.system() == 'Windows': - parse_state = 'parsing exports' + """Run a shell script, then dump out chosen environment variables. The build + system uses shell scripts to do some configuration that we need to + borrow. On Windows, the script itself must output the variable settings + (in the form "export FOO="), since otherwise there will be + problems with mismatched Windows/POSIX formats. + """ + script_text = "source %s" % script + if platform.system() == "Windows": + parse_state = "parsing exports" else: - script_text += '; echo VAR SETTINGS:; ' - script_text += '; '.join('echo $' + var for var in vars) - parse_state = 'scanning' - stdout = subprocess.check_output(['sh', '-x', '-c', script_text]).decode() + script_text += "; echo VAR SETTINGS:; " + script_text += "; ".join("echo $" + var for var in vars) + parse_state = "scanning" + stdout = subprocess.check_output(["sh", "-x", "-c", script_text]).decode() tograb = vars[:] for line in stdout.splitlines(): - if parse_state == 'scanning': - if line == 'VAR SETTINGS:': - parse_state = 'grabbing' - elif parse_state == 'grabbing': + if parse_state == "scanning": + if line == "VAR SETTINGS:": + parse_state = "grabbing" + elif parse_state == "grabbing": var = tograb.pop(0) env[var] = line - elif parse_state == 'parsing exports': - m = re.match(r'export (\w+)=(.*)', line) + elif parse_state == "parsing exports": + m = re.match(r"export (\w+)=(.*)", line) if m: var, value = m.groups() if var in tograb: @@ -149,20 +212,29 @@ def set_vars_from_script(script, vars): info("Setting %s = %s" % (var, value)) -def ensure_dir_exists(name, clobber=True, creation_marker_filename="CREATED-BY-AUTOSPIDER"): +def ensure_dir_exists( + name, clobber=True, creation_marker_filename="CREATED-BY-AUTOSPIDER" +): if creation_marker_filename is None: marker = None else: marker = os.path.join(name, creation_marker_filename) if clobber: - if not AUTOMATION and marker and os.path.exists(name) and not os.path.exists(marker): + if ( + not AUTOMATION + and marker + and os.path.exists(name) + and not os.path.exists(marker) + ): raise Exception( - "Refusing to delete objdir %s because it was not created by autospider" % name) + "Refusing to delete objdir %s because it was not created by autospider" + % name + ) shutil.rmtree(name, ignore_errors=True) try: os.mkdir(name) if marker: - open(marker, 'a').close() + open(marker, "a").close() except OSError: if clobber: raise @@ -171,148 +243,151 @@ def ensure_dir_exists(name, clobber=True, creation_marker_filename="CREATED-BY-A with open(os.path.join(DIR.scripts, "variants", args.variant)) as fh: variant = json.load(fh) -if args.variant == 'nonunified': +if args.variant == "nonunified": # Rewrite js/src/**/moz.build to replace UNIFIED_SOURCES to SOURCES. # Note that this modifies the current checkout. 
for dirpath, dirnames, filenames in os.walk(DIR.js_src): - if 'moz.build' in filenames: - in_place = ['-i'] - if platform.system() == 'Darwin': - in_place.append('') - subprocess.check_call(['sed'] + in_place + ['s/UNIFIED_SOURCES/SOURCES/', - os.path.join(dirpath, 'moz.build')]) - -CONFIGURE_ARGS = variant['configure-args'] + if "moz.build" in filenames: + in_place = ["-i"] + if platform.system() == "Darwin": + in_place.append("") + subprocess.check_call( + ["sed"] + + in_place + + ["s/UNIFIED_SOURCES/SOURCES/", os.path.join(dirpath, "moz.build")] + ) + +CONFIGURE_ARGS = variant["configure-args"] opt = args.optimize if opt is None: - opt = variant.get('optimize') + opt = variant.get("optimize") if opt is not None: - CONFIGURE_ARGS += (" --enable-optimize" if opt else " --disable-optimize") + CONFIGURE_ARGS += " --enable-optimize" if opt else " --disable-optimize" opt = args.debug if opt is None: - opt = variant.get('debug') + opt = variant.get("debug") if opt is not None: - CONFIGURE_ARGS += (" --enable-debug" if opt else " --disable-debug") + CONFIGURE_ARGS += " --enable-debug" if opt else " --disable-debug" opt = args.jemalloc if opt is not None: - CONFIGURE_ARGS += (" --enable-jemalloc" if opt else " --disable-jemalloc") + CONFIGURE_ARGS += " --enable-jemalloc" if opt else " --disable-jemalloc" # Some of the variants request a particular word size (eg ARM simulators). -word_bits = variant.get('bits') +word_bits = variant.get("bits") # On Linux and Windows, we build 32- and 64-bit versions on a 64 bit # host, so the caller has to specify what is desired. if word_bits is None and args.platform: - platform_arch = args.platform.split('-')[0] - if platform_arch in ('win32', 'linux'): + platform_arch = args.platform.split("-")[0] + if platform_arch in ("win32", "linux"): word_bits = 32 - elif platform_arch in ('win64', 'linux64'): + elif platform_arch in ("win64", "linux64"): word_bits = 64 # Fall back to the word size of the host. if word_bits is None: - word_bits = 64 if platform.architecture()[0] == '64bit' else 32 + word_bits = 64 if platform.architecture()[0] == "64bit" else 32 -if 'compiler' in variant: - compiler = variant['compiler'] -elif platform.system() == 'Windows': - compiler = 'clang-cl' +if "compiler" in variant: + compiler = variant["compiler"] +elif platform.system() == "Windows": + compiler = "clang-cl" else: - compiler = 'clang' + compiler = "clang" # Need a platform name to use as a key in variant files. 
if args.platform: variant_platform = args.platform.split("-")[0] -elif platform.system() == 'Windows': - variant_platform = 'win64' if word_bits == 64 else 'win32' -elif platform.system() == 'Linux': - variant_platform = 'linux64' if word_bits == 64 else 'linux' -elif platform.system() == 'Darwin': - variant_platform = 'macosx64' +elif platform.system() == "Windows": + variant_platform = "win64" if word_bits == 64 else "win32" +elif platform.system() == "Linux": + variant_platform = "linux64" if word_bits == 64 else "linux" +elif platform.system() == "Darwin": + variant_platform = "macosx64" else: - variant_platform = 'other' + variant_platform = "other" info("using compiler '{}'".format(compiler)) -cxx = {'clang': 'clang++', - 'gcc': 'g++', - 'cl': 'cl', - 'clang-cl': 'clang-cl'}.get(compiler) +cxx = {"clang": "clang++", "gcc": "g++", "cl": "cl", "clang-cl": "clang-cl"}.get( + compiler +) -compiler_dir = env.get('GCCDIR', os.path.join(DIR.fetches, compiler)) +compiler_dir = env.get("GCCDIR", os.path.join(DIR.fetches, compiler)) info("looking for compiler under {}/".format(compiler_dir)) compiler_libdir = None -if os.path.exists(os.path.join(compiler_dir, 'bin', compiler)): - env.setdefault('CC', os.path.join(compiler_dir, 'bin', compiler)) - env.setdefault('CXX', os.path.join(compiler_dir, 'bin', cxx)) - if compiler == 'clang': - platlib = 'lib' +if os.path.exists(os.path.join(compiler_dir, "bin", compiler)): + env.setdefault("CC", os.path.join(compiler_dir, "bin", compiler)) + env.setdefault("CXX", os.path.join(compiler_dir, "bin", cxx)) + if compiler == "clang": + platlib = "lib" else: - platlib = 'lib64' if word_bits == 64 else 'lib' + platlib = "lib64" if word_bits == 64 else "lib" compiler_libdir = os.path.join(compiler_dir, platlib) else: - env.setdefault('CC', compiler) - env.setdefault('CXX', cxx) + env.setdefault("CC", compiler) + env.setdefault("CXX", cxx) -bindir = os.path.join(OBJDIR, 'dist', 'bin') -env['LD_LIBRARY_PATH'] = ':'.join( - p for p in (bindir, compiler_libdir, env.get('LD_LIBRARY_PATH')) if p) +bindir = os.path.join(OBJDIR, "dist", "bin") +env["LD_LIBRARY_PATH"] = ":".join( + p for p in (bindir, compiler_libdir, env.get("LD_LIBRARY_PATH")) if p +) -for v in ('CC', 'CXX', 'LD_LIBRARY_PATH'): +for v in ("CC", "CXX", "LD_LIBRARY_PATH"): info("default {name} = {value}".format(name=v, value=env[v])) -rust_dir = os.path.join(DIR.fetches, 'rustc') -if os.path.exists(os.path.join(rust_dir, 'bin', 'rustc')): - env.setdefault('RUSTC', os.path.join(rust_dir, 'bin', 'rustc')) - env.setdefault('CARGO', os.path.join(rust_dir, 'bin', 'cargo')) +rust_dir = os.path.join(DIR.fetches, "rustc") +if os.path.exists(os.path.join(rust_dir, "bin", "rustc")): + env.setdefault("RUSTC", os.path.join(rust_dir, "bin", "rustc")) + env.setdefault("CARGO", os.path.join(rust_dir, "bin", "cargo")) else: - env.setdefault('RUSTC', 'rustc') - env.setdefault('CARGO', 'cargo') - -if platform.system() == 'Darwin': - os.environ['SOURCE'] = DIR.source - set_vars_from_script(os.path.join(DIR.scripts, 'macbuildenv.sh'), - ['CC', 'CXX']) -elif platform.system() == 'Windows': - MAKE = env.get('MAKE', 'mozmake') - os.environ['SOURCE'] = DIR.source + env.setdefault("RUSTC", "rustc") + env.setdefault("CARGO", "cargo") + +if platform.system() == "Darwin": + os.environ["SOURCE"] = DIR.source + set_vars_from_script(os.path.join(DIR.scripts, "macbuildenv.sh"), ["CC", "CXX"]) +elif platform.system() == "Windows": + MAKE = env.get("MAKE", "mozmake") + os.environ["SOURCE"] = DIR.source if word_bits == 64: - 
os.environ['USE_64BIT'] = '1' - set_vars_from_script(posixpath.join(PDIR.scripts, 'winbuildenv.sh'), - ['PATH', 'VC_PATH', 'DIA_SDK_PATH', 'CC', 'CXX', - 'WINDOWSSDKDIR']) + os.environ["USE_64BIT"] = "1" + set_vars_from_script( + posixpath.join(PDIR.scripts, "winbuildenv.sh"), + ["PATH", "VC_PATH", "DIA_SDK_PATH", "CC", "CXX", "WINDOWSSDKDIR"], + ) # Configure flags, based on word length and cross-compilation if word_bits == 32: - if platform.system() == 'Windows': - CONFIGURE_ARGS += ' --target=i686-pc-mingw32' - elif platform.system() == 'Linux': - if not platform.machine().startswith('arm'): - CONFIGURE_ARGS += ' --target=i686-pc-linux' + if platform.system() == "Windows": + CONFIGURE_ARGS += " --target=i686-pc-mingw32" + elif platform.system() == "Linux": + if not platform.machine().startswith("arm"): + CONFIGURE_ARGS += " --target=i686-pc-linux" # Add SSE2 support for x86/x64 architectures. - if not platform.machine().startswith('arm'): - if platform.system() == 'Windows': - sse_flags = '-arch:SSE2' + if not platform.machine().startswith("arm"): + if platform.system() == "Windows": + sse_flags = "-arch:SSE2" else: - sse_flags = '-msse -msse2 -mfpmath=sse' - env['CCFLAGS'] = '{0} {1}'.format(env.get('CCFLAGS', ''), sse_flags) - env['CXXFLAGS'] = '{0} {1}'.format(env.get('CXXFLAGS', ''), sse_flags) + sse_flags = "-msse -msse2 -mfpmath=sse" + env["CCFLAGS"] = "{0} {1}".format(env.get("CCFLAGS", ""), sse_flags) + env["CXXFLAGS"] = "{0} {1}".format(env.get("CXXFLAGS", ""), sse_flags) else: - if platform.system() == 'Windows': - CONFIGURE_ARGS += ' --target=x86_64-pc-mingw32' + if platform.system() == "Windows": + CONFIGURE_ARGS += " --target=x86_64-pc-mingw32" -if platform.system() == 'Linux' and AUTOMATION: - CONFIGURE_ARGS = '--enable-stdcxx-compat --disable-gold ' + CONFIGURE_ARGS +if platform.system() == "Linux" and AUTOMATION: + CONFIGURE_ARGS = "--enable-stdcxx-compat --disable-gold " + CONFIGURE_ARGS # Override environment variant settings conditionally. CONFIGURE_ARGS = "{} {}".format( - variant.get('conditional-configure-args', {}).get(variant_platform, ''), - CONFIGURE_ARGS + variant.get("conditional-configure-args", {}).get(variant_platform, ""), + CONFIGURE_ARGS, ) # Timeouts. @@ -334,14 +409,14 @@ def killall(): # Any jobs that wish to produce additional output can save them into the upload # directory if there is such a thing, falling back to OBJDIR. -env.setdefault('MOZ_UPLOAD_DIR', OBJDIR) -ensure_dir_exists(env['MOZ_UPLOAD_DIR'], clobber=False, creation_marker_filename=None) -info("MOZ_UPLOAD_DIR = {}".format(env['MOZ_UPLOAD_DIR'])) +env.setdefault("MOZ_UPLOAD_DIR", OBJDIR) +ensure_dir_exists(env["MOZ_UPLOAD_DIR"], clobber=False, creation_marker_filename=None) +info("MOZ_UPLOAD_DIR = {}".format(env["MOZ_UPLOAD_DIR"])) def run_command(command, check=False, **kwargs): - kwargs.setdefault('cwd', OBJDIR) - info("in directory {}, running {}".format(kwargs['cwd'], command)) + kwargs.setdefault("cwd", OBJDIR) + info("in directory {}, running {}".format(kwargs["cwd"], command)) proc = Popen(command, **kwargs) ACTIVE_PROCESSES.add(proc) stdout, stderr = None, None @@ -357,43 +432,49 @@ def run_command(command, check=False, **kwargs): # Replacement strings in environment variables. 
REPLACEMENTS = { - 'DIR': DIR.scripts, - 'TOOLTOOL_CHECKOUT': DIR.tooltool, - 'MOZ_FETCHES_DIR': DIR.fetches, - 'MOZ_UPLOAD_DIR': env['MOZ_UPLOAD_DIR'], - 'OUTDIR': OUTDIR, + "DIR": DIR.scripts, + "TOOLTOOL_CHECKOUT": DIR.tooltool, + "MOZ_FETCHES_DIR": DIR.fetches, + "MOZ_UPLOAD_DIR": env["MOZ_UPLOAD_DIR"], + "OUTDIR": OUTDIR, } # Add in environment variable settings for this variant. Normally used to # modify the flags passed to the shell or to set the GC zeal mode. -for k, v in variant.get('env', {}).items(): +for k, v in variant.get("env", {}).items(): env[k] = v.format(**REPLACEMENTS) if AUTOMATION: # Currently only supported on linux64. - if platform.system() == 'Linux' and platform.machine() == 'x86_64': - use_minidump = variant.get('use_minidump', True) + if platform.system() == "Linux" and platform.machine() == "x86_64": + use_minidump = variant.get("use_minidump", True) else: use_minidump = False else: use_minidump = False if use_minidump: - env.setdefault('MINIDUMP_SAVE_PATH', env['MOZ_UPLOAD_DIR']) + env.setdefault("MINIDUMP_SAVE_PATH", env["MOZ_UPLOAD_DIR"]) injector_lib = None - if platform.system() == 'Linux': - injector_lib = os.path.join(DIR.tooltool, 'breakpad-tools', 'libbreakpadinjector.so') - env.setdefault('MINIDUMP_STACKWALK', - os.path.join(DIR.tooltool, 'breakpad-tools', 'minidump_stackwalk')) - elif platform.system() == 'Darwin': - injector_lib = os.path.join(DIR.tooltool, 'breakpad-tools', 'breakpadinjector.dylib') + if platform.system() == "Linux": + injector_lib = os.path.join( + DIR.tooltool, "breakpad-tools", "libbreakpadinjector.so" + ) + env.setdefault( + "MINIDUMP_STACKWALK", + os.path.join(DIR.tooltool, "breakpad-tools", "minidump_stackwalk"), + ) + elif platform.system() == "Darwin": + injector_lib = os.path.join( + DIR.tooltool, "breakpad-tools", "breakpadinjector.dylib" + ) if not injector_lib or not os.path.exists(injector_lib): use_minidump = False info("use_minidump is {}".format(use_minidump)) - info(" MINIDUMP_SAVE_PATH={}".format(env['MINIDUMP_SAVE_PATH'])) + info(" MINIDUMP_SAVE_PATH={}".format(env["MINIDUMP_SAVE_PATH"])) info(" injector lib is {}".format(injector_lib)) - info(" MINIDUMP_STACKWALK={}".format(env.get('MINIDUMP_STACKWALK'))) + info(" MINIDUMP_STACKWALK={}".format(env.get("MINIDUMP_STACKWALK"))) def need_updating_configure(configure): @@ -401,8 +482,8 @@ def need_updating_configure(configure): return True dep_files = [ - os.path.join(DIR.js_src, 'configure.in'), - os.path.join(DIR.js_src, 'old-configure.in'), + os.path.join(DIR.js_src, "configure.in"), + os.path.join(DIR.js_src, "old-configure.in"), ] for file in dep_files: if os.path.getmtime(file) > os.path.getmtime(configure): @@ -412,43 +493,54 @@ def need_updating_configure(configure): if not args.nobuild: - CONFIGURE_ARGS += ' --enable-nspr-build' - CONFIGURE_ARGS += ' --prefix={OBJDIR}/dist'.format(OBJDIR=POBJDIR) + CONFIGURE_ARGS += " --enable-nspr-build" + CONFIGURE_ARGS += " --prefix={OBJDIR}/dist".format(OBJDIR=POBJDIR) # Generate a configure script from configure.in. 
- configure = os.path.join(DIR.js_src, 'configure') + configure = os.path.join(DIR.js_src, "configure") if need_updating_configure(configure): shutil.copyfile(configure + ".in", configure) os.chmod(configure, 0o755) # Run configure if not args.noconf: - run_command(['sh', '-c', posixpath.join(PDIR.js_src, 'configure') + ' ' + CONFIGURE_ARGS], - check=True) + run_command( + [ + "sh", + "-c", + posixpath.join(PDIR.js_src, "configure") + " " + CONFIGURE_ARGS, + ], + check=True, + ) # Run make - run_command('%s -w %s' % (MAKE, MAKEFLAGS), shell=True, check=True) + run_command("%s -w %s" % (MAKE, MAKEFLAGS), shell=True, check=True) if use_minidump: # Convert symbols to breakpad format. hostdir = os.path.join(OBJDIR, "dist", "host", "bin") if not os.path.isdir(hostdir): os.makedirs(hostdir) - shutil.copy(os.path.join(DIR.tooltool, "breakpad-tools", "dump_syms"), - os.path.join(hostdir, 'dump_syms')) - run_command([ - 'make', - 'recurse_syms', - 'MOZ_SOURCE_REPO=file://' + DIR.source, - 'RUSTC_COMMIT=0', - 'MOZ_CRASHREPORTER=1', - 'MOZ_AUTOMATION_BUILD_SYMBOLS=1', - ], check=True) + shutil.copy( + os.path.join(DIR.tooltool, "breakpad-tools", "dump_syms"), + os.path.join(hostdir, "dump_syms"), + ) + run_command( + [ + "make", + "recurse_syms", + "MOZ_SOURCE_REPO=file://" + DIR.source, + "RUSTC_COMMIT=0", + "MOZ_CRASHREPORTER=1", + "MOZ_AUTOMATION_BUILD_SYMBOLS=1", + ], + check=True, + ) COMMAND_PREFIX = [] # On Linux, disable ASLR to make shell builds a bit more reproducible. if subprocess.call("type setarch >/dev/null 2>&1", shell=True) == 0: - COMMAND_PREFIX.extend(['setarch', platform.machine(), '-R']) + COMMAND_PREFIX.extend(["setarch", platform.machine(), "-R"]) def run_test_command(command, **kwargs): @@ -456,50 +548,56 @@ def run_test_command(command, **kwargs): return status -default_test_suites = frozenset(['jstests', 'jittest', 'jsapitests', 'checks']) -nondefault_test_suites = frozenset(['gdb']) +default_test_suites = frozenset(["jstests", "jittest", "jsapitests", "checks"]) +nondefault_test_suites = frozenset(["gdb"]) all_test_suites = default_test_suites | nondefault_test_suites test_suites = set(default_test_suites) def normalize_tests(tests): - if 'all' in tests: + if "all" in tests: return default_test_suites return tests # Override environment variant settings conditionally. -for k, v in variant.get('conditional-env', {}).get(variant_platform, {}).items(): +for k, v in variant.get("conditional-env", {}).get(variant_platform, {}).items(): env[k] = v.format(**REPLACEMENTS) # Skip any tests that are not run on this platform (or the 'all' platform). -test_suites -= set(normalize_tests(variant.get('skip-tests', {}).get(variant_platform, []))) -test_suites -= set(normalize_tests(variant.get('skip-tests', {}).get('all', []))) +test_suites -= set( + normalize_tests(variant.get("skip-tests", {}).get(variant_platform, [])) +) +test_suites -= set(normalize_tests(variant.get("skip-tests", {}).get("all", []))) # Add in additional tests for this platform (or the 'all' platform). -test_suites |= set(normalize_tests(variant.get('extra-tests', {}).get(variant_platform, []))) -test_suites |= set(normalize_tests(variant.get('extra-tests', {}).get('all', []))) +test_suites |= set( + normalize_tests(variant.get("extra-tests", {}).get(variant_platform, [])) +) +test_suites |= set(normalize_tests(variant.get("extra-tests", {}).get("all", []))) # Now adjust the variant's default test list with command-line arguments. 
test_suites |= set(normalize_tests(args.run_tests.split(","))) test_suites -= set(normalize_tests(args.skip_tests.split(","))) -if 'all' in args.skip_tests.split(","): +if "all" in args.skip_tests.split(","): test_suites = [] # Bug 1391877 - Windows test runs are getting mysterious timeouts when run # through taskcluster, but only when running multiple jit-test jobs in # parallel. Work around them for now. -if platform.system() == 'Windows': - env['JITTEST_EXTRA_ARGS'] = "-j1 " + env.get('JITTEST_EXTRA_ARGS', '') +if platform.system() == "Windows": + env["JITTEST_EXTRA_ARGS"] = "-j1 " + env.get("JITTEST_EXTRA_ARGS", "") # Bug 1557130 - Atomics tests can create many additional threads which can # lead to resource exhaustion, resulting in intermittent failures. This was # only seen on beefy machines (> 32 cores), so limit the number of parallel # workers for now. -if platform.system() == 'Windows': +if platform.system() == "Windows": worker_count = min(multiprocessing.cpu_count(), 16) - env['JSTESTS_EXTRA_ARGS'] = "-j{} ".format(worker_count) + env.get('JSTESTS_EXTRA_ARGS', '') + env["JSTESTS_EXTRA_ARGS"] = "-j{} ".format(worker_count) + env.get( + "JSTESTS_EXTRA_ARGS", "" + ) if use_minidump: # Set up later js invocations to run with the breakpad injector loaded. @@ -507,47 +605,49 @@ def normalize_tests(tests): # cross-compiling from 64- to 32-bit, that will fail and produce stderr # output when running any 64-bit commands, which breaks eg mozconfig # processing. So use the --dll command line mechanism universally. - for v in ('JSTESTS_EXTRA_ARGS', 'JITTEST_EXTRA_ARGS'): - env[v] = "--args='--dll %s' %s" % (injector_lib, env.get(v, '')) + for v in ("JSTESTS_EXTRA_ARGS", "JITTEST_EXTRA_ARGS"): + env[v] = "--args='--dll %s' %s" % (injector_lib, env.get(v, "")) # Always run all enabled tests, even if earlier ones failed. But return the # first failed status. 
-results = [('(make-nonempty)', 0)] +results = [("(make-nonempty)", 0)] -if 'checks' in test_suites: - results.append(('make check', run_test_command([MAKE, 'check']))) +if "checks" in test_suites: + results.append(("make check", run_test_command([MAKE, "check"]))) -if 'jittest' in test_suites: - results.append(('make check-jit-test', run_test_command([MAKE, 'check-jit-test']))) -if 'jsapitests' in test_suites: - jsapi_test_binary = os.path.join(OBJDIR, 'dist', 'bin', 'jsapi-tests') +if "jittest" in test_suites: + results.append(("make check-jit-test", run_test_command([MAKE, "check-jit-test"]))) +if "jsapitests" in test_suites: + jsapi_test_binary = os.path.join(OBJDIR, "dist", "bin", "jsapi-tests") test_env = env.copy() - test_env['TOPSRCDIR'] = DIR.source - if use_minidump and platform.system() == 'Linux': - test_env['LD_PRELOAD'] = injector_lib + test_env["TOPSRCDIR"] = DIR.source + if use_minidump and platform.system() == "Linux": + test_env["LD_PRELOAD"] = injector_lib st = run_test_command([jsapi_test_binary], env=test_env) if st < 0: print("PROCESS-CRASH | jsapi-tests | application crashed") print("Return code: {}".format(st)) - results.append(('jsapi-tests', st)) -if 'jstests' in test_suites: - results.append(('jstests', run_test_command([MAKE, 'check-jstests']))) -if 'gdb' in test_suites: + results.append(("jsapi-tests", st)) +if "jstests" in test_suites: + results.append(("jstests", run_test_command([MAKE, "check-jstests"]))) +if "gdb" in test_suites: test_script = os.path.join(DIR.js_src, "gdb", "run-tests.py") auto_args = ["-s", "-o", "--no-progress"] if AUTOMATION else [] - extra_args = env.get('GDBTEST_EXTRA_ARGS', '').split(' ') - results.append(( - 'gdb', - run_test_command([PYTHON, test_script, *auto_args, *extra_args, OBJDIR]) - )) + extra_args = env.get("GDBTEST_EXTRA_ARGS", "").split(" ") + results.append( + ( + "gdb", + run_test_command([PYTHON, test_script, *auto_args, *extra_args, OBJDIR]), + ) + ) # FIXME bug 1291449: This would be unnecessary if we could run msan with -mllvm # -msan-keep-going, but in clang 3.8 it causes a hang during compilation. -if variant.get('ignore-test-failures'): +if variant.get("ignore-test-failures"): logging.warning("Ignoring test results %s" % (results,)) - results = [('ignored', 0)] + results = [("ignored", 0)] -if args.variant == 'msan': +if args.variant == "msan": files = filter(lambda f: f.startswith("sanitize_log."), os.listdir(OUTDIR)) fullfiles = [os.path.join(OUTDIR, f) for f in files] @@ -555,48 +655,59 @@ def normalize_tests(tests): sites = Counter() errors = Counter() for filename in fullfiles: - with open(os.path.join(OUTDIR, filename), 'rb') as fh: + with open(os.path.join(OUTDIR, filename), "rb") as fh: for line in fh: - m = re.match(r'^SUMMARY: \w+Sanitizer: (?:data race|use-of-uninitialized-value) (.*)', # NOQA: E501 - line.strip()) + m = re.match( + r"^SUMMARY: \w+Sanitizer: (?:data race|use-of-uninitialized-value) (.*)", # NOQA: E501 + line.strip(), + ) if m: # Some reports include file:line:column, some just # file:line. Just in case it's nondeterministic, we will # canonicalize to just the line number. - site = re.sub(r'^(\S+?:\d+)(:\d+)* ', r'\1 ', m.group(1)) + site = re.sub(r"^(\S+?:\d+)(:\d+)* ", r"\1 ", m.group(1)) sites[site] += 1 # Write a summary file and display it to stdout. 
- summary_filename = os.path.join(env['MOZ_UPLOAD_DIR'], "%s_summary.txt" % args.variant) - with open(summary_filename, 'wb') as outfh: + summary_filename = os.path.join( + env["MOZ_UPLOAD_DIR"], "%s_summary.txt" % args.variant + ) + with open(summary_filename, "wb") as outfh: for location, count in sites.most_common(): print >> outfh, "%d %s" % (count, location) - print(open(summary_filename, 'rb').read()) + print(open(summary_filename, "rb").read()) - if 'max-errors' in variant: - max_allowed = variant['max-errors'] + if "max-errors" in variant: + max_allowed = variant["max-errors"] print("Found %d errors out of %d allowed" % (len(sites), max_allowed)) if len(sites) > max_allowed: - results.append(('too many msan errors', 1)) + results.append(("too many msan errors", 1)) # Gather individual results into a tarball. Note that these are # distinguished only by pid of the JS process running within each test, so # given the 16-bit limitation of pids, it's totally possible that some of # these files will be lost due to being overwritten. - command = ['tar', '-C', OUTDIR, '-zcf', - os.path.join(env['MOZ_UPLOAD_DIR'], '%s.tar.gz' % args.variant)] + command = [ + "tar", + "-C", + OUTDIR, + "-zcf", + os.path.join(env["MOZ_UPLOAD_DIR"], "%s.tar.gz" % args.variant), + ] command += files subprocess.call(command) # Generate stacks from minidumps. if use_minidump: venv_python = os.path.join(OBJDIR, "_virtualenvs", "init_py3", "bin", "python3") - run_command([ - venv_python, - os.path.join(DIR.source, "testing/mozbase/mozcrash/mozcrash/mozcrash.py"), - os.getenv("TMPDIR", "/tmp"), - os.path.join(OBJDIR, "dist/crashreporter-symbols"), - ]) + run_command( + [ + venv_python, + os.path.join(DIR.source, "testing/mozbase/mozcrash/mozcrash/mozcrash.py"), + os.getenv("TMPDIR", "/tmp"), + os.path.join(OBJDIR, "dist/crashreporter-symbols"), + ] + ) for name, st in results: print("exit status %d for '%s'" % (st, name)) diff --git a/js/src/devtools/gc/gc-test.py b/js/src/devtools/gc/gc-test.py index 2b030c1ae832f1..77314e66982ed0 100644 --- a/js/src/devtools/gc/gc-test.py +++ b/js/src/devtools/gc/gc-test.py @@ -25,10 +25,10 @@ def from_file(cls, path, name, options): def find_tests(dir, substring=None): ans = [] for dirpath, dirnames, filenames in os.walk(dir): - if dirpath == '.': + if dirpath == ".": continue for filename in filenames: - if not filename.endswith('.js'): + if not filename.endswith(".js"): continue test = os.path.join(dirpath, filename) if substring is None or substring in os.path.relpath(test, dir): @@ -37,7 +37,7 @@ def find_tests(dir, substring=None): def get_test_cmd(path): - return [JS, '-f', path] + return [JS, "-f", path] def avg(seq): @@ -51,12 +51,12 @@ def stddev(seq, mean): def run_test(test): env = os.environ.copy() - env['MOZ_GCTIMER'] = 'stderr' + env["MOZ_GCTIMER"] = "stderr" cmd = get_test_cmd(test.path) total = [] mark = [] sweep = [] - close_fds = sys.platform != 'win32' + close_fds = sys.platform != "win32" p = Popen(cmd, stdin=PIPE, stdout=PIPE, stderr=PIPE, close_fds=close_fds, env=env) out, err = p.communicate() out, err = out.decode(), err.decode() @@ -64,14 +64,14 @@ def run_test(test): float_array = [float(_) for _ in err.split()] if len(float_array) == 0: - print('Error: No data from application. Configured with --enable-gctimer?') + print("Error: No data from application. 
Configured with --enable-gctimer?") sys.exit(1) for i, currItem in enumerate(float_array): - if (i % 3 == 0): + if i % 3 == 0: total.append(currItem) else: - if (i % 3 == 1): + if i % 3 == 1: mark.append(currItem) else: sweep.append(currItem) @@ -88,14 +88,19 @@ def run_tests(tests, test_dir): TMax, TAvg, MMax, MAvg, SMax, SAvg = run_test(test) bench_map[test.name] = [TMax, TAvg, MMax, MAvg, SMax, SAvg] fmt = '%20s: {"TMax": %4.1f, "TAvg": %4.1f, "MMax": %4.1f, "MAvg": %4.1f, "SMax": %4.1f, "SAvg": %4.1f}' # NOQA: E501 - if (i != len(tests) - 1): - fmt += ',' + if i != len(tests) - 1: + fmt += "," print(fmt % (filename_str, TMax, TAvg, MMax, MAvg, SMax, MAvg)) except KeyboardInterrupt: - print('fail') + print("fail") - return dict((filename, dict(TMax=TMax, TAvg=TAvg, MMax=MMax, MAvg=MAvg, SMax=SMax, SAvg=SAvg)) - for filename, (TMax, TAvg, MMax, MAvg, SMax, SAvg) in bench_map.iteritems()) + return dict( + ( + filename, + dict(TMax=TMax, TAvg=TAvg, MMax=MMax, MAvg=MAvg, SMax=SMax, SAvg=SAvg), + ) + for filename, (TMax, TAvg, MMax, MAvg, SMax, SAvg) in bench_map.iteritems() + ) def compare(current, baseline): @@ -104,43 +109,54 @@ def compare(current, baseline): try: baseline_result = baseline[key] except KeyError: - print(key, 'missing from baseline') + print(key, "missing from baseline") continue - val_getter = itemgetter('TMax', 'TAvg', 'MMax', 'MAvg', 'SMax', 'SAvg') + val_getter = itemgetter("TMax", "TAvg", "MMax", "MAvg", "SMax", "SAvg") BTMax, BTAvg, BMMax, BMAvg, BSMax, BSAvg = val_getter(baseline_result) CTMax, CTAvg, CMMax, CMAvg, CSMax, CSAvg = val_getter(current_result) if CTAvg <= BTAvg: speedup = (CTAvg / BTAvg - 1) * 100 - result = 'faster: %6.2f < baseline %6.2f (%+6.2f%%)' % \ - (CTAvg, BTAvg, speedup) + result = "faster: %6.2f < baseline %6.2f (%+6.2f%%)" % ( + CTAvg, + BTAvg, + speedup, + ) percent_speedups.append(speedup) else: slowdown = (CTAvg / BTAvg - 1) * 100 - result = 'SLOWER: %6.2f > baseline %6.2f (%+6.2f%%) ' % \ - (CTAvg, BTAvg, slowdown) + result = "SLOWER: %6.2f > baseline %6.2f (%+6.2f%%) " % ( + CTAvg, + BTAvg, + slowdown, + ) percent_speedups.append(slowdown) - print('%30s: %s' % (key, result)) + print("%30s: %s" % (key, result)) if percent_speedups: - print('Average speedup: %.2f%%' % avg(percent_speedups)) + print("Average speedup: %.2f%%" % avg(percent_speedups)) -if __name__ == '__main__': +if __name__ == "__main__": script_path = os.path.abspath(__file__) script_dir = os.path.dirname(script_path) - test_dir = os.path.join(script_dir, 'tests') + test_dir = os.path.join(script_dir, "tests") from optparse import OptionParser - op = OptionParser(usage='%prog [options] JS_SHELL [TESTS]') - op.add_option('-b', '--baseline', metavar='JSON_PATH', - dest='baseline_path', help='json file with baseline values to ' - 'compare against') + op = OptionParser(usage="%prog [options] JS_SHELL [TESTS]") + + op.add_option( + "-b", + "--baseline", + metavar="JSON_PATH", + dest="baseline_path", + help="json file with baseline values to " "compare against", + ) (OPTIONS, args) = op.parse_args() if len(args) < 1: - op.error('missing JS_SHELL argument') + op.error("missing JS_SHELL argument") # We need to make sure we are using backslashes on Windows. JS, test_args = os.path.normpath(args[0]), args[1:] @@ -150,7 +166,7 @@ def compare(current, baseline): test_list = find_tests(test_dir) if not test_list: - print >> sys.stderr, "No tests found matching command line arguments." + print >>sys.stderr, "No tests found matching command line arguments." 
sys.exit(0) test_list = [Test.from_file(tst, name, OPTIONS) for tst, name in test_list] @@ -162,14 +178,14 @@ def compare(current, baseline): except OSError: if not os.path.exists(JS): - print >> sys.stderr, "JS shell argument: file does not exist: '%s'" % JS + print >>sys.stderr, "JS shell argument: file does not exist: '%s'" % JS sys.exit(1) else: raise if OPTIONS.baseline_path: baseline_map = [] - fh = open(OPTIONS.baseline_path, 'r') + fh = open(OPTIONS.baseline_path, "r") baseline_map = json.load(fh) fh.close() compare(current=bench_map, baseline=baseline_map) diff --git a/js/src/devtools/rootAnalysis/analyze.py b/js/src/devtools/rootAnalysis/analyze.py index e131ccedb33b26..5c991eeafa7ba3 100755 --- a/js/src/devtools/rootAnalysis/analyze.py +++ b/js/src/devtools/rootAnalysis/analyze.py @@ -28,20 +28,21 @@ try: execfile except Exception: + def execfile(thefile, globals): exec(compile(open(thefile).read(), filename=thefile, mode="exec"), globals) def env(config): e = dict(os.environ) - e['PATH'] = ':'.join(p for p in (config.get('gcc_bin'), - config.get('sixgill_bin'), - e['PATH']) if p) - e['XDB'] = '%(sixgill_bin)s/xdb.so' % config - e['SOURCE'] = config['source'] - e['ANALYZED_OBJDIR'] = config['objdir'] - bindir = os.path.dirname(config['js']) - e['LD_LIBRARY_PATH'] = ':'.join(p for p in (e.get('LD_LIBRARY_PATH'), bindir) if p) + e["PATH"] = ":".join( + p for p in (config.get("gcc_bin"), config.get("sixgill_bin"), e["PATH"]) if p + ) + e["XDB"] = "%(sixgill_bin)s/xdb.so" % config + e["SOURCE"] = config["source"] + e["ANALYZED_OBJDIR"] = config["objdir"] + bindir = os.path.dirname(config["js"]) + e["LD_LIBRARY_PATH"] = ":".join(p for p in (e.get("LD_LIBRARY_PATH"), bindir) if p) return e @@ -56,13 +57,15 @@ def fill(command, config): fragment % config except Exception: problems.append(fragment) - raise Exception("\n".join(["Substitution failed:"] + [" %s" % s for s in problems])) + raise Exception( + "\n".join(["Substitution failed:"] + [" %s" % s for s in problems]) + ) def print_command(command, outfile=None, env=None): - output = ' '.join(quote(s) for s in command) + output = " ".join(quote(s) for s in command) if outfile: - output += ' > ' + outfile + output += " > " + outfile if env: changed = {} e = os.environ @@ -75,33 +78,37 @@ def print_command(command, outfile=None, env=None): if key in e and e[key] in value: start = value.index(e[key]) end = start + len(e[key]) - outputs.append('%s="%s${%s}%s"' % (key, - value[:start], - key, - value[end:])) + outputs.append( + '%s="%s${%s}%s"' % (key, value[:start], key, value[end:]) + ) else: outputs.append("%s='%s'" % (key, value)) - output = ' '.join(outputs) + " " + output + output = " ".join(outputs) + " " + output print(output) def generate_hazards(config, outfilename): jobs = [] - for i in range(int(config['jobs'])): - command = fill(('%(js)s', - '%(analysis_scriptdir)s/analyzeRoots.js', - '%(gcFunctions_list)s', - '%(gcEdges)s', - '%(limitedFunctions_list)s', - '%(gcTypes)s', - '%(typeInfo)s', - str(i+1), '%(jobs)s', - 'tmp.%s' % (i+1,)), - config) - outfile = 'rootingHazards.%s' % (i+1,) - output = open(outfile, 'w') - if config['verbose']: + for i in range(int(config["jobs"])): + command = fill( + ( + "%(js)s", + "%(analysis_scriptdir)s/analyzeRoots.js", + "%(gcFunctions_list)s", + "%(gcEdges)s", + "%(limitedFunctions_list)s", + "%(gcTypes)s", + "%(typeInfo)s", + str(i + 1), + "%(jobs)s", + "tmp.%s" % (i + 1,), + ), + config, + ) + outfile = "rootingHazards.%s" % (i + 1,) + output = open(outfile, "w") + if config["verbose"]: 
print_command(command, outfile=outfile, env=env(config)) jobs.append((command, Popen(command, stdout=output, env=env(config)))) @@ -112,69 +119,94 @@ def generate_hazards(config, outfilename): final_status = final_status or status if final_status: - raise subprocess.CalledProcessError(final_status, 'analyzeRoots.js') + raise subprocess.CalledProcessError(final_status, "analyzeRoots.js") - with open(outfilename, 'w') as output: - command = ['cat'] + ['rootingHazards.%s' % (i+1,) for i in range(int(config['jobs']))] - if config['verbose']: + with open(outfilename, "w") as output: + command = ["cat"] + [ + "rootingHazards.%s" % (i + 1,) for i in range(int(config["jobs"])) + ] + if config["verbose"]: print_command(command, outfile=outfilename) subprocess.call(command, stdout=output) -JOBS = {'dbs': - (('%(analysis_scriptdir)s/run_complete', - '--foreground', - '--no-logs', - '--build-root=%(objdir)s', - '--wrap-dir=%(sixgill)s/scripts/wrap_gcc', - '--work-dir=work', - '-b', '%(sixgill_bin)s', - '--buildcommand=%(buildcommand)s', - '.'), - ()), - - 'list-dbs': - (('ls', '-l'), - ()), - - 'callgraph': - (('%(js)s', '%(analysis_scriptdir)s/computeCallgraph.js', '%(typeInfo)s', - '[callgraph]'), - ('callgraph.txt',)), - - 'gcFunctions': - (('%(js)s', '%(analysis_scriptdir)s/computeGCFunctions.js', '%(callgraph)s', - '[gcFunctions]', '[gcFunctions_list]', '[gcEdges]', '[limitedFunctions_list]'), - ('gcFunctions.txt', 'gcFunctions.lst', 'gcEdges.txt', 'limitedFunctions.lst')), - - 'gcTypes': - (('%(js)s', '%(analysis_scriptdir)s/computeGCTypes.js', - '[gcTypes]', '[typeInfo]'), - ('gcTypes.txt', 'typeInfo.txt')), - - 'allFunctions': - (('%(sixgill_bin)s/xdbkeys', 'src_body.xdb',), - 'allFunctions.txt'), - - 'hazards': - (generate_hazards, 'rootingHazards.txt'), - - 'explain': - ((os.environ.get('PYTHON', 'python2.7'), - '%(analysis_scriptdir)s/explain.py', - '%(hazards)s', '%(gcFunctions)s', - '[explained_hazards]', '[unnecessary]', '[refs]'), - ('hazards.txt', 'unnecessary.txt', 'refs.txt')), - - 'heapwrites': - (('%(js)s', '%(analysis_scriptdir)s/analyzeHeapWrites.js'), - 'heapWriteHazards.txt'), - } +JOBS = { + "dbs": ( + ( + "%(analysis_scriptdir)s/run_complete", + "--foreground", + "--no-logs", + "--build-root=%(objdir)s", + "--wrap-dir=%(sixgill)s/scripts/wrap_gcc", + "--work-dir=work", + "-b", + "%(sixgill_bin)s", + "--buildcommand=%(buildcommand)s", + ".", + ), + (), + ), + "list-dbs": (("ls", "-l"), ()), + "callgraph": ( + ( + "%(js)s", + "%(analysis_scriptdir)s/computeCallgraph.js", + "%(typeInfo)s", + "[callgraph]", + ), + ("callgraph.txt",), + ), + "gcFunctions": ( + ( + "%(js)s", + "%(analysis_scriptdir)s/computeGCFunctions.js", + "%(callgraph)s", + "[gcFunctions]", + "[gcFunctions_list]", + "[gcEdges]", + "[limitedFunctions_list]", + ), + ("gcFunctions.txt", "gcFunctions.lst", "gcEdges.txt", "limitedFunctions.lst"), + ), + "gcTypes": ( + ( + "%(js)s", + "%(analysis_scriptdir)s/computeGCTypes.js", + "[gcTypes]", + "[typeInfo]", + ), + ("gcTypes.txt", "typeInfo.txt"), + ), + "allFunctions": ( + ( + "%(sixgill_bin)s/xdbkeys", + "src_body.xdb", + ), + "allFunctions.txt", + ), + "hazards": (generate_hazards, "rootingHazards.txt"), + "explain": ( + ( + os.environ.get("PYTHON", "python2.7"), + "%(analysis_scriptdir)s/explain.py", + "%(hazards)s", + "%(gcFunctions)s", + "[explained_hazards]", + "[unnecessary]", + "[refs]", + ), + ("hazards.txt", "unnecessary.txt", "refs.txt"), + ), + "heapwrites": ( + ("%(js)s", "%(analysis_scriptdir)s/analyzeHeapWrites.js"), + "heapWriteHazards.txt", + ), +} 
def out_indexes(command): for i in range(len(command)): - m = re.match(r'^\[(.*)\]$', command[i]) + m = re.match(r"^\[(.*)\]$", command[i]) if m: yield (i, m.group(1)) @@ -182,15 +214,15 @@ def out_indexes(command): def run_job(name, config): cmdspec, outfiles = JOBS[name] print("Running " + name + " to generate " + str(outfiles)) - if hasattr(cmdspec, '__call__'): + if hasattr(cmdspec, "__call__"): cmdspec(config, outfiles) else: temp_map = {} cmdspec = fill(cmdspec, config) if isinstance(outfiles, anystring_t): - stdout_filename = '%s.tmp' % name + stdout_filename = "%s.tmp" % name temp_map[stdout_filename] = outfiles - if config['verbose']: + if config["verbose"]: print_command(cmdspec, outfile=outfiles, env=env(config)) else: stdout_filename = None @@ -199,13 +231,13 @@ def run_job(name, config): for (i, name) in out_indexes(cmdspec): pc[i] = outfiles[outfile] outfile += 1 - if config['verbose']: + if config["verbose"]: print_command(pc, env=env(config)) command = list(cmdspec) outfile = 0 for (i, name) in out_indexes(cmdspec): - command[i] = '%s.tmp' % name + command[i] = "%s.tmp" % name temp_map[command[i]] = outfiles[outfile] outfile += 1 @@ -213,7 +245,7 @@ def run_job(name, config): if stdout_filename is None: subprocess.check_call(command, env=env(config)) else: - with open(stdout_filename, 'w') as output: + with open(stdout_filename, "w") as output: subprocess.check_call(command, stdout=output, env=env(config)) for (temp, final) in temp_map.items(): try: @@ -223,37 +255,79 @@ def run_job(name, config): raise -config = {'analysis_scriptdir': os.path.dirname(__file__)} - -defaults = ['%s/defaults.py' % config['analysis_scriptdir'], - '%s/defaults.py' % os.getcwd()] - -parser = argparse.ArgumentParser(description='Statically analyze build tree for rooting hazards.') -parser.add_argument('step', metavar='STEP', type=str, nargs='?', - help='run starting from this step') -parser.add_argument('--source', metavar='SOURCE', type=str, nargs='?', - help='source code to analyze') -parser.add_argument('--objdir', metavar='DIR', type=str, nargs='?', - help='object directory of compiled files') -parser.add_argument('--js', metavar='JSSHELL', type=str, nargs='?', - help='full path to ctypes-capable JS shell') -parser.add_argument('--upto', metavar='UPTO', type=str, nargs='?', - help='last step to execute') -parser.add_argument('--jobs', '-j', default=None, metavar='JOBS', type=int, - help='number of simultaneous analyzeRoots.js jobs') -parser.add_argument('--list', const=True, nargs='?', type=bool, - help='display available steps') -parser.add_argument('--buildcommand', '--build', '-b', type=str, nargs='?', - help='command to build the tree being analyzed') -parser.add_argument('--tag', '-t', type=str, nargs='?', - help='name of job, also sets build command to "build."') -parser.add_argument('--expect-file', type=str, nargs='?', - help='deprecated option, temporarily still present for backwards ' - 'compatibility') -parser.add_argument('--verbose', '-v', action='count', default=1, - help='Display cut & paste commands to run individual steps') -parser.add_argument('--quiet', '-q', action='count', default=0, - help='Suppress output') +config = {"analysis_scriptdir": os.path.dirname(__file__)} + +defaults = [ + "%s/defaults.py" % config["analysis_scriptdir"], + "%s/defaults.py" % os.getcwd(), +] + +parser = argparse.ArgumentParser( + description="Statically analyze build tree for rooting hazards." 
+) +parser.add_argument( + "step", metavar="STEP", type=str, nargs="?", help="run starting from this step" +) +parser.add_argument( + "--source", metavar="SOURCE", type=str, nargs="?", help="source code to analyze" +) +parser.add_argument( + "--objdir", + metavar="DIR", + type=str, + nargs="?", + help="object directory of compiled files", +) +parser.add_argument( + "--js", + metavar="JSSHELL", + type=str, + nargs="?", + help="full path to ctypes-capable JS shell", +) +parser.add_argument( + "--upto", metavar="UPTO", type=str, nargs="?", help="last step to execute" +) +parser.add_argument( + "--jobs", + "-j", + default=None, + metavar="JOBS", + type=int, + help="number of simultaneous analyzeRoots.js jobs", +) +parser.add_argument( + "--list", const=True, nargs="?", type=bool, help="display available steps" +) +parser.add_argument( + "--buildcommand", + "--build", + "-b", + type=str, + nargs="?", + help="command to build the tree being analyzed", +) +parser.add_argument( + "--tag", + "-t", + type=str, + nargs="?", + help='name of job, also sets build command to "build."', +) +parser.add_argument( + "--expect-file", + type=str, + nargs="?", + help="deprecated option, temporarily still present for backwards " "compatibility", +) +parser.add_argument( + "--verbose", + "-v", + action="count", + default=1, + help="Display cut & paste commands to run individual steps", +) +parser.add_argument("--quiet", "-q", action="count", default=0, help="Suppress output") args = parser.parse_args() args.verbose = max(0, args.verbose - args.quiet) @@ -276,42 +350,53 @@ def run_job(name, config): args.buildcommand = "build.%s" % args.tag if args.jobs is not None: - data['jobs'] = args.jobs -if not data.get('jobs'): - data['jobs'] = int(subprocess.check_output(['nproc', '--ignore=1']).strip()) + data["jobs"] = args.jobs +if not data.get("jobs"): + data["jobs"] = int(subprocess.check_output(["nproc", "--ignore=1"]).strip()) if args.buildcommand: - data['buildcommand'] = args.buildcommand -elif 'BUILD' in os.environ: - data['buildcommand'] = os.environ['BUILD'] + data["buildcommand"] = args.buildcommand +elif "BUILD" in os.environ: + data["buildcommand"] = os.environ["BUILD"] else: - data['buildcommand'] = 'make -j4 -s' + data["buildcommand"] = "make -j4 -s" -if 'ANALYZED_OBJDIR' in os.environ: - data['objdir'] = os.environ['ANALYZED_OBJDIR'] +if "ANALYZED_OBJDIR" in os.environ: + data["objdir"] = os.environ["ANALYZED_OBJDIR"] -if 'SOURCE' in os.environ: - data['source'] = os.environ['SOURCE'] +if "SOURCE" in os.environ: + data["source"] = os.environ["SOURCE"] -if data.get('sixgill_bin'): - if not data.get('source'): +if data.get("sixgill_bin"): + if not data.get("source"): path = subprocess.check_output( - ['sh', '-c', - data['sixgill_bin'] + '/xdbkeys file_source.xdb | grep jsapi.cpp']).decode() - data['source'] = path.replace("\n", "").replace("/js/src/jsapi.cpp", "") - if not data.get('objdir'): + [ + "sh", + "-c", + data["sixgill_bin"] + "/xdbkeys file_source.xdb | grep jsapi.cpp", + ] + ).decode() + data["source"] = path.replace("\n", "").replace("/js/src/jsapi.cpp", "") + if not data.get("objdir"): path = subprocess.check_output( - ['sh', '-c', data['sixgill_bin'] + '/xdbkeys file_source.xdb | grep jsapi.h']).decode() - data['objdir'] = path.replace("\n", "").replace("/jsapi.h", "") - -steps = ['dbs', - 'gcTypes', - 'callgraph', - 'gcFunctions', - 'allFunctions', - 'hazards', - 'explain', - 'heapwrites'] + [ + "sh", + "-c", + data["sixgill_bin"] + "/xdbkeys file_source.xdb | grep jsapi.h", + ] + ).decode() 
+ data["objdir"] = path.replace("\n", "").replace("/jsapi.h", "") + +steps = [ + "dbs", + "gcTypes", + "callgraph", + "gcFunctions", + "allFunctions", + "hazards", + "explain", + "heapwrites", +] if args.list: for step in steps: @@ -331,14 +416,19 @@ def run_job(name, config): for (i, name) in out_indexes(command): data[name] = outfiles[outfile] outfile += 1 - assert len(outfiles) == outfile, 'step \'%s\': mismatched number of output files (%d) and params (%d)' % ( # NOQA: E501 - step, outfile, len(outfiles)) + assert ( + len(outfiles) == outfile + ), "step '%s': mismatched number of output files (%d) and params (%d)" % ( + step, + outfile, + len(outfiles), + ) # NOQA: E501 if args.step: - steps = steps[steps.index(args.step):] + steps = steps[steps.index(args.step) :] if args.upto: - steps = steps[:steps.index(args.upto)+1] + steps = steps[: steps.index(args.upto) + 1] for step in steps: run_job(step, data) diff --git a/js/src/devtools/rootAnalysis/explain.py b/js/src/devtools/rootAnalysis/explain.py index 373475e69d685c..993725273c783f 100755 --- a/js/src/devtools/rootAnalysis/explain.py +++ b/js/src/devtools/rootAnalysis/explain.py @@ -11,12 +11,12 @@ from collections import defaultdict -parser = argparse.ArgumentParser(description='Process some integers.') -parser.add_argument('rootingHazards', nargs='?', default='rootingHazards.txt') -parser.add_argument('gcFunctions', nargs='?', default='gcFunctions.txt') -parser.add_argument('hazards', nargs='?', default='hazards.txt') -parser.add_argument('extra', nargs='?', default='unnecessary.txt') -parser.add_argument('refs', nargs='?', default='refs.txt') +parser = argparse.ArgumentParser(description="Process some integers.") +parser.add_argument("rootingHazards", nargs="?", default="rootingHazards.txt") +parser.add_argument("gcFunctions", nargs="?", default="gcFunctions.txt") +parser.add_argument("hazards", nargs="?", default="hazards.txt") +parser.add_argument("extra", nargs="?", default="unnecessary.txt") +parser.add_argument("refs", nargs="?", default="refs.txt") args = parser.parse_args() num_hazards = 0 @@ -24,10 +24,9 @@ num_missing = 0 try: - with open(args.rootingHazards) as rootingHazards, \ - open(args.hazards, 'w') as hazards, \ - open(args.extra, 'w') as extra, \ - open(args.refs, 'w') as refs: + with open(args.rootingHazards) as rootingHazards, open( + args.hazards, "w" + ) as hazards, open(args.extra, "w") as extra, open(args.refs, "w") as refs: current_gcFunction = None # Map from a GC function name to the list of hazards resulting from @@ -42,37 +41,43 @@ fileOfFunction = {} for line in rootingHazards: - m = re.match(r'^Time: (.*)', line) - mm = re.match(r'^Run on:', line) + m = re.match(r"^Time: (.*)", line) + mm = re.match(r"^Run on:", line) if m or mm: print(line, file=hazards) print(line, file=extra) print(line, file=refs) continue - m = re.match(r'^Function.*has unnecessary root', line) + m = re.match(r"^Function.*has unnecessary root", line) if m: print(line, file=extra) continue - m = re.match(r'^Function.*takes unsafe address of unrooted', line) + m = re.match(r"^Function.*takes unsafe address of unrooted", line) if m: num_refs += 1 print(line, file=refs) continue m = re.match( - r"^Function.*has unrooted.*of type.*live across GC call '(.*?)' at (\S+):\d+$", line) # NOQA: E501 + r"^Function.*has unrooted.*of type.*live across GC call '(.*?)' at (\S+):\d+$", + line, + ) # NOQA: E501 if m: current_gcFunction = m.group(1) hazardousGCFunctions[current_gcFunction].append(line) - hazardOrder.append((current_gcFunction, - 
len(hazardousGCFunctions[current_gcFunction]) - 1)) + hazardOrder.append( + ( + current_gcFunction, + len(hazardousGCFunctions[current_gcFunction]) - 1, + ) + ) num_hazards += 1 fileOfFunction[current_gcFunction] = m.group(2) continue - m = re.match(r'Function.*expected hazard.*but none were found', line) + m = re.match(r"Function.*expected hazard.*but none were found", line) if m: num_missing += 1 print(line + "\n", file=hazards) @@ -91,7 +96,7 @@ current_func = None explanation = None for line in gcFunctions: - m = re.match(r'^GC Function: (.*)', line) + m = re.match(r"^GC Function: (.*)", line) if m: if current_func: gcExplanations[current_func] = explanation @@ -113,9 +118,12 @@ print(gcHazards[index], file=hazards) except IOError as e: - print('Failed: %s' % str(e)) + print("Failed: %s" % str(e)) print("Wrote %s" % args.hazards) print("Wrote %s" % args.extra) print("Wrote %s" % args.refs) -print("Found %d hazards %d unsafe references %d missing" % (num_hazards, num_refs, num_missing)) +print( + "Found %d hazards %d unsafe references %d missing" + % (num_hazards, num_refs, num_missing) +) diff --git a/js/src/devtools/rootAnalysis/mach_commands.py b/js/src/devtools/rootAnalysis/mach_commands.py index c589f23caca58f..9aa3e2dd939780 100644 --- a/js/src/devtools/rootAnalysis/mach_commands.py +++ b/js/src/devtools/rootAnalysis/mach_commands.py @@ -35,18 +35,21 @@ # of a decorator, this could be straight code that edits eg # MachCommands.build_shell._mach_command.arguments, but that looked uglier. def inherit_command_args(command, subcommand=None): - '''Decorator for inheriting all command-line arguments from `mach build`. + """Decorator for inheriting all command-line arguments from `mach build`. This should come earlier in the source file than @Command or @SubCommand, - because it relies on that decorator having run first.''' + because it relies on that decorator having run first.""" def inherited(func): handler = Registrar.command_handlers.get(command) if handler is not None and subcommand is not None: handler = handler.subcommand_handlers.get(subcommand) if handler is None: - raise MachError("{} command unknown or not yet loaded".format( - command if subcommand is None else command + " " + subcommand)) + raise MachError( + "{} command unknown or not yet loaded".format( + command if subcommand is None else command + " " + subcommand + ) + ) func._mach_command.arguments.extend(handler.arguments) return func @@ -55,15 +58,13 @@ def inherited(func): @CommandProvider class MachCommands(MachCommandBase): - @property def state_dir(self): - return os.environ.get('MOZBUILD_STATE_PATH', - os.path.expanduser('~/.mozbuild')) + return os.environ.get("MOZBUILD_STATE_PATH", os.path.expanduser("~/.mozbuild")) @property def tools_dir(self): - return os.path.join(self.state_dir, 'hazard-tools') + return os.path.join(self.state_dir, "hazard-tools") def ensure_tools_dir(self): dir = self.tools_dir @@ -75,11 +76,11 @@ def ensure_tools_dir(self): @property def sixgill_dir(self): - return os.path.join(self.tools_dir, 'sixgill') + return os.path.join(self.tools_dir, "sixgill") @property def gcc_dir(self): - return os.path.join(self.tools_dir, 'gcc') + return os.path.join(self.tools_dir, "gcc") @property def work_dir(self): @@ -97,74 +98,86 @@ def ensure_work_dir(self): def script_dir(self): return os.path.join(self.topsrcdir, "js/src/devtools/rootAnalysis") - @Command('hazards', category='build', order='declaration', - description='Commands for running the static analysis for GC rooting hazards') + @Command( + 
"hazards", + category="build", + order="declaration", + description="Commands for running the static analysis for GC rooting hazards", + ) def hazards(self): """Commands related to performing the GC rooting hazard analysis""" print("See `mach hazards --help` for a list of subcommands") - @inherit_command_args('artifact', 'toolchain') - @SubCommand('hazards', 'bootstrap', - description='Install prerequisites for the hazard analysis') + @inherit_command_args("artifact", "toolchain") + @SubCommand( + "hazards", + "bootstrap", + description="Install prerequisites for the hazard analysis", + ) def bootstrap(self, **kwargs): orig_dir = os.getcwd() os.chdir(self.ensure_tools_dir()) try: - kwargs['from_build'] = ('linux64-gcc-sixgill', 'linux64-gcc-8') + kwargs["from_build"] = ("linux64-gcc-sixgill", "linux64-gcc-8") self._mach_context.commands.dispatch( - 'artifact', self._mach_context, subcommand='toolchain', - **kwargs + "artifact", self._mach_context, subcommand="toolchain", **kwargs ) finally: os.chdir(orig_dir) - @inherit_command_args('build') - @SubCommand('hazards', 'build-shell', - description='Build a shell for the hazard analysis') - @CommandArgument('--mozconfig', default=None, metavar='FILENAME', - help='Build with the given mozconfig.') + @inherit_command_args("build") + @SubCommand( + "hazards", "build-shell", description="Build a shell for the hazard analysis" + ) + @CommandArgument( + "--mozconfig", + default=None, + metavar="FILENAME", + help="Build with the given mozconfig.", + ) def build_shell(self, **kwargs): - '''Build a JS shell to use to run the rooting hazard analysis.''' + """Build a JS shell to use to run the rooting hazard analysis.""" # The JS shell requires some specific configuration settings to execute # the hazard analysis code, and configuration is done via mozconfig. # Subprocesses find MOZCONFIG in the environment, so we can't just # modify the settings in this process's loaded version. Pass it through # the environment. - default_mozconfig = 'js/src/devtools/rootAnalysis/mozconfig.haz_shell' - mozconfig_path = kwargs.pop('mozconfig', None) \ - or os.environ.get('MOZCONFIG') \ + default_mozconfig = "js/src/devtools/rootAnalysis/mozconfig.haz_shell" + mozconfig_path = ( + kwargs.pop("mozconfig", None) + or os.environ.get("MOZCONFIG") or default_mozconfig + ) mozconfig_path = os.path.join(self.topsrcdir, mozconfig_path) loader = MozconfigLoader(self.topsrcdir) mozconfig = loader.read_mozconfig(mozconfig_path) # Validate the mozconfig settings in case the user overrode the default. - configure_args = mozconfig['configure_args'] - if '--enable-ctypes' not in configure_args: - raise FailedCommandError('ctypes required in hazard JS shell') + configure_args = mozconfig["configure_args"] + if "--enable-ctypes" not in configure_args: + raise FailedCommandError("ctypes required in hazard JS shell") # Transmit the mozconfig location to build subprocesses. - os.environ['MOZCONFIG'] = mozconfig_path + os.environ["MOZCONFIG"] = mozconfig_path # Record the location of the JS shell in the analysis work dir. 
- self.write_json_file("shell.json", { - 'js': os.path.join(mozconfig['topobjdir'], "dist/bin/js") - }) + self.write_json_file( + "shell.json", {"js": os.path.join(mozconfig["topobjdir"], "dist/bin/js")} + ) return self._mach_context.commands.dispatch( - 'build', self._mach_context, **kwargs + "build", self._mach_context, **kwargs ) def check_application(self, requested_app, objdir=None): - '''Verify that the objdir and work dir are for the expected application - ''' + """Verify that the objdir and work dir are for the expected application""" try: - work_dir_app = self.read_json_file('app.json')['application'] + work_dir_app = self.read_json_file("app.json")["application"] if work_dir_app != requested_app: raise FailedCommandError( - 'work dir {} is for the wrong app {}'.format( + "work dir {} is for the wrong app {}".format( self.work_dir, work_dir_app ) ) @@ -175,11 +188,11 @@ def check_application(self, requested_app, objdir=None): try: if not objdir: objdir = self.topobjdir - mozinfo = self.read_json_file(os.path.join(objdir, 'mozinfo.json')) - if mozinfo.get('buildapp') != requested_app: + mozinfo = self.read_json_file(os.path.join(objdir, "mozinfo.json")) + if mozinfo.get("buildapp") != requested_app: raise FailedCommandError( - 'objdir {} is for the wrong app {}, clobber required'.format( - objdir, mozinfo.get('buildapp') + "objdir {} is for the wrong app {}, clobber required".format( + objdir, mozinfo.get("buildapp") ) ) except (OSError, IOError): @@ -196,25 +209,30 @@ def read_json_file(self, filename): def ensure_shell(self): try: - return self.read_json_file("shell.json")['js'] + return self.read_json_file("shell.json")["js"] except OSError: raise FailedCommandError( - 'must build the JS shell with `mach hazards build-shell` first' + "must build the JS shell with `mach hazards build-shell` first" ) - @SubCommand('hazards', 'gather', - description='Gather analysis data by compiling the given application') - @CommandArgument('--application', default='browser', - help='Build the given application.') + @SubCommand( + "hazards", + "gather", + description="Gather analysis data by compiling the given application", + ) + @CommandArgument( + "--application", default="browser", help="Build the given application." 
+ ) def gather_hazard_data(self, application=None): - '''Gather analysis information by compiling the tree''' + """Gather analysis information by compiling the tree""" shell_path = self.ensure_shell() objdir = os.path.join(self.topsrcdir, "obj-analyzed") self.check_application(application, objdir) - self.write_json_file('app.json', {'application': application}) + self.write_json_file("app.json", {"application": application}) with open(os.path.join(self.work_dir, "defaults.py"), "wt") as fh: - data = textwrap.dedent('''\ + data = textwrap.dedent( + """\ js = "{js}" analysis_scriptdir = "{script_dir}" objdir = "{objdir}" @@ -222,54 +240,62 @@ def gather_hazard_data(self, application=None): sixgill = "{sixgill_dir}/usr/libexec/sixgill" sixgill_bin = "{sixgill_dir}/usr/bin" gcc_bin = "{gcc_dir}/bin" - ''').format( + """ + ).format( js=shell_path, script_dir=self.script_dir, objdir=objdir, srcdir=self.topsrcdir, sixgill_dir=self.sixgill_dir, - gcc_dir=self.gcc_dir) + gcc_dir=self.gcc_dir, + ) fh.write(data) - buildscript = '{srcdir}/mach hazards compile --application={app}'.format( - srcdir=self.topsrcdir, - app=application + buildscript = "{srcdir}/mach hazards compile --application={app}".format( + srcdir=self.topsrcdir, app=application ) args = [ os.path.join(self.script_dir, "analyze.py"), - 'dbs', '--upto', 'dbs', - '-v', - '--buildcommand=' + buildscript, + "dbs", + "--upto", + "dbs", + "-v", + "--buildcommand=" + buildscript, ] return self.run_process(args=args, cwd=self.work_dir, pass_thru=True) - @inherit_command_args('build') - @SubCommand('hazards', 'compile', description=argparse.SUPPRESS) - @CommandArgument('--mozconfig', default=None, metavar='FILENAME', - help='Build with the given mozconfig.') - @CommandArgument('--application', default='browser', - help='Build the given application.') + @inherit_command_args("build") + @SubCommand("hazards", "compile", description=argparse.SUPPRESS) + @CommandArgument( + "--mozconfig", + default=None, + metavar="FILENAME", + help="Build with the given mozconfig.", + ) + @CommandArgument( + "--application", default="browser", help="Build the given application." + ) def inner_compile(self, **kwargs): - '''Build a source tree and gather analysis information while running - under the influence of the analysis collection server.''' + """Build a source tree and gather analysis information while running + under the influence of the analysis collection server.""" env = os.environ # Check whether we are running underneath the manager (and therefore # have a server to talk to). - if 'XGILL_CONFIG' not in env: + if "XGILL_CONFIG" not in env: raise Exception( - 'no sixgill manager detected. `mach hazards compile` ' + - 'should only be run from `mach hazards gather`' + "no sixgill manager detected. `mach hazards compile` " + + "should only be run from `mach hazards gather`" ) - app = kwargs.pop('application') + app = kwargs.pop("application") self.check_application(app) - default_mozconfig = 'js/src/devtools/rootAnalysis/mozconfig.%s' % app - mozconfig_path = kwargs.pop('mozconfig', None) \ - or env.get('MOZCONFIG') \ - or default_mozconfig + default_mozconfig = "js/src/devtools/rootAnalysis/mozconfig.%s" % app + mozconfig_path = ( + kwargs.pop("mozconfig", None) or env.get("MOZCONFIG") or default_mozconfig + ) mozconfig_path = os.path.join(self.topsrcdir, mozconfig_path) # Validate the mozconfig. @@ -278,41 +304,40 @@ def inner_compile(self, **kwargs): # want to build the default browser application.) 
loader = MozconfigLoader(self.topsrcdir) mozconfig = loader.read_mozconfig(mozconfig_path) - configure_args = mozconfig['configure_args'] - if '--enable-application=%s' % app not in configure_args: - raise Exception('mozconfig %s builds wrong project' % mozconfig_path) - if not any('--with-compiler-wrapper' in a for a in configure_args): - raise Exception('mozconfig must wrap compiles') + configure_args = mozconfig["configure_args"] + if "--enable-application=%s" % app not in configure_args: + raise Exception("mozconfig %s builds wrong project" % mozconfig_path) + if not any("--with-compiler-wrapper" in a for a in configure_args): + raise Exception("mozconfig must wrap compiles") # Communicate mozconfig to build subprocesses. - env['MOZCONFIG'] = os.path.join(self.topsrcdir, mozconfig_path) + env["MOZCONFIG"] = os.path.join(self.topsrcdir, mozconfig_path) # hazard mozconfigs need to find binaries in .mozbuild - env['MOZBUILD_STATE_PATH'] = self.state_dir + env["MOZBUILD_STATE_PATH"] = self.state_dir # Force the use of hazard-compatible installs of tools. - gccbin = os.path.join(self.gcc_dir, 'bin') - env['CC'] = os.path.join(gccbin, 'gcc') - env['CXX'] = os.path.join(gccbin, 'g++') - env['PATH'] = '{sixgill_dir}/usr/bin:{gccbin}:{PATH}'.format( - sixgill_dir=self.sixgill_dir, - gccbin=gccbin, - PATH=env['PATH'] + gccbin = os.path.join(self.gcc_dir, "bin") + env["CC"] = os.path.join(gccbin, "gcc") + env["CXX"] = os.path.join(gccbin, "g++") + env["PATH"] = "{sixgill_dir}/usr/bin:{gccbin}:{PATH}".format( + sixgill_dir=self.sixgill_dir, gccbin=gccbin, PATH=env["PATH"] ) - env['LD_LIBRARY_PATH'] = '{}/lib64'.format(self.gcc_dir) + env["LD_LIBRARY_PATH"] = "{}/lib64".format(self.gcc_dir) return self._mach_context.commands.dispatch( - 'build', self._mach_context, **kwargs + "build", self._mach_context, **kwargs ) - @SubCommand('hazards', 'analyze', - description='Analyzed gathered data for rooting hazards') + @SubCommand( + "hazards", "analyze", description="Analyzed gathered data for rooting hazards" + ) def analyze(self): - '''Analyzed gathered data for rooting hazards''' + """Analyzed gathered data for rooting hazards""" args = [ os.path.join(self.script_dir, "analyze.py"), - 'gcTypes', - '-v', + "gcTypes", + "-v", ] return self.run_process(args=args, cwd=self.work_dir, pass_thru=True) diff --git a/js/src/devtools/rootAnalysis/run-test.py b/js/src/devtools/rootAnalysis/run-test.py index 0ab8a9caf47d91..0ab2be4d8bf054 100755 --- a/js/src/devtools/rootAnalysis/run-test.py +++ b/js/src/devtools/rootAnalysis/run-test.py @@ -13,49 +13,52 @@ from glob import glob scriptdir = os.path.abspath(os.path.dirname(__file__)) -testdir = os.path.join(scriptdir, 't') +testdir = os.path.join(scriptdir, "t") site.addsitedir(testdir) from testlib import Test, equal -parser = argparse.ArgumentParser(description='run hazard analysis tests') +parser = argparse.ArgumentParser(description="run hazard analysis tests") parser.add_argument( - '--js', default=os.environ.get('JS'), - help='JS binary to run the tests with') + "--js", default=os.environ.get("JS"), help="JS binary to run the tests with" +) parser.add_argument( - '--sixgill', default=os.environ.get('SIXGILL', os.path.join(testdir, "sixgill")), - help='Path to root of sixgill installation') + "--sixgill", + default=os.environ.get("SIXGILL", os.path.join(testdir, "sixgill")), + help="Path to root of sixgill installation", +) parser.add_argument( - '--sixgill-bin', default=os.environ.get('SIXGILL_BIN'), - help='Path to sixgill binary dir') + "--sixgill-bin", + 
default=os.environ.get("SIXGILL_BIN"), + help="Path to sixgill binary dir", +) parser.add_argument( - '--sixgill-plugin', default=os.environ.get('SIXGILL_PLUGIN'), - help='Full path to sixgill gcc plugin') + "--sixgill-plugin", + default=os.environ.get("SIXGILL_PLUGIN"), + help="Full path to sixgill gcc plugin", +) parser.add_argument( - '--gccdir', default=os.environ.get('GCCDIR'), - help='Path to GCC installation dir') + "--gccdir", default=os.environ.get("GCCDIR"), help="Path to GCC installation dir" +) +parser.add_argument("--cc", default=os.environ.get("CC"), help="Path to gcc") +parser.add_argument("--cxx", default=os.environ.get("CXX"), help="Path to g++") parser.add_argument( - '--cc', default=os.environ.get('CC'), - help='Path to gcc') + "--verbose", + "-v", + action="store_true", + help="Display verbose output, including commands executed", +) parser.add_argument( - '--cxx', default=os.environ.get('CXX'), - help='Path to g++') -parser.add_argument( - '--verbose', '-v', action='store_true', - help='Display verbose output, including commands executed') -parser.add_argument( - 'tests', nargs='*', default=[ - 'sixgill-tree', - 'suppression', - 'hazards', - 'exceptions', - 'virtual'], - help='tests to run') + "tests", + nargs="*", + default=["sixgill-tree", "suppression", "hazards", "exceptions", "virtual"], + help="tests to run", +) cfg = parser.parse_args() if not cfg.js: - exit('Must specify JS binary through environment variable or --js option') + exit("Must specify JS binary through environment variable or --js option") if not cfg.cc: if cfg.gccdir: cfg.cc = os.path.join(cfg.gccdir, "bin", "gcc") @@ -69,9 +72,13 @@ if not cfg.sixgill_bin: cfg.sixgill_bin = os.path.join(cfg.sixgill, "usr", "bin") if not cfg.sixgill_plugin: - cfg.sixgill_plugin = os.path.join(cfg.sixgill, "usr", "libexec", "sixgill", "gcc", "xgill.so") + cfg.sixgill_plugin = os.path.join( + cfg.sixgill, "usr", "libexec", "sixgill", "gcc", "xgill.so" + ) -subprocess.check_call([cfg.js, '-e', 'if (!getBuildConfiguration()["has-ctypes"]) quit(1)']) +subprocess.check_call( + [cfg.js, "-e", 'if (!getBuildConfiguration()["has-ctypes"]) quit(1)'] +) def binpath(prog): @@ -82,13 +89,13 @@ def make_dir(dirname, exist_ok=True): try: os.mkdir(dirname) except OSError as e: - if exist_ok and e.strerror == 'File exists': + if exist_ok and e.strerror == "File exists": pass else: raise -outroot = os.path.join(testdir, 'out') +outroot = os.path.join(testdir, "out") make_dir(outroot) for name in cfg.tests: @@ -105,9 +112,9 @@ def make_dir(dirname, exist_ok=True): print("START TEST {}".format(name), flush=True) testpath = os.path.join(indir, "test.py") testscript = open(testpath).read() - testcode = compile(testscript, testpath, 'exec') + testcode = compile(testscript, testpath, "exec") try: - exec(testcode, {'test': test, 'equal': equal}) + exec(testcode, {"test": test, "equal": equal}) except subprocess.CalledProcessError: print("TEST-FAILED: %s" % name) except AssertionError: diff --git a/js/src/devtools/rootAnalysis/t/exceptions/test.py b/js/src/devtools/rootAnalysis/t/exceptions/test.py index 68abbc6553aed5..a40753d87ae00d 100644 --- a/js/src/devtools/rootAnalysis/t/exceptions/test.py +++ b/js/src/devtools/rootAnalysis/t/exceptions/test.py @@ -1,21 +1,21 @@ # flake8: noqa: F821 -test.compile("source.cpp", '-fno-exceptions') -test.run_analysis_script('gcTypes') +test.compile("source.cpp", "-fno-exceptions") +test.run_analysis_script("gcTypes") hazards = test.load_hazards() -assert(len(hazards) == 0) +assert len(hazards) == 0 # If 
we compile with exceptions, then there *should* be a hazard because # AutoSomething::AutoSomething might throw an exception, which would cause the # partially-constructed value to be torn down, which will call ~RAII_GC. -test.compile("source.cpp", '-fexceptions') -test.run_analysis_script('gcTypes') +test.compile("source.cpp", "-fexceptions") +test.run_analysis_script("gcTypes") hazards = test.load_hazards() -assert(len(hazards) == 1) +assert len(hazards) == 1 hazard = hazards[0] -assert(hazard.function == 'void f()') -assert(hazard.variable == 'thing') -assert("AutoSomething::AutoSomething" in hazard.GCFunction) +assert hazard.function == "void f()" +assert hazard.variable == "thing" +assert "AutoSomething::AutoSomething" in hazard.GCFunction diff --git a/js/src/devtools/rootAnalysis/t/hazards/test.py b/js/src/devtools/rootAnalysis/t/hazards/test.py index 48633daa58e09c..eb53946b1ba533 100644 --- a/js/src/devtools/rootAnalysis/t/hazards/test.py +++ b/js/src/devtools/rootAnalysis/t/hazards/test.py @@ -1,66 +1,68 @@ # flake8: noqa: F821 test.compile("source.cpp") -test.run_analysis_script('gcTypes') +test.run_analysis_script("gcTypes") # gcFunctions should be the inverse, but we get to rely on unmangled names here. gcFunctions = test.load_gcFunctions() print(gcFunctions) -assert('void GC()' in gcFunctions) -assert('void suppressedFunction()' not in gcFunctions) -assert('void halfSuppressedFunction()' in gcFunctions) -assert('void unsuppressedFunction()' in gcFunctions) -assert('Cell* f()' in gcFunctions) +assert "void GC()" in gcFunctions +assert "void suppressedFunction()" not in gcFunctions +assert "void halfSuppressedFunction()" in gcFunctions +assert "void unsuppressedFunction()" in gcFunctions +assert "Cell* f()" in gcFunctions hazards = test.load_hazards() hazmap = {haz.variable: haz for haz in hazards} -assert('cell1' not in hazmap) -assert('cell2' in hazmap) -assert('cell3' in hazmap) -assert('cell4' not in hazmap) -assert('cell5' not in hazmap) -assert('cell6' not in hazmap) -assert('' in hazmap) +assert "cell1" not in hazmap +assert "cell2" in hazmap +assert "cell3" in hazmap +assert "cell4" not in hazmap +assert "cell5" not in hazmap +assert "cell6" not in hazmap +assert "" in hazmap # All hazards should be in f(), loopy(), and safevals() -assert(hazmap['cell2'].function == 'Cell* f()') +assert hazmap["cell2"].function == "Cell* f()" print(len(set(haz.function for haz in hazards))) -assert(len(set(haz.function for haz in hazards)) == 3) +assert len(set(haz.function for haz in hazards)) == 3 # Check that the correct GC call is reported for each hazard. (cell3 has a # hazard from two different GC calls; it doesn't really matter which is # reported.) -assert(hazmap['cell2'].GCFunction == 'void halfSuppressedFunction()') -assert(hazmap['cell3'].GCFunction in ( - 'void halfSuppressedFunction()', 'void unsuppressedFunction()')) -assert(hazmap[''].GCFunction == 'void GCInDestructor::~GCInDestructor()') +assert hazmap["cell2"].GCFunction == "void halfSuppressedFunction()" +assert hazmap["cell3"].GCFunction in ( + "void halfSuppressedFunction()", + "void unsuppressedFunction()", +) +assert hazmap[""].GCFunction == "void GCInDestructor::~GCInDestructor()" -assert('container1' in hazmap); -assert('container2' not in hazmap); +assert "container1" in hazmap +assert "container2" not in hazmap # Type names are handy to have in the report. 
-assert(hazmap['cell2'].type == 'Cell*') -assert(hazmap[''].type == 'Cell*') +assert hazmap["cell2"].type == "Cell*" +assert hazmap[""].type == "Cell*" # loopy hazards. See comments in source. -assert('haz1' not in hazmap) -assert('haz2' not in hazmap) -assert('haz3' in hazmap) -assert('haz4' in hazmap) -assert('haz5' in hazmap) -assert('haz6' not in hazmap) -assert('haz7' not in hazmap) -assert('haz8' in hazmap) +assert "haz1" not in hazmap +assert "haz2" not in hazmap +assert "haz3" in hazmap +assert "haz4" in hazmap +assert "haz5" in hazmap +assert "haz6" not in hazmap +assert "haz7" not in hazmap +assert "haz8" in hazmap # safevals hazards. See comments in source. -assert('unsafe1' in hazmap) -assert('safe2' not in hazmap) -assert('unsafe3' in hazmap) -assert('unsafe3b' in hazmap) -assert('unsafe4' in hazmap) -assert('safe5' not in hazmap) -assert('safe6' not in hazmap) -assert('unsafe7' in hazmap) -assert('safe8' not in hazmap) -assert('safe9' not in hazmap) -assert('safe10' not in hazmap) +assert "unsafe1" in hazmap +assert "safe2" not in hazmap +assert "unsafe3" in hazmap +assert "unsafe3b" in hazmap +assert "unsafe4" in hazmap +assert "safe5" not in hazmap +assert "safe6" not in hazmap +assert "unsafe7" in hazmap +assert "safe8" not in hazmap +assert "safe9" not in hazmap +assert "safe10" not in hazmap diff --git a/js/src/devtools/rootAnalysis/t/sixgill-tree/test.py b/js/src/devtools/rootAnalysis/t/sixgill-tree/test.py index 2a9a6e56d94a48..5e99fff908ef36 100644 --- a/js/src/devtools/rootAnalysis/t/sixgill-tree/test.py +++ b/js/src/devtools/rootAnalysis/t/sixgill-tree/test.py @@ -3,59 +3,61 @@ test.compile("source.cpp") test.computeGCTypes() -body = test.process_body(test.load_db_entry("src_body", re.compile(r'root_arg'))[0]) +body = test.process_body(test.load_db_entry("src_body", re.compile(r"root_arg"))[0]) # Rendering positive and negative integers -marker1 = body.assignment_line('MARKER1') -equal(body.edge_from_line(marker1 + 2)['Exp'][1]['String'], '1') -equal(body.edge_from_line(marker1 + 3)['Exp'][1]['String'], '-1') +marker1 = body.assignment_line("MARKER1") +equal(body.edge_from_line(marker1 + 2)["Exp"][1]["String"], "1") +equal(body.edge_from_line(marker1 + 3)["Exp"][1]["String"], "-1") -equal(body.edge_from_point(body.assignment_point('u1'))['Exp'][1]['String'], '1') -equal(body.edge_from_point(body.assignment_point('u2'))['Exp'][1]['String'], '4294967295') +equal(body.edge_from_point(body.assignment_point("u1"))["Exp"][1]["String"], "1") +equal( + body.edge_from_point(body.assignment_point("u2"))["Exp"][1]["String"], "4294967295" +) -assert('obj' in body['Variables']) -assert('random' in body['Variables']) -assert('other1' in body['Variables']) -assert('other2' in body['Variables']) +assert "obj" in body["Variables"] +assert "random" in body["Variables"] +assert "other1" in body["Variables"] +assert "other2" in body["Variables"] # Test function annotations -js_GC = test.process_body(test.load_db_entry("src_body", re.compile(r'js_GC'))[0]) -annotations = js_GC['Variables']['void js_GC()']['Annotation'] -assert(annotations) +js_GC = test.process_body(test.load_db_entry("src_body", re.compile(r"js_GC"))[0]) +annotations = js_GC["Variables"]["void js_GC()"]["Annotation"] +assert annotations found_call_annotate = False for annotation in annotations: - (annType, value) = annotation['Name'] - if annType == 'annotate' and value == 'GC Call': + (annType, value) = annotation["Name"] + if annType == "annotate" and value == "GC Call": found_call_annotate = True 
-assert(found_call_annotate) +assert found_call_annotate # Test type annotations # js::gc::Cell first -cell = test.load_db_entry("src_comp", 'js::gc::Cell')[0] -assert(cell['Kind'] == 'Struct') -annotations = cell['Annotation'] -assert(len(annotations) == 1) -(tag, value) = annotations[0]['Name'] -assert(tag == 'annotate') -assert(value == 'GC Thing') +cell = test.load_db_entry("src_comp", "js::gc::Cell")[0] +assert cell["Kind"] == "Struct" +annotations = cell["Annotation"] +assert len(annotations) == 1 +(tag, value) = annotations[0]["Name"] +assert tag == "annotate" +assert value == "GC Thing" # Check JSObject inheritance. -JSObject = test.load_db_entry("src_comp", 'JSObject')[0] -bases = [b['Base'] for b in JSObject['CSUBaseClass']] -assert('js::gc::Cell' in bases) -assert('Bogon' in bases) -assert(len(bases) == 2) +JSObject = test.load_db_entry("src_comp", "JSObject")[0] +bases = [b["Base"] for b in JSObject["CSUBaseClass"]] +assert "js::gc::Cell" in bases +assert "Bogon" in bases +assert len(bases) == 2 # Check type analysis gctypes = test.load_gcTypes() -assert('js::gc::Cell' in gctypes['GCThings']) -assert('JustACell' in gctypes['GCThings']) -assert('JSObject' in gctypes['GCThings']) -assert('SpecialObject' in gctypes['GCThings']) -assert('UnrootedPointer' in gctypes['GCPointers']) -assert('Bogon' not in gctypes['GCThings']) -assert('Bogon' not in gctypes['GCPointers']) -assert('ErrorResult' not in gctypes['GCPointers']) -assert('OkContainer' not in gctypes['GCPointers']) -assert('class Rooted' not in gctypes['GCPointers']) +assert "js::gc::Cell" in gctypes["GCThings"] +assert "JustACell" in gctypes["GCThings"] +assert "JSObject" in gctypes["GCThings"] +assert "SpecialObject" in gctypes["GCThings"] +assert "UnrootedPointer" in gctypes["GCPointers"] +assert "Bogon" not in gctypes["GCThings"] +assert "Bogon" not in gctypes["GCPointers"] +assert "ErrorResult" not in gctypes["GCPointers"] +assert "OkContainer" not in gctypes["GCPointers"] +assert "class Rooted" not in gctypes["GCPointers"] diff --git a/js/src/devtools/rootAnalysis/t/sixgill.py b/js/src/devtools/rootAnalysis/t/sixgill.py index 5c81f13f95f5e2..0b8c2c7073dbc5 100644 --- a/js/src/devtools/rootAnalysis/t/sixgill.py +++ b/js/src/devtools/rootAnalysis/t/sixgill.py @@ -10,57 +10,61 @@ class Body(dict): def __init__(self, body): - self['BlockIdKind'] = body['BlockId']['Kind'] - if 'Variable' in body['BlockId']: - self['BlockName'] = body['BlockId']['Variable']['Name'][0].split("$")[-1] - loc = body['Location'] - self['LineRange'] = (loc[0]['Line'], loc[1]['Line']) - self['Filename'] = loc[0]['CacheString'] - self['Edges'] = body.get('PEdge', []) - self['Points'] = {i: p['Location']['Line'] for i, p in enumerate(body['PPoint'], 1)} - self['Index'] = body['Index'] - self['Variables'] = {x['Variable']['Name'][0].split( - "$")[-1]: x['Type'] for x in body['DefineVariable']} + self["BlockIdKind"] = body["BlockId"]["Kind"] + if "Variable" in body["BlockId"]: + self["BlockName"] = body["BlockId"]["Variable"]["Name"][0].split("$")[-1] + loc = body["Location"] + self["LineRange"] = (loc[0]["Line"], loc[1]["Line"]) + self["Filename"] = loc[0]["CacheString"] + self["Edges"] = body.get("PEdge", []) + self["Points"] = { + i: p["Location"]["Line"] for i, p in enumerate(body["PPoint"], 1) + } + self["Index"] = body["Index"] + self["Variables"] = { + x["Variable"]["Name"][0].split("$")[-1]: x["Type"] + for x in body["DefineVariable"] + } # Indexes - self['Line2Points'] = defaultdict(list) - for point, line in self['Points'].items(): - 
self['Line2Points'][line].append(point) - self['SrcPoint2Edges'] = defaultdict(list) - for edge in self['Edges']: - src, dst = edge['Index'] - self['SrcPoint2Edges'][src].append(edge) - self['Line2Edges'] = defaultdict(list) - for (src, edges) in self['SrcPoint2Edges'].items(): - line = self['Points'][src] - self['Line2Edges'][line].extend(edges) + self["Line2Points"] = defaultdict(list) + for point, line in self["Points"].items(): + self["Line2Points"][line].append(point) + self["SrcPoint2Edges"] = defaultdict(list) + for edge in self["Edges"]: + src, dst = edge["Index"] + self["SrcPoint2Edges"][src].append(edge) + self["Line2Edges"] = defaultdict(list) + for (src, edges) in self["SrcPoint2Edges"].items(): + line = self["Points"][src] + self["Line2Edges"][line].extend(edges) def edges_from_line(self, line): - return self['Line2Edges'][line] + return self["Line2Edges"][line] def edge_from_line(self, line): edges = self.edges_from_line(line) - assert(len(edges) == 1) + assert len(edges) == 1 return edges[0] def edges_from_point(self, point): - return self['SrcPoint2Edges'][point] + return self["SrcPoint2Edges"][point] def edge_from_point(self, point): edges = self.edges_from_point(point) - assert(len(edges) == 1) + assert len(edges) == 1 return edges[0] def assignment_point(self, varname): - for edge in self['Edges']: - if edge['Kind'] != 'Assign': + for edge in self["Edges"]: + if edge["Kind"] != "Assign": continue - dst = edge['Exp'][0] - if dst['Kind'] != 'Var': + dst = edge["Exp"][0] + if dst["Kind"] != "Var": continue - if dst['Variable']['Name'][0] == varname: - return edge['Index'][0] + if dst["Variable"]["Name"][0] == varname: + return edge["Index"][0] raise Exception("assignment to variable %s not found" % varname) def assignment_line(self, varname): - return self['Points'][self.assignment_point(varname)] + return self["Points"][self.assignment_point(varname)] diff --git a/js/src/devtools/rootAnalysis/t/suppression/test.py b/js/src/devtools/rootAnalysis/t/suppression/test.py index 6e34464592ff6e..b1a1c2f21f8096 100644 --- a/js/src/devtools/rootAnalysis/t/suppression/test.py +++ b/js/src/devtools/rootAnalysis/t/suppression/test.py @@ -1,20 +1,20 @@ # flake8: noqa: F821 test.compile("source.cpp") -test.run_analysis_script('gcTypes', upto='gcFunctions') +test.run_analysis_script("gcTypes", upto="gcFunctions") # The suppressions file uses mangled names. suppressed = test.load_suppressed_functions() # Only one of these is fully suppressed (ie, *always* called within the scope # of an AutoSuppressGC). -assert len(list(filter(lambda f: 'suppressedFunction' in f, suppressed))) == 1 -assert len(list(filter(lambda f: 'halfSuppressedFunction' in f, suppressed))) == 0 -assert len(list(filter(lambda f: 'unsuppressedFunction' in f, suppressed))) == 0 +assert len(list(filter(lambda f: "suppressedFunction" in f, suppressed))) == 1 +assert len(list(filter(lambda f: "halfSuppressedFunction" in f, suppressed))) == 0 +assert len(list(filter(lambda f: "unsuppressedFunction" in f, suppressed))) == 0 # gcFunctions should be the inverse, but we get to rely on unmangled names here. 
gcFunctions = test.load_gcFunctions() -assert 'void GC()' in gcFunctions -assert 'void suppressedFunction()' not in gcFunctions -assert 'void halfSuppressedFunction()' in gcFunctions -assert 'void unsuppressedFunction()' in gcFunctions -assert 'void f()' in gcFunctions +assert "void GC()" in gcFunctions +assert "void suppressedFunction()" not in gcFunctions +assert "void halfSuppressedFunction()" in gcFunctions +assert "void unsuppressedFunction()" in gcFunctions +assert "void f()" in gcFunctions diff --git a/js/src/devtools/rootAnalysis/t/virtual/test.py b/js/src/devtools/rootAnalysis/t/virtual/test.py index fe8999b2484c2f..a0e2a410ea8e2e 100644 --- a/js/src/devtools/rootAnalysis/t/virtual/test.py +++ b/js/src/devtools/rootAnalysis/t/virtual/test.py @@ -2,45 +2,47 @@ # flake8: noqa: F821 test.compile("source.cpp") -test.run_analysis_script('gcTypes') +test.run_analysis_script("gcTypes") info = test.load_typeInfo() -assert 'Sub1' in info['OtherCSUTags'] -assert ['CSU1', 'CSU2'] == sorted(info['OtherCSUTags']['Sub1']) -assert 'Base' in info['OtherFieldTags'] -assert 'someGC' in info['OtherFieldTags']['Base'] -assert 'Sub1' in info['OtherFieldTags'] -assert 'someGC' in info['OtherFieldTags']['Sub1'] -assert ['Sub1 override', 'second attr'] == sorted(info['OtherFieldTags']['Sub1']['someGC']) +assert "Sub1" in info["OtherCSUTags"] +assert ["CSU1", "CSU2"] == sorted(info["OtherCSUTags"]["Sub1"]) +assert "Base" in info["OtherFieldTags"] +assert "someGC" in info["OtherFieldTags"]["Base"] +assert "Sub1" in info["OtherFieldTags"] +assert "someGC" in info["OtherFieldTags"]["Sub1"] +assert ["Sub1 override", "second attr"] == sorted( + info["OtherFieldTags"]["Sub1"]["someGC"] +) gcFunctions = test.load_gcFunctions() -assert 'void Sub1::noneGC()' not in gcFunctions -assert 'void Sub1::someGC()' not in gcFunctions -assert 'void Sub1::allGC()' in gcFunctions -assert 'void Sub2::noneGC()' not in gcFunctions -assert 'void Sub2::someGC()' in gcFunctions -assert 'void Sub2::allGC()' in gcFunctions +assert "void Sub1::noneGC()" not in gcFunctions +assert "void Sub1::someGC()" not in gcFunctions +assert "void Sub1::allGC()" in gcFunctions +assert "void Sub2::noneGC()" not in gcFunctions +assert "void Sub2::someGC()" in gcFunctions +assert "void Sub2::allGC()" in gcFunctions callgraph = test.load_callgraph() -assert callgraph.calleeGraph['void f()']['Super.noneGC'] -assert callgraph.calleeGraph['Super.noneGC']['void Sub1::noneGC()'] -assert callgraph.calleeGraph['Super.noneGC']['void Sub2::noneGC()'] -assert 'void Sibling::noneGC()' not in callgraph.calleeGraph['Super.noneGC'] +assert callgraph.calleeGraph["void f()"]["Super.noneGC"] +assert callgraph.calleeGraph["Super.noneGC"]["void Sub1::noneGC()"] +assert callgraph.calleeGraph["Super.noneGC"]["void Sub2::noneGC()"] +assert "void Sibling::noneGC()" not in callgraph.calleeGraph["Super.noneGC"] hazards = test.load_hazards() hazmap = {haz.variable: haz for haz in hazards} -assert 'c1' not in hazmap -assert 'c2' in hazmap -assert 'c3' in hazmap -assert 'c4' not in hazmap -assert 'c5' in hazmap -assert 'c6' in hazmap -assert 'c7' not in hazmap -assert 'c8' in hazmap -assert 'c9' in hazmap -assert 'c10' in hazmap -assert 'c11' in hazmap +assert "c1" not in hazmap +assert "c2" in hazmap +assert "c3" in hazmap +assert "c4" not in hazmap +assert "c5" in hazmap +assert "c6" in hazmap +assert "c7" not in hazmap +assert "c8" in hazmap +assert "c9" in hazmap +assert "c10" in hazmap +assert "c11" in hazmap diff --git a/js/src/frontend/GenerateReservedWords.py 
b/js/src/frontend/GenerateReservedWords.py index 29d559c136f9b0..dc5641884f6891 100644 --- a/js/src/frontend/GenerateReservedWords.py +++ b/js/src/frontend/GenerateReservedWords.py @@ -11,32 +11,32 @@ def read_reserved_word_list(filename): reserved_word_list = [] index = 0 - with open(filename, 'r') as f: + with open(filename, "r") as f: for line in f: m = macro_pat.search(line) if m: reserved_word_list.append((index, m.group(1))) index += 1 - assert(len(reserved_word_list) != 0) + assert len(reserved_word_list) != 0 return reserved_word_list def line(opt, s): - opt['output'].write('{}{}\n'.format(' ' * opt['indent_level'], s)) + opt["output"].write("{}{}\n".format(" " * opt["indent_level"], s)) def indent(opt): - opt['indent_level'] += 1 + opt["indent_level"] += 1 def dedent(opt): - opt['indent_level'] -= 1 + opt["indent_level"] -= 1 def span_and_count_at(reserved_word_list, column): - assert(len(reserved_word_list) != 0) + assert len(reserved_word_list) != 0 chars_dict = {} for index, word in reserved_word_list: @@ -47,8 +47,8 @@ def span_and_count_at(reserved_word_list, column): def optimal_switch_column(opt, reserved_word_list, columns, unprocessed_columns): - assert(len(reserved_word_list) != 0) - assert(unprocessed_columns != 0) + assert len(reserved_word_list) != 0 + assert unprocessed_columns != 0 min_count = 0 min_span = 0 @@ -57,13 +57,13 @@ def optimal_switch_column(opt, reserved_word_list, columns, unprocessed_columns) for index in range(0, unprocessed_columns): span, count = span_and_count_at(reserved_word_list, columns[index]) - assert(span != 0) + assert span != 0 if span == 1: - assert(count == 1) + assert count == 1 return 1, True - assert(count != 1) + assert count != 1 if index == 0 or min_span > span: min_span = span min_span_index = index @@ -72,14 +72,14 @@ def optimal_switch_column(opt, reserved_word_list, columns, unprocessed_columns) min_count = count min_count_index = index - if min_count <= opt['use_if_threshold']: + if min_count <= opt["use_if_threshold"]: return min_count_index, True return min_span_index, False def split_list_per_column(reserved_word_list, column): - assert(len(reserved_word_list) != 0) + assert len(reserved_word_list) != 0 column_dict = {} for item in reserved_word_list: @@ -90,9 +90,8 @@ def split_list_per_column(reserved_word_list, column): return sorted(column_dict.items()) -def generate_letter_switch(opt, unprocessed_columns, reserved_word_list, - columns=None): - assert(len(reserved_word_list) != 0) +def generate_letter_switch(opt, unprocessed_columns, reserved_word_list, columns=None): + assert len(reserved_word_list) != 0 if not columns: columns = range(0, unprocessed_columns) @@ -101,33 +100,33 @@ def generate_letter_switch(opt, unprocessed_columns, reserved_word_list, index, word = reserved_word_list[0] if unprocessed_columns == 0: - line(opt, 'JSRW_GOT_MATCH({}) /* {} */'.format(index, word)) + line(opt, "JSRW_GOT_MATCH({}) /* {} */".format(index, word)) return - if unprocessed_columns > opt['char_tail_test_threshold']: - line(opt, 'JSRW_TEST_GUESS({}) /* {} */'.format(index, word)) + if unprocessed_columns > opt["char_tail_test_threshold"]: + line(opt, "JSRW_TEST_GUESS({}) /* {} */".format(index, word)) return conds = [] for column in columns[0:unprocessed_columns]: quoted = repr(word[column]) - conds.append('JSRW_AT({})=={}'.format(column, quoted)) + conds.append("JSRW_AT({})=={}".format(column, quoted)) - line(opt, 'if ({}) {{'.format(' && '.join(conds))) + line(opt, "if ({}) {{".format(" && ".join(conds))) indent(opt) - 
line(opt, 'JSRW_GOT_MATCH({}) /* {} */'.format(index, word)) + line(opt, "JSRW_GOT_MATCH({}) /* {} */".format(index, word)) dedent(opt) - line(opt, '}') - line(opt, 'JSRW_NO_MATCH()') + line(opt, "}") + line(opt, "JSRW_NO_MATCH()") return - assert(unprocessed_columns != 0) + assert unprocessed_columns != 0 - optimal_column_index, use_if = optimal_switch_column(opt, reserved_word_list, - columns, - unprocessed_columns) + optimal_column_index, use_if = optimal_switch_column( + opt, reserved_word_list, columns, unprocessed_columns + ) optimal_column = columns[optimal_column_index] # Make a copy to avoid breaking passed list. @@ -137,32 +136,32 @@ def generate_letter_switch(opt, unprocessed_columns, reserved_word_list, list_per_column = split_list_per_column(reserved_word_list, optimal_column) if not use_if: - line(opt, 'switch (JSRW_AT({})) {{'.format(optimal_column)) + line(opt, "switch (JSRW_AT({})) {{".format(optimal_column)) for char, reserved_word_list_per_column in list_per_column: quoted = repr(char) if use_if: - line(opt, 'if (JSRW_AT({}) == {}) {{'.format(optimal_column, - quoted)) + line(opt, "if (JSRW_AT({}) == {}) {{".format(optimal_column, quoted)) else: - line(opt, ' case {}:'.format(quoted)) + line(opt, " case {}:".format(quoted)) indent(opt) - generate_letter_switch(opt, unprocessed_columns - 1, - reserved_word_list_per_column, columns) + generate_letter_switch( + opt, unprocessed_columns - 1, reserved_word_list_per_column, columns + ) dedent(opt) if use_if: - line(opt, '}') + line(opt, "}") if not use_if: - line(opt, '}') + line(opt, "}") - line(opt, 'JSRW_NO_MATCH()') + line(opt, "JSRW_NO_MATCH()") def split_list_per_length(reserved_word_list): - assert(len(reserved_word_list) != 0) + assert len(reserved_word_list) != 0 length_dict = {} for item in reserved_word_list: @@ -174,52 +173,57 @@ def split_list_per_length(reserved_word_list): def generate_switch(opt, reserved_word_list): - assert(len(reserved_word_list) != 0) - - line(opt, '/*') - line(opt, ' * Generating switch for the list of {} entries:'.format(len(reserved_word_list))) + assert len(reserved_word_list) != 0 + + line(opt, "/*") + line( + opt, + " * Generating switch for the list of {} entries:".format( + len(reserved_word_list) + ), + ) for index, word in reserved_word_list: - line(opt, ' * {}'.format(word)) - line(opt, ' */') + line(opt, " * {}".format(word)) + line(opt, " */") list_per_length = split_list_per_length(reserved_word_list) use_if = False - if len(list_per_length) < opt['use_if_threshold']: + if len(list_per_length) < opt["use_if_threshold"]: use_if = True if not use_if: - line(opt, 'switch (JSRW_LENGTH()) {') + line(opt, "switch (JSRW_LENGTH()) {") for length, reserved_word_list_per_length in list_per_length: if use_if: - line(opt, 'if (JSRW_LENGTH() == {}) {{'.format(length)) + line(opt, "if (JSRW_LENGTH() == {}) {{".format(length)) else: - line(opt, ' case {}:'.format(length)) + line(opt, " case {}:".format(length)) indent(opt) generate_letter_switch(opt, length, reserved_word_list_per_length) dedent(opt) if use_if: - line(opt, '}') + line(opt, "}") if not use_if: - line(opt, '}') - line(opt, 'JSRW_NO_MATCH()') + line(opt, "}") + line(opt, "JSRW_NO_MATCH()") def main(output, reserved_words_h): reserved_word_list = read_reserved_word_list(reserved_words_h) opt = { - 'indent_level': 1, - 'use_if_threshold': 3, - 'char_tail_test_threshold': 4, - 'output': output + "indent_level": 1, + "use_if_threshold": 3, + "char_tail_test_threshold": 4, + "output": output, } generate_switch(opt, 
reserved_word_list) -if __name__ == '__main__': +if __name__ == "__main__": main(sys.stdout, *sys.argv[1:]) diff --git a/js/src/frontend/align_stack_comment.py b/js/src/frontend/align_stack_comment.py index edca78841075f4..1ae2204dc3828e 100755 --- a/js/src/frontend/align_stack_comment.py +++ b/js/src/frontend/align_stack_comment.py @@ -23,7 +23,7 @@ # The maximum column for comment MAX_CHARS_PER_LINE = 80 -stack_comment_pat = re.compile('^( *//) *(\[stack\].*)$') +stack_comment_pat = re.compile("^( *//) *(\[stack\].*)$") def align_stack_comment(path): @@ -39,29 +39,37 @@ def align_stack_comment(path): for line in f: line_num += 1 # Python includes \n in lines. - line = line.rstrip('\n') + line = line.rstrip("\n") m = stack_comment_pat.search(line) if m: - head = m.group(1) + ' ' + head = m.group(1) + " " head_len = len(head) comment = m.group(2) comment_len = len(comment) if head_len > ALIGNMENT_COLUMN: - print('Warning: line {} overflows from alignment column {}: {}'.format( - line_num, ALIGNMENT_COLUMN, head_len), file=sys.stderr) + print( + "Warning: line {} overflows from alignment column {}: {}".format( + line_num, ALIGNMENT_COLUMN, head_len + ), + file=sys.stderr, + ) line_len = max(head_len, ALIGNMENT_COLUMN) + comment_len if line_len > MAX_CHARS_PER_LINE: - print('Warning: line {} overflows from {} chars: {}'.format( - line_num, MAX_CHARS_PER_LINE, line_len), file=sys.stderr) + print( + "Warning: line {} overflows from {} chars: {}".format( + line_num, MAX_CHARS_PER_LINE, line_len + ), + file=sys.stderr, + ) max_head_len = max(max_head_len, head_len) max_comment_len = max(max_comment_len, comment_len) spaces = max(ALIGNMENT_COLUMN - head_len, 0) - formatted = head + ' ' * spaces + comment + formatted = head + " " * spaces + comment if formatted != line: changed = True @@ -70,25 +78,30 @@ def align_stack_comment(path): else: lines.append(line) - print('Info: Minimum column number for [stack]: {}'.format( - max_head_len), file=sys.stderr) - print('Info: Alignment column number for [stack]: {}'.format( - ALIGNMENT_COLUMN), file=sys.stderr) - print('Info: Max length of stack transition comments: {}'.format( - max_comment_len), file=sys.stderr) + print( + "Info: Minimum column number for [stack]: {}".format(max_head_len), + file=sys.stderr, + ) + print( + "Info: Alignment column number for [stack]: {}".format(ALIGNMENT_COLUMN), + file=sys.stderr, + ) + print( + "Info: Max length of stack transition comments: {}".format(max_comment_len), + file=sys.stderr, + ) if changed: - with open(path, 'w') as f: + with open(path, "w") as f: for line in lines: print(line, file=f) else: print("No change.") -if __name__ == '__main__': +if __name__ == "__main__": if len(sys.argv) < 2: - print('Usage: align_stack_comment.py FILE', - file=sys.stderr) + print("Usage: align_stack_comment.py FILE", file=sys.stderr) sys.exit(1) for path in sys.argv[1:]: diff --git a/js/src/frontend/moz.build b/js/src/frontend/moz.build index 8cd30b3b4971fb..930b2d58baa3b7 100644 --- a/js/src/frontend/moz.build +++ b/js/src/frontend/moz.build @@ -4,76 +4,76 @@ # License, v. 2.0. If a copy of the MPL was not distributed with this # file, You can obtain one at http://mozilla.org/MPL/2.0/. -FINAL_LIBRARY = 'js' +FINAL_LIBRARY = "js" # Includes should be relative to parent path -LOCAL_INCLUDES += [ - '!..', - '..' 
-] +LOCAL_INCLUDES += ["!..", ".."] -include('../js-config.mozbuild') -include('../js-cxxflags.mozbuild') +include("../js-config.mozbuild") +include("../js-cxxflags.mozbuild") # Generate frontend/ReservedWordsGenerated.h from frontend/ReservedWords.h -GeneratedFile('ReservedWordsGenerated.h', script='GenerateReservedWords.py', - inputs=['ReservedWords.h']) +GeneratedFile( + "ReservedWordsGenerated.h", + script="GenerateReservedWords.py", + inputs=["ReservedWords.h"], +) -if CONFIG['JS_ENABLE_SMOOSH']: - CbindgenHeader('smoosh_generated.h', inputs=['/js/src/frontend/smoosh']) +if CONFIG["JS_ENABLE_SMOOSH"]: + CbindgenHeader("smoosh_generated.h", inputs=["/js/src/frontend/smoosh"]) UNIFIED_SOURCES += [ - 'AbstractScopePtr.cpp', - 'BytecodeCompiler.cpp', - 'BytecodeControlStructures.cpp', - 'BytecodeEmitter.cpp', - 'BytecodeSection.cpp', - 'CallOrNewEmitter.cpp', - 'CForEmitter.cpp', - 'DefaultEmitter.cpp', - 'DoWhileEmitter.cpp', - 'ElemOpEmitter.cpp', - 'EmitterScope.cpp', - 'ExpressionStatementEmitter.cpp', - 'FoldConstants.cpp', - 'ForInEmitter.cpp', - 'ForOfEmitter.cpp', - 'ForOfLoopControl.cpp', - 'FunctionEmitter.cpp', - 'IfEmitter.cpp', - 'JumpList.cpp', - 'LabelEmitter.cpp', - 'LexicalScopeEmitter.cpp', - 'NameFunctions.cpp', - 'NameOpEmitter.cpp', - 'ObjectEmitter.cpp', - 'ObjLiteral.cpp', - 'OptionalEmitter.cpp', - 'ParseContext.cpp', - 'ParseNode.cpp', - 'ParseNodeVerify.cpp', - 'ParserAtom.cpp', - 'PropOpEmitter.cpp', - 'SharedContext.cpp', - 'SourceNotes.cpp', - 'Stencil.cpp', - 'StencilXdr.cpp', - 'SwitchEmitter.cpp', - 'TDZCheckCache.cpp', - 'TokenStream.cpp', - 'TryEmitter.cpp', - 'WhileEmitter.cpp', + "AbstractScopePtr.cpp", + "BytecodeCompiler.cpp", + "BytecodeControlStructures.cpp", + "BytecodeEmitter.cpp", + "BytecodeSection.cpp", + "CallOrNewEmitter.cpp", + "CForEmitter.cpp", + "DefaultEmitter.cpp", + "DoWhileEmitter.cpp", + "ElemOpEmitter.cpp", + "EmitterScope.cpp", + "ExpressionStatementEmitter.cpp", + "FoldConstants.cpp", + "ForInEmitter.cpp", + "ForOfEmitter.cpp", + "ForOfLoopControl.cpp", + "FunctionEmitter.cpp", + "IfEmitter.cpp", + "JumpList.cpp", + "LabelEmitter.cpp", + "LexicalScopeEmitter.cpp", + "NameFunctions.cpp", + "NameOpEmitter.cpp", + "ObjectEmitter.cpp", + "ObjLiteral.cpp", + "OptionalEmitter.cpp", + "ParseContext.cpp", + "ParseNode.cpp", + "ParseNodeVerify.cpp", + "ParserAtom.cpp", + "PropOpEmitter.cpp", + "SharedContext.cpp", + "SourceNotes.cpp", + "Stencil.cpp", + "StencilXdr.cpp", + "SwitchEmitter.cpp", + "TDZCheckCache.cpp", + "TokenStream.cpp", + "TryEmitter.cpp", + "WhileEmitter.cpp", ] -if CONFIG['JS_ENABLE_SMOOSH']: +if CONFIG["JS_ENABLE_SMOOSH"]: UNIFIED_SOURCES += [ - 'Frontend2.cpp', + "Frontend2.cpp", ] # Parser.cpp cannot be built in unified mode because of explicit # template instantiations. SOURCES += [ - 'Parser.cpp', + "Parser.cpp", ] diff --git a/js/src/frontend/smoosh/moz.build b/js/src/frontend/smoosh/moz.build index c29156d5599040..d75c4c18ba7e08 100644 --- a/js/src/frontend/smoosh/moz.build +++ b/js/src/frontend/smoosh/moz.build @@ -4,15 +4,12 @@ # License, v. 2.0. If a copy of the MPL was not distributed with this # file, You can obtain one at http://mozilla.org/MPL/2.0/. -FINAL_LIBRARY = 'js' +FINAL_LIBRARY = "js" # Includes should be relative to parent path -LOCAL_INCLUDES += [ - '!../..', - '../..' 
-] +LOCAL_INCLUDES += ["!../..", "../.."] -include('../../js-config.mozbuild') -include('../../js-cxxflags.mozbuild') +include("../../js-config.mozbuild") +include("../../js-cxxflags.mozbuild") -DIRS += ['../../rust'] +DIRS += ["../../rust"] diff --git a/js/src/fuzz-tests/moz.build b/js/src/fuzz-tests/moz.build index 58af1dfc2cedec..dc6e5cb9a5c965 100644 --- a/js/src/fuzz-tests/moz.build +++ b/js/src/fuzz-tests/moz.build @@ -4,41 +4,41 @@ # License, v. 2.0. If a copy of the MPL was not distributed with this # file, You can obtain one at http://mozilla.org/MPL/2.0/. -GeckoProgram('fuzz-tests', linkage=None) +GeckoProgram("fuzz-tests", linkage=None) -include('../js-cxxflags.mozbuild') -include('../js-standalone.mozbuild') +include("../js-cxxflags.mozbuild") +include("../js-standalone.mozbuild") UNIFIED_SOURCES += [ - 'testExample.cpp', - 'testRegExp.cpp', - 'tests.cpp', - 'testStructuredCloneReader.cpp', - 'testWasm.cpp', + "testExample.cpp", + "testRegExp.cpp", + "tests.cpp", + "testStructuredCloneReader.cpp", + "testWasm.cpp", ] -DEFINES['EXPORT_JS_API'] = True +DEFINES["EXPORT_JS_API"] = True LOCAL_INCLUDES += [ - '!..', - '..', + "!..", + "..", ] -if CONFIG['FUZZING']: +if CONFIG["FUZZING"]: USE_LIBS += [ - 'static:fuzzer-registry', + "static:fuzzer-registry", ] -if CONFIG['LIBFUZZER']: +if CONFIG["LIBFUZZER"]: USE_LIBS += [ - 'static:fuzzer', + "static:fuzzer", ] # Add libFuzzer configuration directives - include('/tools/fuzzing/libfuzzer-config.mozbuild') + include("/tools/fuzzing/libfuzzer-config.mozbuild") USE_LIBS += [ - 'static:js', + "static:js", ] -DEFINES['topsrcdir'] = '%s/js/src' % TOPSRCDIR +DEFINES["topsrcdir"] = "%s/js/src" % TOPSRCDIR diff --git a/js/src/gc/GenerateStatsPhases.py b/js/src/gc/GenerateStatsPhases.py index 4a83a93aefb458..c7cae53be279d8 100644 --- a/js/src/gc/GenerateStatsPhases.py +++ b/js/src/gc/GenerateStatsPhases.py @@ -58,7 +58,7 @@ import collections -class PhaseKind(): +class PhaseKind: def __init__(self, name, descr, bucket, children=[]): self.name = name self.descr = descr @@ -68,107 +68,191 @@ def __init__(self, name, descr, bucket, children=[]): # The root marking phase appears in several places in the graph. 
-MarkRootsPhaseKind = PhaseKind("MARK_ROOTS", "Mark Roots", 48, [ - PhaseKind("MARK_CCWS", "Mark Cross Compartment Wrappers", 50), - PhaseKind("MARK_STACK", "Mark C and JS stacks", 51), - PhaseKind("MARK_RUNTIME_DATA", "Mark Runtime-wide Data", 52), - PhaseKind("MARK_EMBEDDING", "Mark Embedding", 53), - PhaseKind("MARK_COMPARTMENTS", "Mark Compartments", 54) -]) +MarkRootsPhaseKind = PhaseKind( + "MARK_ROOTS", + "Mark Roots", + 48, + [ + PhaseKind("MARK_CCWS", "Mark Cross Compartment Wrappers", 50), + PhaseKind("MARK_STACK", "Mark C and JS stacks", 51), + PhaseKind("MARK_RUNTIME_DATA", "Mark Runtime-wide Data", 52), + PhaseKind("MARK_EMBEDDING", "Mark Embedding", 53), + PhaseKind("MARK_COMPARTMENTS", "Mark Compartments", 54), + ], +) JoinParallelTasksPhaseKind = PhaseKind("JOIN_PARALLEL_TASKS", "Join Parallel Tasks", 67) PhaseKindGraphRoots = [ PhaseKind("MUTATOR", "Mutator Running", 0), PhaseKind("GC_BEGIN", "Begin Callback", 1), - PhaseKind("EVICT_NURSERY_FOR_MAJOR_GC", "Evict Nursery For Major GC", 70, [ - MarkRootsPhaseKind, - ]), - PhaseKind("WAIT_BACKGROUND_THREAD", "Wait Background Thread", 2), - PhaseKind("PREPARE", "Prepare For Collection", 69, [ - PhaseKind("UNMARK", "Unmark", 7), - PhaseKind("UNMARK_WEAKMAPS", "Unmark WeakMaps", 76), - PhaseKind("BUFFER_GRAY_ROOTS", "Buffer Gray Roots", 49), - PhaseKind("MARK_DISCARD_CODE", "Mark Discard Code", 3), - PhaseKind("RELAZIFY_FUNCTIONS", "Relazify Functions", 4), - PhaseKind("PURGE", "Purge", 5), - PhaseKind("PURGE_SHAPE_CACHES", "Purge ShapeCaches", 60), - PhaseKind("PURGE_SOURCE_URLS", "Purge Source URLs", 73), - JoinParallelTasksPhaseKind - ]), - PhaseKind("MARK", "Mark", 6, [ - MarkRootsPhaseKind, - PhaseKind("MARK_DELAYED", "Mark Delayed", 8) - ]), - PhaseKind("SWEEP", "Sweep", 9, [ - PhaseKind("SWEEP_MARK", "Mark During Sweeping", 10, [ - PhaseKind("SWEEP_MARK_INCOMING_BLACK", "Mark Incoming Black Pointers", 12), - PhaseKind("SWEEP_MARK_WEAK", "Mark Weak", 13, [ - PhaseKind("SWEEP_MARK_GRAY_WEAK", "Mark Gray and Weak", 16) - ]), - PhaseKind("SWEEP_MARK_INCOMING_GRAY", "Mark Incoming Gray Pointers", 14), - PhaseKind("SWEEP_MARK_GRAY", "Mark Gray", 15), - ]), - PhaseKind("FINALIZE_START", "Finalize Start Callbacks", 17, [ - PhaseKind("WEAK_ZONES_CALLBACK", "Per-Slice Weak Callback", 57), - PhaseKind("WEAK_COMPARTMENT_CALLBACK", "Per-Compartment Weak Callback", 58) - ]), - PhaseKind("UPDATE_ATOMS_BITMAP", "Sweep Atoms Bitmap", 68), - PhaseKind("SWEEP_ATOMS_TABLE", "Sweep Atoms Table", 18), - PhaseKind("SWEEP_COMPARTMENTS", "Sweep Compartments", 20, [ - PhaseKind("SWEEP_DISCARD_CODE", "Sweep Discard Code", 21), - PhaseKind("SWEEP_INNER_VIEWS", "Sweep Inner Views", 22), - PhaseKind("SWEEP_CC_WRAPPER", "Sweep Cross Compartment Wrappers", 23), - PhaseKind("SWEEP_BASE_SHAPE", "Sweep Base Shapes", 24), - PhaseKind("SWEEP_INITIAL_SHAPE", "Sweep Initial Shapes", 25), - PhaseKind("SWEEP_TYPE_OBJECT", "Sweep Type Objects", 26), - PhaseKind("SWEEP_REGEXP", "Sweep Regexps", 28), - PhaseKind("SWEEP_COMPRESSION", "Sweep Compression Tasks", 62), - PhaseKind("SWEEP_WEAKMAPS", "Sweep WeakMaps", 63), - PhaseKind("SWEEP_UNIQUEIDS", "Sweep Unique IDs", 64), - PhaseKind("SWEEP_FINALIZATION_REGISTRIES", "Sweep FinalizationRegistries", 74), - PhaseKind("SWEEP_WEAKREFS", "Sweep WeakRefs", 75), - PhaseKind("SWEEP_JIT_DATA", "Sweep JIT Data", 65), - PhaseKind("SWEEP_WEAK_CACHES", "Sweep Weak Caches", 66), - PhaseKind("SWEEP_MISC", "Sweep Miscellaneous", 29), - PhaseKind("SWEEP_TYPES", "Sweep type information", 30, [ - PhaseKind("SWEEP_TYPES_BEGIN", "Sweep 
type tables and compilations", 31), - PhaseKind("SWEEP_TYPES_END", "Free type arena", 32), - ]), - JoinParallelTasksPhaseKind - ]), - PhaseKind("SWEEP_OBJECT", "Sweep Object", 33), - PhaseKind("SWEEP_STRING", "Sweep String", 34), - PhaseKind("SWEEP_SCRIPT", "Sweep Script", 35), - PhaseKind("SWEEP_SCOPE", "Sweep Scope", 59), - PhaseKind("SWEEP_REGEXP_SHARED", "Sweep RegExpShared", 61), - PhaseKind("SWEEP_SHAPE", "Sweep Shape", 36), - PhaseKind("FINALIZE_END", "Finalize End Callback", 38), - PhaseKind("DESTROY", "Deallocate", 39), - JoinParallelTasksPhaseKind - ]), - PhaseKind("COMPACT", "Compact", 40, [ - PhaseKind("COMPACT_MOVE", "Compact Move", 41), - PhaseKind("COMPACT_UPDATE", "Compact Update", 42, [ + PhaseKind( + "EVICT_NURSERY_FOR_MAJOR_GC", + "Evict Nursery For Major GC", + 70, + [ MarkRootsPhaseKind, - PhaseKind("COMPACT_UPDATE_CELLS", "Compact Update Cells", 43), - JoinParallelTasksPhaseKind - ]), - ]), + ], + ), + PhaseKind("WAIT_BACKGROUND_THREAD", "Wait Background Thread", 2), + PhaseKind( + "PREPARE", + "Prepare For Collection", + 69, + [ + PhaseKind("UNMARK", "Unmark", 7), + PhaseKind("UNMARK_WEAKMAPS", "Unmark WeakMaps", 76), + PhaseKind("BUFFER_GRAY_ROOTS", "Buffer Gray Roots", 49), + PhaseKind("MARK_DISCARD_CODE", "Mark Discard Code", 3), + PhaseKind("RELAZIFY_FUNCTIONS", "Relazify Functions", 4), + PhaseKind("PURGE", "Purge", 5), + PhaseKind("PURGE_SHAPE_CACHES", "Purge ShapeCaches", 60), + PhaseKind("PURGE_SOURCE_URLS", "Purge Source URLs", 73), + JoinParallelTasksPhaseKind, + ], + ), + PhaseKind( + "MARK", + "Mark", + 6, + [MarkRootsPhaseKind, PhaseKind("MARK_DELAYED", "Mark Delayed", 8)], + ), + PhaseKind( + "SWEEP", + "Sweep", + 9, + [ + PhaseKind( + "SWEEP_MARK", + "Mark During Sweeping", + 10, + [ + PhaseKind( + "SWEEP_MARK_INCOMING_BLACK", "Mark Incoming Black Pointers", 12 + ), + PhaseKind( + "SWEEP_MARK_WEAK", + "Mark Weak", + 13, + [PhaseKind("SWEEP_MARK_GRAY_WEAK", "Mark Gray and Weak", 16)], + ), + PhaseKind( + "SWEEP_MARK_INCOMING_GRAY", "Mark Incoming Gray Pointers", 14 + ), + PhaseKind("SWEEP_MARK_GRAY", "Mark Gray", 15), + ], + ), + PhaseKind( + "FINALIZE_START", + "Finalize Start Callbacks", + 17, + [ + PhaseKind("WEAK_ZONES_CALLBACK", "Per-Slice Weak Callback", 57), + PhaseKind( + "WEAK_COMPARTMENT_CALLBACK", "Per-Compartment Weak Callback", 58 + ), + ], + ), + PhaseKind("UPDATE_ATOMS_BITMAP", "Sweep Atoms Bitmap", 68), + PhaseKind("SWEEP_ATOMS_TABLE", "Sweep Atoms Table", 18), + PhaseKind( + "SWEEP_COMPARTMENTS", + "Sweep Compartments", + 20, + [ + PhaseKind("SWEEP_DISCARD_CODE", "Sweep Discard Code", 21), + PhaseKind("SWEEP_INNER_VIEWS", "Sweep Inner Views", 22), + PhaseKind( + "SWEEP_CC_WRAPPER", "Sweep Cross Compartment Wrappers", 23 + ), + PhaseKind("SWEEP_BASE_SHAPE", "Sweep Base Shapes", 24), + PhaseKind("SWEEP_INITIAL_SHAPE", "Sweep Initial Shapes", 25), + PhaseKind("SWEEP_TYPE_OBJECT", "Sweep Type Objects", 26), + PhaseKind("SWEEP_REGEXP", "Sweep Regexps", 28), + PhaseKind("SWEEP_COMPRESSION", "Sweep Compression Tasks", 62), + PhaseKind("SWEEP_WEAKMAPS", "Sweep WeakMaps", 63), + PhaseKind("SWEEP_UNIQUEIDS", "Sweep Unique IDs", 64), + PhaseKind( + "SWEEP_FINALIZATION_REGISTRIES", + "Sweep FinalizationRegistries", + 74, + ), + PhaseKind("SWEEP_WEAKREFS", "Sweep WeakRefs", 75), + PhaseKind("SWEEP_JIT_DATA", "Sweep JIT Data", 65), + PhaseKind("SWEEP_WEAK_CACHES", "Sweep Weak Caches", 66), + PhaseKind("SWEEP_MISC", "Sweep Miscellaneous", 29), + PhaseKind( + "SWEEP_TYPES", + "Sweep type information", + 30, + [ + PhaseKind( + "SWEEP_TYPES_BEGIN", + 
"Sweep type tables and compilations", + 31, + ), + PhaseKind("SWEEP_TYPES_END", "Free type arena", 32), + ], + ), + JoinParallelTasksPhaseKind, + ], + ), + PhaseKind("SWEEP_OBJECT", "Sweep Object", 33), + PhaseKind("SWEEP_STRING", "Sweep String", 34), + PhaseKind("SWEEP_SCRIPT", "Sweep Script", 35), + PhaseKind("SWEEP_SCOPE", "Sweep Scope", 59), + PhaseKind("SWEEP_REGEXP_SHARED", "Sweep RegExpShared", 61), + PhaseKind("SWEEP_SHAPE", "Sweep Shape", 36), + PhaseKind("FINALIZE_END", "Finalize End Callback", 38), + PhaseKind("DESTROY", "Deallocate", 39), + JoinParallelTasksPhaseKind, + ], + ), + PhaseKind( + "COMPACT", + "Compact", + 40, + [ + PhaseKind("COMPACT_MOVE", "Compact Move", 41), + PhaseKind( + "COMPACT_UPDATE", + "Compact Update", + 42, + [ + MarkRootsPhaseKind, + PhaseKind("COMPACT_UPDATE_CELLS", "Compact Update Cells", 43), + JoinParallelTasksPhaseKind, + ], + ), + ], + ), PhaseKind("DECOMMIT", "Decommit", 72), PhaseKind("GC_END", "End Callback", 44), - PhaseKind("MINOR_GC", "All Minor GCs", 45, [ - MarkRootsPhaseKind, - ]), - PhaseKind("EVICT_NURSERY", "Minor GCs to Evict Nursery", 46, [ - MarkRootsPhaseKind, - ]), - PhaseKind("TRACE_HEAP", "Trace Heap", 47, [ - MarkRootsPhaseKind, - ]), - PhaseKind("BARRIER", "Barriers", 55, [ - PhaseKind("UNMARK_GRAY", "Unmark gray", 56) - ]) + PhaseKind( + "MINOR_GC", + "All Minor GCs", + 45, + [ + MarkRootsPhaseKind, + ], + ), + PhaseKind( + "EVICT_NURSERY", + "Minor GCs to Evict Nursery", + 46, + [ + MarkRootsPhaseKind, + ], + ), + PhaseKind( + "TRACE_HEAP", + "Trace Heap", + 47, + [ + MarkRootsPhaseKind, + ], + ), + PhaseKind("BARRIER", "Barriers", 55, [PhaseKind("UNMARK_GRAY", "Unmark gray", 56)]), ] @@ -206,9 +290,9 @@ def __init__(self, phaseKind, parent): self.nextSibling = None self.nextInPhaseKind = None - self.path = re.sub(r'\W+', '_', phaseKind.name.lower()) + self.path = re.sub(r"\W+", "_", phaseKind.name.lower()) if parent is not None: - self.path = parent.path + '.' + self.path + self.path = parent.path + "." + self.path def expandPhases(): @@ -281,7 +365,7 @@ def generateHeader(out): extraPhaseKinds = [ "NONE = LIMIT", "EXPLICIT_SUSPENSION = LIMIT", - "IMPLICIT_SUSPENSION" + "IMPLICIT_SUSPENSION", ] writeEnumClass(out, "PhaseKind", "uint8_t", phaseKindNames, extraPhaseKinds) out.write("\n") @@ -290,11 +374,7 @@ def generateHeader(out): # Generate Phase enum. 
# phaseNames = map(lambda phase: phase.name, AllPhases) - extraPhases = [ - "NONE = LIMIT", - "EXPLICIT_SUSPENSION = LIMIT", - "IMPLICIT_SUSPENSION" - ] + extraPhases = ["NONE = LIMIT", "EXPLICIT_SUSPENSION = LIMIT", "IMPLICIT_SUSPENSION"] writeEnumClass(out, "Phase", "uint8_t", phaseNames, extraPhases) out.write("\n") @@ -311,8 +391,10 @@ def generateCpp(out): out.write("static constexpr PhaseKindTable phaseKinds = {\n") for phaseKind in AllPhaseKinds: phase = PhasesForPhaseKind[phaseKind][0] - out.write(" /* PhaseKind::%s */ PhaseKindInfo { Phase::%s, %d },\n" % - (phaseKind.name, phase.name, phaseKind.bucket)) + out.write( + " /* PhaseKind::%s */ PhaseKindInfo { Phase::%s, %d },\n" + % (phaseKind.name, phase.name, phaseKind.bucket) + ) out.write("};\n") out.write("\n") @@ -326,20 +408,23 @@ def name(phase): for phase in AllPhases: firstChild = phase.children[0] if phase.children else None phaseKind = phase.phaseKind - out.write(" /* %s */ PhaseInfo { %s, %s, %s, %s, PhaseKind::%s, %d, \"%s\", \"%s\" },\n" % # NOQA: E501 - (name(phase), - name(phase.parent), - name(firstChild), - name(phase.nextSibling), - name(phase.nextInPhaseKind), - phaseKind.name, - phase.depth, - phaseKind.descr, - phase.path)) + out.write( + ' /* %s */ PhaseInfo { %s, %s, %s, %s, PhaseKind::%s, %d, "%s", "%s" },\n' + % ( # NOQA: E501 + name(phase), + name(phase.parent), + name(firstChild), + name(phase.nextSibling), + name(phase.nextInPhaseKind), + phaseKind.name, + phase.depth, + phaseKind.descr, + phase.path, + ) + ) out.write("};\n") # # Print in a comment the next available phase kind number. # - out.write("// The next available phase kind number is: %d\n" % - (MaxBucket + 1)) + out.write("// The next available phase kind number is: %d\n" % (MaxBucket + 1)) diff --git a/js/src/gc/moz.build b/js/src/gc/moz.build index bbd091b225ff4a..88f5da03398ccd 100644 --- a/js/src/gc/moz.build +++ b/js/src/gc/moz.build @@ -4,48 +4,51 @@ # License, v. 2.0. If a copy of the MPL was not distributed with this # file, You can obtain one at http://mozilla.org/MPL/2.0/. -FINAL_LIBRARY = 'js' +FINAL_LIBRARY = "js" # Includes should be relative to parent path -LOCAL_INCLUDES += [ - '!..', - '..' -] +LOCAL_INCLUDES += ["!..", ".."] -include('../js-config.mozbuild') -include('../js-cxxflags.mozbuild') +include("../js-config.mozbuild") +include("../js-cxxflags.mozbuild") # Generate GC statistics phase data. 
-GeneratedFile('StatsPhasesGenerated.h', - script='GenerateStatsPhases.py', entry_point='generateHeader') -GeneratedFile('StatsPhasesGenerated.inc', - script='GenerateStatsPhases.py', entry_point='generateCpp') +GeneratedFile( + "StatsPhasesGenerated.h", + script="GenerateStatsPhases.py", + entry_point="generateHeader", +) +GeneratedFile( + "StatsPhasesGenerated.inc", + script="GenerateStatsPhases.py", + entry_point="generateCpp", +) UNIFIED_SOURCES += [ - 'Allocator.cpp', - 'AtomMarking.cpp', - 'Barrier.cpp', - 'FinalizationRegistry.cpp', - 'GC.cpp', - 'GCParallelTask.cpp', - 'Marking.cpp', - 'Memory.cpp', - 'Nursery.cpp', - 'PublicIterators.cpp', - 'RootMarking.cpp', - 'Scheduling.cpp', - 'Statistics.cpp', - 'Tracer.cpp', - 'Verifier.cpp', - 'WeakMap.cpp', - 'WeakMapPtr.cpp', - 'Zone.cpp', + "Allocator.cpp", + "AtomMarking.cpp", + "Barrier.cpp", + "FinalizationRegistry.cpp", + "GC.cpp", + "GCParallelTask.cpp", + "Marking.cpp", + "Memory.cpp", + "Nursery.cpp", + "PublicIterators.cpp", + "RootMarking.cpp", + "Scheduling.cpp", + "Statistics.cpp", + "Tracer.cpp", + "Verifier.cpp", + "WeakMap.cpp", + "WeakMapPtr.cpp", + "Zone.cpp", ] # StoreBuffer.cpp cannot be built in unified mode because its template # instantiations may or may not be needed depending on what it gets bundled # with. SOURCES += [ - 'StoreBuffer.cpp', + "StoreBuffer.cpp", ] diff --git a/js/src/gdb/lib-for-tests/catcher.py b/js/src/gdb/lib-for-tests/catcher.py index dead8db3160df0..8ef3529255b4ad 100644 --- a/js/src/gdb/lib-for-tests/catcher.py +++ b/js/src/gdb/lib-for-tests/catcher.py @@ -21,15 +21,15 @@ def execfile(filename, globs, locs): with open(filename) as f: - code = compile(f.read(), filename, 'exec') + code = compile(f.read(), filename, "exec") exec(code, globs, locs) try: # testlibdir is set on the GDB command line, via: # --eval-command python testlibdir=... - execfile(os.path.join(testlibdir, 'prologue.py'), globals(), locals()) + execfile(os.path.join(testlibdir, "prologue.py"), globals(), locals()) except Exception as err: - sys.stderr.write('Error running GDB prologue:\n') + sys.stderr.write("Error running GDB prologue:\n") traceback.print_exc() sys.exit(1) diff --git a/js/src/gdb/lib-for-tests/prologue.py b/js/src/gdb/lib-for-tests/prologue.py index d9c84b60e2f3b6..64f2994abe49ef 100644 --- a/js/src/gdb/lib-for-tests/prologue.py +++ b/js/src/gdb/lib-for-tests/prologue.py @@ -18,7 +18,7 @@ # ('breakpoint', by default) and then select the calling frame. -def run_fragment(fragment, function='gdb-tests.cpp:breakpoint'): +def run_fragment(fragment, function="gdb-tests.cpp:breakpoint"): # Arrange to stop at a reasonable place in the test program. bp = gdb.Breakpoint(function) try: @@ -27,28 +27,37 @@ def run_fragment(fragment, function='gdb-tests.cpp:breakpoint'): assert bp.hit_count == 1 finally: bp.delete() - gdb.execute('frame 1') + gdb.execute("frame 1") global active_fragment active_fragment = fragment + # Assert that |actual| is equal to |expected|; if not, complain in a helpful way. def assert_eq(actual, expected): if actual != expected: - raise AssertionError("""Unexpected result: + raise AssertionError( + """Unexpected result: expected: %r -actual: %r""" % (expected, actual)) +actual: %r""" + % (expected, actual) + ) + # Assert that |expected| regex matches |actual| result; if not, complain in a helpful way. 
def assert_match(actual, expected): if re.match(expected, actual, re.MULTILINE) is None: - raise AssertionError("""Unexpected result: + raise AssertionError( + """Unexpected result: expected pattern: %r -actual: %r""" % (expected, actual)) +actual: %r""" + % (expected, actual) + ) + # Assert that |value|'s pretty-printed form is |form|. If |value| is a # string, then evaluate it with gdb.parse_and_eval to produce a value. @@ -59,6 +68,7 @@ def assert_pretty(value, form): value = gdb.parse_and_eval(value) assert_eq(str(value), form) + # Assert that |value|'s pretty-printed form match the pattern |pattern|. If # |value| is a string, then evaluate it with gdb.parse_and_eval to produce a # value. @@ -69,6 +79,7 @@ def assert_regexp_pretty(value, form): value = gdb.parse_and_eval(value) assert_match(str(value), form) + # Check that the list of registered pretty-printers includes one named # |printer|, with a subprinter named |subprinter|. @@ -77,39 +88,41 @@ def assert_subprinter_registered(printer, subprinter): # Match a line containing |printer| followed by a colon, and then a # series of more-indented lines containing |subprinter|. - names = {'printer': re.escape(printer), 'subprinter': re.escape(subprinter)} - pat = r'^( +)%(printer)s *\n(\1 +.*\n)*\1 +%(subprinter)s *\n' % names - output = gdb.execute('info pretty-printer', to_string=True) + names = {"printer": re.escape(printer), "subprinter": re.escape(subprinter)} + pat = r"^( +)%(printer)s *\n(\1 +.*\n)*\1 +%(subprinter)s *\n" % names + output = gdb.execute("info pretty-printer", to_string=True) if not re.search(pat, output, re.MULTILINE): - raise AssertionError("assert_subprinter_registered failed to find pretty-printer:\n" - " %s:%s\n" - "'info pretty-printer' says:\n" - "%s" % (printer, subprinter, output)) + raise AssertionError( + "assert_subprinter_registered failed to find pretty-printer:\n" + " %s:%s\n" + "'info pretty-printer' says:\n" + "%s" % (printer, subprinter, output) + ) # Request full stack traces for Python errors. -gdb.execute('set python print-stack full') +gdb.execute("set python print-stack full") # Tell GDB not to ask the user about the things we tell it to do. -gdb.execute('set confirm off', False) +gdb.execute("set confirm off", False) # Some print settings that make testing easier. -gdb.execute('set print static-members off') -gdb.execute('set print address off') -gdb.execute('set print pretty off') -gdb.execute('set width 0') +gdb.execute("set print static-members off") +gdb.execute("set print address off") +gdb.execute("set print pretty off") +gdb.execute("set width 0") try: # testscript is set on the GDB command line, via: # --eval-command python testscript=... execfile(testscript, globals(), locals()) except AssertionError as err: - header = '\nAssertion traceback' + header = "\nAssertion traceback" if active_fragment: - header += ' for ' + active_fragment - sys.stderr.write(header + ':\n') + header += " for " + active_fragment + sys.stderr.write(header + ":\n") (t, v, tb) = sys.exc_info() traceback.print_tb(tb) - sys.stderr.write('\nTest assertion failed:\n') + sys.stderr.write("\nTest assertion failed:\n") sys.stderr.write(str(err)) sys.exit(1) diff --git a/js/src/gdb/moz.build b/js/src/gdb/moz.build index ee47f3e8b79256..29c93a8fc06c97 100644 --- a/js/src/gdb/moz.build +++ b/js/src/gdb/moz.build @@ -4,62 +4,62 @@ # License, v. 2.0. If a copy of the MPL was not distributed with this # file, You can obtain one at http://mozilla.org/MPL/2.0/. 
-GeckoProgram('gdb-tests', linkage=None) +GeckoProgram("gdb-tests", linkage=None) -include('../js-cxxflags.mozbuild') -include('../js-standalone.mozbuild') +include("../js-cxxflags.mozbuild") +include("../js-standalone.mozbuild") SOURCES += [ # This file must not be unified with any of the test files, or the use() # and breakpoint() functions might get optimized out. - 'gdb-tests.cpp' + "gdb-tests.cpp" ] UNIFIED_SOURCES += [ - 'tests/enum-printers.cpp', - 'tests/test-asmjs.cpp', - 'tests/test-ExecutableAllocator.cpp', - 'tests/test-GCCellPtr.cpp', - 'tests/test-Interpreter.cpp', - 'tests/test-jsbytecode.cpp', - 'tests/test-jsid.cpp', - 'tests/test-JSObject.cpp', - 'tests/test-jsop.cpp', - 'tests/test-JSString.cpp', - 'tests/test-JSSymbol.cpp', - 'tests/test-jsval.cpp', - 'tests/test-Root.cpp', - 'tests/test-unwind.cpp', - 'tests/typedef-printers.cpp', + "tests/enum-printers.cpp", + "tests/test-asmjs.cpp", + "tests/test-ExecutableAllocator.cpp", + "tests/test-GCCellPtr.cpp", + "tests/test-Interpreter.cpp", + "tests/test-jsbytecode.cpp", + "tests/test-jsid.cpp", + "tests/test-JSObject.cpp", + "tests/test-jsop.cpp", + "tests/test-JSString.cpp", + "tests/test-JSSymbol.cpp", + "tests/test-jsval.cpp", + "tests/test-Root.cpp", + "tests/test-unwind.cpp", + "tests/typedef-printers.cpp", ] -SOURCES += [ - 'tests/test-prettyprinters.cpp' -] +SOURCES += ["tests/test-prettyprinters.cpp"] -if CONFIG['CC_TYPE'] != 'clang-cl': +if CONFIG["CC_TYPE"] != "clang-cl": # Test expects to see pre-typedef names of base classes, but the compiler will # normally omit those from the debuginfo. The current clang-cl does not support # this option. - SOURCES['tests/test-prettyprinters.cpp'].flags += ['-fno-eliminate-unused-debug-types'] + SOURCES["tests/test-prettyprinters.cpp"].flags += [ + "-fno-eliminate-unused-debug-types" + ] -if CONFIG['CC_TYPE'] == 'clang': +if CONFIG["CC_TYPE"] == "clang": # clang has poor debuginfo when optimized. Some of the test files have # unrecoverable values even with -Og. gcc is far better about making # optimized code debuggable. - CXXFLAGS += ['-O0'] + CXXFLAGS += ["-O0"] -DEFINES['EXPORT_JS_API'] = True +DEFINES["EXPORT_JS_API"] = True LOCAL_INCLUDES += [ - '!..', - '..', + "!..", + "..", ] USE_LIBS += [ - 'static:js', + "static:js", ] -DEFINES['topsrcdir'] = '%s/js/src' % TOPSRCDIR -FINAL_TARGET_PP_FILES += ['gdb-tests-gdb.py.in'] -OBJDIR_FILES.js.src.gdb += ['!/dist/bin/gdb-tests-gdb.py'] +DEFINES["topsrcdir"] = "%s/js/src" % TOPSRCDIR +FINAL_TARGET_PP_FILES += ["gdb-tests-gdb.py.in"] +OBJDIR_FILES.js.src.gdb += ["!/dist/bin/gdb-tests-gdb.py"] diff --git a/js/src/gdb/mozilla/CellHeader.py b/js/src/gdb/mozilla/CellHeader.py index fbd5aa095db118..8163e57e1b62b9 100644 --- a/js/src/gdb/mozilla/CellHeader.py +++ b/js/src/gdb/mozilla/CellHeader.py @@ -8,18 +8,18 @@ def get_header_ptr(value, ptr_t): # Return the pointer stored in Cell::header_ for subclasses of # TenuredCellWithNonGCPointer and CellWithTenuredGCPointer. - return value['header_']['mValue'].cast(ptr_t) + return value["header_"]["mValue"].cast(ptr_t) def get_header_length_and_flags(value, cache): # Return the length and flags values for subclasses of # CellWithLengthAndFlags. - flags = value['header_']['mValue'].cast(cache.uintptr_t) + flags = value["header_"]["mValue"].cast(cache.uintptr_t) try: - length = value['length_'] + length = value["length_"] except gdb.error: # If we couldn't fetch the length directly, it must be stored # within `flags`. 
length = flags >> 32 - flags = flags % 2**32 + flags = flags % 2 ** 32 return length, flags diff --git a/js/src/gdb/mozilla/ExecutableAllocator.py b/js/src/gdb/mozilla/ExecutableAllocator.py index 387444baf759aa..e82b1a186da4a3 100644 --- a/js/src/gdb/mozilla/ExecutableAllocator.py +++ b/js/src/gdb/mozilla/ExecutableAllocator.py @@ -17,7 +17,8 @@ class jsjitExecutableAllocatorCache(object): - '''Cache information about the ExecutableAllocator type for this objfile.''' + """Cache information about the ExecutableAllocator type for this objfile.""" + def __init__(self): self.d = None @@ -28,9 +29,9 @@ def __getattr__(self, name): def initialize(self): self.d = {} - self.d['ExecutableAllocator'] = gdb.lookup_type('js::jit::ExecutableAllocator') - self.d['ExecutablePool'] = gdb.lookup_type('js::jit::ExecutablePool') - self.d['HashNumber'] = gdb.lookup_type('mozilla::HashNumber') + self.d["ExecutableAllocator"] = gdb.lookup_type("js::jit::ExecutableAllocator") + self.d["ExecutablePool"] = gdb.lookup_type("js::jit::ExecutablePool") + self.d["HashNumber"] = gdb.lookup_type("mozilla::HashNumber") @pretty_printer("js::jit::ExecutableAllocator") @@ -42,7 +43,7 @@ def __init__(self, value, cache): self.cache = cache.mod_ExecutableAllocator def to_string(self): - return "ExecutableAllocator([%s])" % ', '.join([str(x) for x in self]) + return "ExecutableAllocator([%s])" % ", ".join([str(x) for x in self]) def __iter__(self): return self.PoolIterator(self) @@ -53,10 +54,10 @@ def __init__(self, allocator): self.entryType = allocator.cache.ExecutablePool.pointer() self.hashNumType = allocator.cache.HashNumber # Emulate the HashSet::Range - self.table = allocator.value['m_pools']['mImpl']['mTable'] + self.table = allocator.value["m_pools"]["mImpl"]["mTable"] self.index = 0 kHashNumberBits = 32 - hashShift = allocator.value['m_pools']['mImpl']['mHashShift'] + hashShift = allocator.value["m_pools"]["mImpl"]["mHashShift"] self.capacity = 1 << (kHashNumberBits - hashShift) if self.table == 0: self.capacity = 0 @@ -90,6 +91,6 @@ def __init__(self, value, cache): self.cache = cache.mod_ExecutableAllocator def to_string(self): - pages = self.value['m_allocation']['pages'] - size = self.value['m_allocation']['size'] + pages = self.value["m_allocation"]["pages"] + size = self.value["m_allocation"]["size"] return "ExecutablePool %08x-%08x" % (pages, pages + size) diff --git a/js/src/gdb/mozilla/GCCellPtr.py b/js/src/gdb/mozilla/GCCellPtr.py index 7f2dc49cc621e6..738d9b3e452a42 100644 --- a/js/src/gdb/mozilla/GCCellPtr.py +++ b/js/src/gdb/mozilla/GCCellPtr.py @@ -17,47 +17,46 @@ class GCCellPtrTypeCache(object): def __init__(self, cache): - self.TraceKind_t = gdb.lookup_type('JS::TraceKind') - self.AllocKind_t = gdb.lookup_type('js::gc::AllocKind') - self.Arena_t = gdb.lookup_type('js::gc::Arena') - self.Cell_t = gdb.lookup_type('js::gc::Cell') - self.TenuredCell_t = gdb.lookup_type('js::gc::TenuredCell') + self.TraceKind_t = gdb.lookup_type("JS::TraceKind") + self.AllocKind_t = gdb.lookup_type("js::gc::AllocKind") + self.Arena_t = gdb.lookup_type("js::gc::Arena") + self.Cell_t = gdb.lookup_type("js::gc::Cell") + self.TenuredCell_t = gdb.lookup_type("js::gc::TenuredCell") trace_kinds = gdb.types.make_enum_dict(self.TraceKind_t) alloc_kinds = gdb.types.make_enum_dict(self.AllocKind_t) def trace_kind(k): - return trace_kinds['JS::TraceKind::' + k] + return trace_kinds["JS::TraceKind::" + k] def alloc_kind(k): - return alloc_kinds['js::gc::AllocKind::' + k] + return alloc_kinds["js::gc::AllocKind::" + k] # Build a 
mapping from TraceKind enum values to the types they denote. trace_map = { # Inline types. - 'Object': 'JSObject', - 'BigInt': 'JS::BigInt', - 'String': 'JSString', - 'Symbol': 'JS::Symbol', - 'Shape': 'js::Shape', - 'ObjectGroup': 'js::ObjectGroup', - 'Null': 'std::nullptr_t', - + "Object": "JSObject", + "BigInt": "JS::BigInt", + "String": "JSString", + "Symbol": "JS::Symbol", + "Shape": "js::Shape", + "ObjectGroup": "js::ObjectGroup", + "Null": "std::nullptr_t", # Out-of-line types. - 'BaseShape': 'js::BaseShape', - 'JitCode': 'js::jit::JitCode', - 'Script': 'js::BaseScript', - 'Scope': 'js::Scope', - 'RegExpShared': 'js::RegExpShared', + "BaseShape": "js::BaseShape", + "JitCode": "js::jit::JitCode", + "Script": "js::BaseScript", + "Scope": "js::Scope", + "RegExpShared": "js::RegExpShared", } # Map from AllocKind to TraceKind for out-of-line types. alloc_map = { - 'BASE_SHAPE': 'BaseShape', - 'JITCODE': 'JitCode', - 'SCRIPT': 'Script', - 'SCOPE': 'Scope', - 'REGEXP_SHARED': 'RegExpShared', + "BASE_SHAPE": "BaseShape", + "JITCODE": "JitCode", + "SCRIPT": "Script", + "SCOPE": "Scope", + "REGEXP_SHARED": "RegExpShared", } self.trace_kind_to_type = { @@ -67,12 +66,12 @@ def alloc_kind(k): alloc_kind(k): trace_kind(v) for k, v in alloc_map.items() } - self.Null = trace_kind('Null') - self.tracekind_mask = gdb.parse_and_eval('JS::OutOfLineTraceKindMask') - self.arena_mask = gdb.parse_and_eval('js::gc::ArenaMask') + self.Null = trace_kind("Null") + self.tracekind_mask = gdb.parse_and_eval("JS::OutOfLineTraceKindMask") + self.arena_mask = gdb.parse_and_eval("js::gc::ArenaMask") -@pretty_printer('JS::GCCellPtr') +@pretty_printer("JS::GCCellPtr") class GCCellPtr(object): def __init__(self, value, cache): self.value = value @@ -81,7 +80,7 @@ def __init__(self, value, cache): self.cache = cache def to_string(self): - ptr = self.value['ptr'] + ptr = self.value["ptr"] kind = ptr & self.cache.mod_GCCellPtr.tracekind_mask if kind == self.cache.mod_GCCellPtr.Null: return "JS::GCCellPtr(nullptr)" @@ -104,13 +103,19 @@ def to_string(self): # TenuredCell::arena() addr = int(tenured) arena_ptr = addr & ~self.cache.mod_GCCellPtr.arena_mask - arena = arena_ptr.reinterpret_cast(self.cache.mod_GCCellPtr.Arena_t.pointer()) + arena = arena_ptr.reinterpret_cast( + self.cache.mod_GCCellPtr.Arena_t.pointer() + ) # Arena::getAllocKind() - alloc_kind = arena['allocKind'].cast(self.cache.mod_GCCellPtr.AllocKind_t) - alloc_idx = int(alloc_kind.cast(self.cache.mod_GCCellPtr.AllocKind_t.target())) + alloc_kind = arena["allocKind"].cast(self.cache.mod_GCCellPtr.AllocKind_t) + alloc_idx = int( + alloc_kind.cast(self.cache.mod_GCCellPtr.AllocKind_t.target()) + ) # Map the AllocKind to a TraceKind. kind = self.cache.mod_GCCellPtr.alloc_kind_to_trace_kind[alloc_idx] type_name = self.cache.mod_GCCellPtr.trace_kind_to_type[int(kind)] - return "JS::GCCellPtr(({}*) {})".format(type_name, ptr.cast(self.cache.void_ptr_t)) + return "JS::GCCellPtr(({}*) {})".format( + type_name, ptr.cast(self.cache.void_ptr_t) + ) diff --git a/js/src/gdb/mozilla/Interpreter.py b/js/src/gdb/mozilla/Interpreter.py index aae7d90881f5d2..febf92b86debbc 100644 --- a/js/src/gdb/mozilla/Interpreter.py +++ b/js/src/gdb/mozilla/Interpreter.py @@ -15,22 +15,22 @@ class InterpreterTypeCache(object): # Cache information about the Interpreter types for this objfile. 
def __init__(self): - self.tValue = gdb.lookup_type('JS::Value') - self.tJSOp = gdb.lookup_type('JSOp') + self.tValue = gdb.lookup_type("JS::Value") + self.tJSOp = gdb.lookup_type("JSOp") try: - self.tScriptFrameIterData = gdb.lookup_type('js::ScriptFrameIter::Data') + self.tScriptFrameIterData = gdb.lookup_type("js::ScriptFrameIter::Data") except gdb.error: # Work around problem with gcc optimized debuginfo where it doesn't # seem to be able to see that ScriptFrameIter inherits the # FrameIter::Data type. - self.tScriptFrameIterData = gdb.lookup_type('js::FrameIter::Data') - self.tInterpreterFrame = gdb.lookup_type('js::InterpreterFrame') - self.tBaselineFrame = gdb.lookup_type('js::jit::BaselineFrame') - self.tRematerializedFrame = gdb.lookup_type('js::jit::RematerializedFrame') - self.tDebugFrame = gdb.lookup_type('js::wasm::DebugFrame') + self.tScriptFrameIterData = gdb.lookup_type("js::FrameIter::Data") + self.tInterpreterFrame = gdb.lookup_type("js::InterpreterFrame") + self.tBaselineFrame = gdb.lookup_type("js::jit::BaselineFrame") + self.tRematerializedFrame = gdb.lookup_type("js::jit::RematerializedFrame") + self.tDebugFrame = gdb.lookup_type("js::wasm::DebugFrame") -@pretty_printer('js::InterpreterRegs') +@pretty_printer("js::InterpreterRegs") class InterpreterRegs(object): def __init__(self, value, cache): self.value = value @@ -43,14 +43,14 @@ def __init__(self, value, cache): # you get the current level of indentation?), so we don't even bother # trying. No 'children', just 'to_string'. def to_string(self): - fp_ = 'fp_ = {}'.format(self.value['fp_']) - slots = (self.value['fp_'] + 1).cast(self.itc.tValue.pointer()) - sp = 'sp = fp_.slots() + {}'.format(self.value['sp'] - slots) - pc = 'pc = {}'.format(self.value['pc']) - return '{{ {}, {}, {} }}'.format(fp_, sp, pc) + fp_ = "fp_ = {}".format(self.value["fp_"]) + slots = (self.value["fp_"] + 1).cast(self.itc.tValue.pointer()) + sp = "sp = fp_.slots() + {}".format(self.value["sp"] - slots) + pc = "pc = {}".format(self.value["pc"]) + return "{{ {}, {}, {} }}".format(fp_, sp, pc) -@pretty_printer('js::AbstractFramePtr') +@pretty_printer("js::AbstractFramePtr") class AbstractFramePtr(object): Tag_ScriptFrameIterData = 0x0 Tag_InterpreterFrame = 0x1 @@ -67,27 +67,27 @@ def __init__(self, value, cache): self.itc = cache.mod_Interpreter def to_string(self): - ptr = self.value['ptr_'] + ptr = self.value["ptr_"] tag = ptr & AbstractFramePtr.TagMask ptr = ptr & ~AbstractFramePtr.TagMask if tag == AbstractFramePtr.Tag_ScriptFrameIterData: - label = 'js::ScriptFrameIter::Data' + label = "js::ScriptFrameIter::Data" ptr = ptr.cast(self.itc.tScriptFrameIterData.pointer()) if tag == AbstractFramePtr.Tag_InterpreterFrame: - label = 'js::InterpreterFrame' + label = "js::InterpreterFrame" ptr = ptr.cast(self.itc.tInterpreterFrame.pointer()) if tag == AbstractFramePtr.Tag_BaselineFrame: - label = 'js::jit::BaselineFrame' + label = "js::jit::BaselineFrame" ptr = ptr.cast(self.itc.tBaselineFrame.pointer()) if tag == AbstractFramePtr.Tag_RematerializedFrame: - label = 'js::jit::RematerializedFrame' + label = "js::jit::RematerializedFrame" ptr = ptr.cast(self.itc.tRematerializedFrame.pointer()) if tag == AbstractFramePtr.Tag_WasmDebugFrame: - label = 'js::wasm::DebugFrame' + label = "js::wasm::DebugFrame" ptr = ptr.cast(self.itc.tDebugFrame.pointer()) - return 'AbstractFramePtr (({} *) {})'.format(label, ptr) + return "AbstractFramePtr (({} *) {})".format(label, ptr) # Provide the ptr_ field as a child, so it prints after the pretty string # provided 
above. def children(self): - yield ('ptr_', self.value['ptr_']) + yield ("ptr_", self.value["ptr_"]) diff --git a/js/src/gdb/mozilla/IonGraph.py b/js/src/gdb/mozilla/IonGraph.py index 4d0aba36b864ae..b5f6a899b88666 100644 --- a/js/src/gdb/mozilla/IonGraph.py +++ b/js/src/gdb/mozilla/IonGraph.py @@ -36,7 +36,8 @@ def __getattr__(self, name): def initialize(self): self.d = {} - self.d['char'] = gdb.lookup_type('char') + self.d["char"] = gdb.lookup_type("char") + # Dummy class used to store the content of the type cache in the context of the # iongraph command, which uses the jsvmLSprinter. @@ -56,18 +57,18 @@ def __init__(self, value, cache): self.cache = cache.mod_IonGraph def to_string(self): - next = self.value['head_'] - tail = self.value['tail_'] + next = self.value["head_"] + tail = self.value["tail_"] if next == 0: return "" res = "" while next != tail: chars = (next + 1).cast(self.cache.char.pointer()) - res = res + chars.string('ascii', 'ignore', next['length']) - next = next['next'] - length = next['length'] - self.value['unused_'] + res = res + chars.string("ascii", "ignore", next["length"]) + next = next["next"] + length = next["length"] - self.value["unused_"] chars = (next + 1).cast(self.cache.char.pointer()) - res = res + chars.string('ascii', 'ignore', length) + res = res + chars.string("ascii", "ignore", length) return res @@ -94,7 +95,8 @@ def get_show_string(self, value): def __init__(self): super(IonGraphBinParameter, self).__init__( - "iongraph-bin", gdb.COMMAND_SUPPORT, gdb.PARAM_FILENAME) + "iongraph-bin", gdb.COMMAND_SUPPORT, gdb.PARAM_FILENAME + ) self.value = os.getenv("GDB_IONGRAPH", "") if self.value == "": self.value = search_in_path("iongraph") @@ -111,7 +113,9 @@ def get_show_string(self, value): return "Path to dot binary set to: %s" % value def __init__(self): - super(DotBinParameter, self).__init__("dot-bin", gdb.COMMAND_SUPPORT, gdb.PARAM_FILENAME) + super(DotBinParameter, self).__init__( + "dot-bin", gdb.COMMAND_SUPPORT, gdb.PARAM_FILENAME + ) self.value = os.getenv("GDB_DOT", "") if self.value == "": self.value = search_in_path("dot") @@ -129,7 +133,8 @@ def get_show_string(self): def __init__(self): super(PngViewerBinParameter, self).__init__( - "pngviewer-bin", gdb.COMMAND_SUPPORT, gdb.PARAM_FILENAME) + "pngviewer-bin", gdb.COMMAND_SUPPORT, gdb.PARAM_FILENAME + ) self.value = os.getenv("GDB_PNGVIEWER", "") if self.value == "": self.value = search_in_path("xdg-open") @@ -147,7 +152,8 @@ class IonGraphCommand(gdb.Command): def __init__(self): super(IonGraphCommand, self).__init__( - "iongraph", gdb.COMMAND_DATA, gdb.COMPLETE_EXPRESSION) + "iongraph", gdb.COMMAND_DATA, gdb.COMPLETE_EXPRESSION + ) self.typeCache = ModuleCache() def invoke(self, mirGenExpr, from_tty): @@ -160,26 +166,68 @@ def invoke(self, mirGenExpr, from_tty): # jsonPrinter (containing the result of the output), and the jsonSpewer # (continaining methods for spewing the graph). mirGen = gdb.parse_and_eval(mirGenExpr) - jsonPrinter = mirGen['gs_']['jsonPrinter_'] - jsonSpewer = mirGen['gs_']['jsonSpewer_'] - graph = mirGen['graph_'] + jsonPrinter = mirGen["gs_"]["jsonPrinter_"] + jsonSpewer = mirGen["gs_"]["jsonSpewer_"] + graph = mirGen["graph_"] # These commands are doing side-effects which are saving the state of # the compiled code on the LSprinter dedicated for logging. Fortunately, # if you are using these gdb command, this probably means that other # ways of getting this content failed you already, so making a mess in # these logging strings should not cause much issues. 
- gdb.parse_and_eval('(*(%s*)(%s)).clear()' % (jsonPrinter.type, jsonPrinter.address,)) - gdb.parse_and_eval('(*(%s*)(%s)).beginFunction((JSScript*)0)' % - (jsonSpewer.type, jsonSpewer.address,)) - gdb.parse_and_eval('(*(%s*)(%s)).beginPass("gdb")' % - (jsonSpewer.type, jsonSpewer.address,)) - gdb.parse_and_eval('(*(%s*)(%s)).spewMIR((%s)%s)' % - (jsonSpewer.type, jsonSpewer.address, graph.type, graph,)) - gdb.parse_and_eval('(*(%s*)(%s)).spewLIR((%s)%s)' % - (jsonSpewer.type, jsonSpewer.address, graph.type, graph,)) - gdb.parse_and_eval('(*(%s*)(%s)).endPass()' % (jsonSpewer.type, jsonSpewer.address,)) - gdb.parse_and_eval('(*(%s*)(%s)).endFunction()' % (jsonSpewer.type, jsonSpewer.address,)) + gdb.parse_and_eval( + "(*(%s*)(%s)).clear()" + % ( + jsonPrinter.type, + jsonPrinter.address, + ) + ) + gdb.parse_and_eval( + "(*(%s*)(%s)).beginFunction((JSScript*)0)" + % ( + jsonSpewer.type, + jsonSpewer.address, + ) + ) + gdb.parse_and_eval( + '(*(%s*)(%s)).beginPass("gdb")' + % ( + jsonSpewer.type, + jsonSpewer.address, + ) + ) + gdb.parse_and_eval( + "(*(%s*)(%s)).spewMIR((%s)%s)" + % ( + jsonSpewer.type, + jsonSpewer.address, + graph.type, + graph, + ) + ) + gdb.parse_and_eval( + "(*(%s*)(%s)).spewLIR((%s)%s)" + % ( + jsonSpewer.type, + jsonSpewer.address, + graph.type, + graph, + ) + ) + gdb.parse_and_eval( + "(*(%s*)(%s)).endPass()" + % ( + jsonSpewer.type, + jsonSpewer.address, + ) + ) + gdb.parse_and_eval( + "(*(%s*)(%s)).endFunction()" + % ( + jsonSpewer.type, + jsonSpewer.address, + ) + ) # Dump the content of the LSprinter containing the JSON view of the # graph into a python string. @@ -203,13 +251,15 @@ def displayMIRGraph(self, jsonStr): # start all processes in a shell-like equivalent of: # iongraph < json | dot > tmp.png; xdg-open tmp.png - i = subprocess.Popen([iongraph.value, '--funcnum', '0', '--passnum', '0', - '--out-mir', '-', '-'], stdin=subprocess.PIPE, - stdout=subprocess.PIPE) - d = subprocess.Popen([dot.value, '-Tpng'], stdin=i.stdout, stdout=png) + i = subprocess.Popen( + [iongraph.value, "--funcnum", "0", "--passnum", "0", "--out-mir", "-", "-"], + stdin=subprocess.PIPE, + stdout=subprocess.PIPE, + ) + d = subprocess.Popen([dot.value, "-Tpng"], stdin=i.stdout, stdout=png) # Write the json file as the input of the iongraph command. 
- i.stdin.write(jsonStr.encode('utf8')) + i.stdin.write(jsonStr.encode("utf8")) i.stdin.close() i.stdout.close() diff --git a/js/src/gdb/mozilla/JSObject.py b/js/src/gdb/mozilla/JSObject.py index 35655381f97d7a..282b47a5d16741 100644 --- a/js/src/gdb/mozilla/JSObject.py +++ b/js/src/gdb/mozilla/JSObject.py @@ -16,15 +16,18 @@ class JSObjectTypeCache(object): def __init__(self, value, cache): - baseshape_flags = gdb.lookup_type('js::BaseShape::Flag') - self.flag_DELEGATE = prettyprinters.enum_value(baseshape_flags, 'js::BaseShape::DELEGATE') - self.func_ptr_type = gdb.lookup_type('JSFunction').pointer() - self.class_NON_NATIVE = gdb.parse_and_eval('JSClass::NON_NATIVE') - self.NativeObject_ptr_t = gdb.lookup_type('js::NativeObject').pointer() - self.BaseShape_ptr_t = gdb.lookup_type('js::BaseShape').pointer() - self.Shape_ptr_t = gdb.lookup_type('js::Shape').pointer() - self.ObjectGroup_ptr_t = gdb.lookup_type('js::ObjectGroup').pointer() - self.JSClass_ptr_t = gdb.lookup_type('JSClass').pointer() + baseshape_flags = gdb.lookup_type("js::BaseShape::Flag") + self.flag_DELEGATE = prettyprinters.enum_value( + baseshape_flags, "js::BaseShape::DELEGATE" + ) + self.func_ptr_type = gdb.lookup_type("JSFunction").pointer() + self.class_NON_NATIVE = gdb.parse_and_eval("JSClass::NON_NATIVE") + self.NativeObject_ptr_t = gdb.lookup_type("js::NativeObject").pointer() + self.BaseShape_ptr_t = gdb.lookup_type("js::BaseShape").pointer() + self.Shape_ptr_t = gdb.lookup_type("js::Shape").pointer() + self.ObjectGroup_ptr_t = gdb.lookup_type("js::ObjectGroup").pointer() + self.JSClass_ptr_t = gdb.lookup_type("JSClass").pointer() + # There should be no need to register this for JSFunction as well, since we # search for pretty-printers under the names of base classes, and @@ -34,7 +37,7 @@ def __init__(self, value, cache): gdb_string_regexp = re.compile(r'(?:0x[0-9a-z]+ )?(?:<.*> )?"(.*)"', re.I) -@ptr_pretty_printer('JSObject') +@ptr_pretty_printer("JSObject") class JSObjectPtrOrRef(prettyprinters.Pointer): def __init__(self, value, cache): super(JSObjectPtrOrRef, self).__init__(value, cache) @@ -45,36 +48,39 @@ def __init__(self, value, cache): def summary(self): group = get_header_ptr(self.value, self.otc.ObjectGroup_ptr_t) classp = get_header_ptr(group, self.otc.JSClass_ptr_t) - non_native = classp['flags'] & self.otc.class_NON_NATIVE + non_native = classp["flags"] & self.otc.class_NON_NATIVE # Use GDB to format the class name, but then strip off the address # and the quotes. 
- class_name = str(classp['name']) + class_name = str(classp["name"]) m = gdb_string_regexp.match(class_name) if m: class_name = m.group(1) if non_native: - return '[object {}]'.format(class_name) + return "[object {}]".format(class_name) else: native = self.value.cast(self.otc.NativeObject_ptr_t) - shape = deref(native['shape_']) + shape = deref(native["shape_"]) baseshape = get_header_ptr(shape, self.otc.BaseShape_ptr_t) - flags = baseshape['flags'] + flags = baseshape["flags"] is_delegate = bool(flags & self.otc.flag_DELEGATE) name = None - if class_name == 'Function': + if class_name == "Function": function = self.value concrete_type = function.type.strip_typedefs() if concrete_type.code == gdb.TYPE_CODE_REF: function = function.address function = function.cast(self.otc.func_ptr_type) - atom = deref(function['atom_']) - name = str(atom) if atom else '' - return '[object {}{}]{}'.format(class_name, - ' ' + name if name else '', - ' delegate' if is_delegate else '') + atom = deref(function["atom_"]) + name = str(atom) if atom else "" + return "[object {}{}]{}".format( + class_name, + " " + name if name else "", + " delegate" if is_delegate else "", + ) -@ref_pretty_printer('JSObject') -def JSObjectRef(value, cache): return JSObjectPtrOrRef(value, cache) +@ref_pretty_printer("JSObject") +def JSObjectRef(value, cache): + return JSObjectPtrOrRef(value, cache) diff --git a/js/src/gdb/mozilla/JSString.py b/js/src/gdb/mozilla/JSString.py index 88cfd34b2aa4e0..121480b1f8e71b 100644 --- a/js/src/gdb/mozilla/JSString.py +++ b/js/src/gdb/mozilla/JSString.py @@ -22,11 +22,11 @@ class JSStringTypeCache(object): # Cache information about the JSString type for this objfile. def __init__(self, cache): dummy = gdb.Value(0).cast(cache.JSString_ptr_t) - self.ATOM_BIT = dummy['ATOM_BIT'] - self.LINEAR_BIT = dummy['LINEAR_BIT'] - self.INLINE_CHARS_BIT = dummy['INLINE_CHARS_BIT'] - self.TYPE_FLAGS_MASK = dummy['TYPE_FLAGS_MASK'] - self.LATIN1_CHARS_BIT = dummy['LATIN1_CHARS_BIT'] + self.ATOM_BIT = dummy["ATOM_BIT"] + self.LINEAR_BIT = dummy["LINEAR_BIT"] + self.INLINE_CHARS_BIT = dummy["INLINE_CHARS_BIT"] + self.TYPE_FLAGS_MASK = dummy["TYPE_FLAGS_MASK"] + self.LATIN1_CHARS_BIT = dummy["LATIN1_CHARS_BIT"] class Common(mozilla.prettyprinters.Pointer): @@ -43,42 +43,42 @@ def display_hint(self): return "string" def chars(self): - d = self.value['d'] + d = self.value["d"] length, flags = get_header_length_and_flags(self.value, self.cache) corrupt = { - 0x2f2f2f2f: 'JS_FRESH_NURSERY_PATTERN', - 0x2b2b2b2b: 'JS_SWEPT_NURSERY_PATTERN', - 0xe5e5e5e5: 'jemalloc freed memory', - }.get(flags & 0xffffffff) + 0x2F2F2F2F: "JS_FRESH_NURSERY_PATTERN", + 0x2B2B2B2B: "JS_SWEPT_NURSERY_PATTERN", + 0xE5E5E5E5: "jemalloc freed memory", + }.get(flags & 0xFFFFFFFF) if corrupt: for ch in "" % corrupt: yield ch return is_rope = (flags & self.stc.LINEAR_BIT) == 0 if is_rope: - for c in JSStringPtr(d['s']['u2']['left'], self.cache).chars(): + for c in JSStringPtr(d["s"]["u2"]["left"], self.cache).chars(): yield c - for c in JSStringPtr(d['s']['u3']['right'], self.cache).chars(): + for c in JSStringPtr(d["s"]["u3"]["right"], self.cache).chars(): yield c else: is_inline = (flags & self.stc.INLINE_CHARS_BIT) != 0 is_latin1 = (flags & self.stc.LATIN1_CHARS_BIT) != 0 if is_inline: if is_latin1: - chars = d['inlineStorageLatin1'] + chars = d["inlineStorageLatin1"] else: - chars = d['inlineStorageTwoByte'] + chars = d["inlineStorageTwoByte"] else: if is_latin1: - chars = d['s']['u2']['nonInlineCharsLatin1'] + chars = 
d["s"]["u2"]["nonInlineCharsLatin1"] else: - chars = d['s']['u2']['nonInlineCharsTwoByte'] + chars = d["s"]["u2"]["nonInlineCharsTwoByte"] for i in range(int(length)): yield chars[i] def to_string(self, maxlen=200): - s = '' + s = "" invalid_chars_allowed = 2 for c in self.chars(): if len(s) >= maxlen: @@ -94,7 +94,7 @@ def to_string(self, maxlen=200): break else: invalid_chars_allowed -= 1 - s += "\\x%04x" % (c & 0xffff) + s += "\\x%04x" % (c & 0xFFFF) return s diff --git a/js/src/gdb/mozilla/JSSymbol.py b/js/src/gdb/mozilla/JSSymbol.py index 37be8b9697a48b..06b354404d100c 100644 --- a/js/src/gdb/mozilla/JSSymbol.py +++ b/js/src/gdb/mozilla/JSSymbol.py @@ -12,9 +12,9 @@ mozilla.prettyprinters.clear_module_printers(__name__) # JS::SymbolCode enumerators -PrivateNameSymbol = 0xfffffffd -InSymbolRegistry = 0xfffffffe -UniqueSymbol = 0xffffffff +PrivateNameSymbol = 0xFFFFFFFD +InSymbolRegistry = 0xFFFFFFFE +UniqueSymbol = 0xFFFFFFFF @ptr_pretty_printer("JS::Symbol") @@ -24,7 +24,7 @@ def __init__(self, value, cache): self.value = value def to_string(self): - code = int(self.value['code_']) & 0xffffffff + code = int(self.value["code_"]) & 0xFFFFFFFF desc = str(get_header_ptr(self.value, self.cache.JSString_ptr_t)) if code == InSymbolRegistry: return "Symbol.for({})".format(desc) diff --git a/js/src/gdb/mozilla/PropertyKey.py b/js/src/gdb/mozilla/PropertyKey.py index 8cb63ac1b3e14c..5e7dc9c8113a50 100644 --- a/js/src/gdb/mozilla/PropertyKey.py +++ b/js/src/gdb/mozilla/PropertyKey.py @@ -13,7 +13,7 @@ mozilla.prettyprinters.clear_module_printers(__name__) -@pretty_printer('JS::PropertyKey') +@pretty_printer("JS::PropertyKey") class PropertyKey(object): # Since people don't always build with macro debugging info, I can't # think of any way to avoid copying these values here, short of using @@ -32,7 +32,7 @@ def __init__(self, value, cache): self.concrete_type = self.value.type.strip_typedefs() def to_string(self): - bits = self.value['asBits'] + bits = self.value["asBits"] tag = bits & PropertyKey.TYPE_MASK if tag == PropertyKey.TYPE_STRING: body = bits.cast(self.cache.JSString_ptr_t) @@ -41,27 +41,26 @@ def to_string(self): elif tag == PropertyKey.TYPE_VOID: return "JSID_VOID" elif tag == PropertyKey.TYPE_SYMBOL: - body = ((bits & ~PropertyKey.TYPE_MASK) - .cast(self.cache.JSSymbol_ptr_t)) + body = (bits & ~PropertyKey.TYPE_MASK).cast(self.cache.JSSymbol_ptr_t) elif tag == PropertyKey.TYPE_EMPTY: return "JSID_EMPTY" else: body = "" - return '$jsid(%s)' % (body,) + return "$jsid(%s)" % (body,) -@pretty_printer('JS::Rooted') +@pretty_printer("JS::Rooted") def RootedPropertyKey(value, cache): # Hard-code the referent type pretty-printer for PropertyKey roots and # handles. See the comment for mozilla.Root.Common.__init__. return mozilla.Root.Rooted(value, cache, PropertyKey) -@pretty_printer('JS::Handle') +@pretty_printer("JS::Handle") def HandlePropertyKey(value, cache): return mozilla.Root.Handle(value, cache, PropertyKey) -@pretty_printer('JS::MutableHandle') +@pretty_printer("JS::MutableHandle") def MutableHandlePropertyKey(value, cache): return mozilla.Root.MutableHandle(value, cache, PropertyKey) diff --git a/js/src/gdb/mozilla/Root.py b/js/src/gdb/mozilla/Root.py index 1fcd2508bd5cf4..fa80f3fec640ae 100644 --- a/js/src/gdb/mozilla/Root.py +++ b/js/src/gdb/mozilla/Root.py @@ -18,7 +18,7 @@ class Common(object): # there's not much to it. # The name of the template member holding the referent. - member = 'ptr' + member = "ptr" # If True, this is a handle type, and should be dereferenced. 
If False, # the template member holds the referent directly. @@ -82,21 +82,23 @@ class MutableHandle(Common): @template_pretty_printer("js::BarrieredBase") class BarrieredBase(Common): - member = 'value' + member = "value" def deref(root): # Return the referent of a HeapPtr, Rooted, or Handle. tag = root.type.strip_typedefs().tag if not tag: - raise TypeError("Can't dereference type with no structure tag: %s" % (root.type,)) - elif tag.startswith('js::HeapPtr<'): - return root['value'] - elif tag.startswith('JS::Rooted<'): - return root['ptr'] - elif tag.startswith('JS::Handle<'): - return root['ptr'] - elif tag.startswith('js::GCPtr<'): - return root['value'] + raise TypeError( + "Can't dereference type with no structure tag: %s" % (root.type,) + ) + elif tag.startswith("js::HeapPtr<"): + return root["value"] + elif tag.startswith("JS::Rooted<"): + return root["ptr"] + elif tag.startswith("JS::Handle<"): + return root["ptr"] + elif tag.startswith("js::GCPtr<"): + return root["value"] else: raise NotImplementedError("Unrecognized tag: " + tag) diff --git a/js/src/gdb/mozilla/asmjs.py b/js/src/gdb/mozilla/asmjs.py index c7445df7a19eda..85a19a5cfc7734 100644 --- a/js/src/gdb/mozilla/asmjs.py +++ b/js/src/gdb/mozilla/asmjs.py @@ -16,19 +16,23 @@ def on_stop(event): - if isinstance(event, gdb.SignalEvent) and event.stop_signal == 'SIGSEGV': + if isinstance(event, gdb.SignalEvent) and event.stop_signal == "SIGSEGV": # Allocate memory for sigaction, once per js shell process. process = gdb.selected_inferior() buf = sigaction_buffers.get(process) if buf is None: - buf = gdb.parse_and_eval("(struct sigaction *) malloc(sizeof(struct sigaction))") + buf = gdb.parse_and_eval( + "(struct sigaction *) malloc(sizeof(struct sigaction))" + ) sigaction_buffers[process] = buf # See if WasmFaultHandler is installed as the SIGSEGV signal action. - sigaction_fn = gdb.parse_and_eval('(void(*)(int,void*,void*))__sigaction').dereference() + sigaction_fn = gdb.parse_and_eval( + "(void(*)(int,void*,void*))__sigaction" + ).dereference() sigaction_fn(SIGSEGV, 0, buf) WasmTrapHandler = gdb.parse_and_eval("WasmTrapHandler") - if buf['__sigaction_handler']['sa_handler'] == WasmTrapHandler: + if buf["__sigaction_handler"]["sa_handler"] == WasmTrapHandler: # Advise the user that magic is happening. print("js/src/gdb/mozilla/asmjs.py: Allowing WasmTrapHandler to run.") diff --git a/js/src/gdb/mozilla/jitsrc.py b/js/src/gdb/mozilla/jitsrc.py index 6e629e7bf0f23a..ee206878b350d9 100644 --- a/js/src/gdb/mozilla/jitsrc.py +++ b/js/src/gdb/mozilla/jitsrc.py @@ -25,21 +25,53 @@ # # If an invocation of `jitsrc` stops in the middle of a memcpy, the solution # is normally to add a new pattern here. 
-patterns = [("__memmove_avx_unaligned_erms", 1, - "js::jit::X86Encoding::BaseAssembler::executableCopy", "src", "dst"), - ("__memcpy_avx_unaligned", 1, "js::jit::X86Encoding::BaseAssembler::executableCopy", - "src", "dst"), - ("__memmove_avx_unaligned_erms", 1, "arena_t::RallocSmallOrLarge", "aPtr", "ret"), - ("__memcpy_avx_unaligned", 1, "arena_t::RallocSmallOrLarge", "aPtr", "ret"), - ("mozilla::detail::VectorImpl<.*>::new_<.*>", 3, - "mozilla::Vector<.*>::convertToHeapStorage", "beginNoCheck()", "newBuf"), - ("__memmove_avx_unaligned_erms", 1, "js::jit::AssemblerBufferWithConstantPools", - "&cur->instructions[0]", "dest"), - ("__memcpy_sse2_unaligned", 1, "js::jit::AssemblerBufferWithConstantPools", - "&cur->instructions[0]", "dest"), - ("__memcpy_sse2_unaligned", 2, "js::jit::AssemblerX86Shared::executableCopy", - "masm.m_formatter.m_buffer.m_buffer.mBegin", "buffer"), - ("__memcpy_sse2_unaligned", 1, "arena_t::RallocSmallOrLarge", "aPtr", "ret")] +patterns = [ + ( + "__memmove_avx_unaligned_erms", + 1, + "js::jit::X86Encoding::BaseAssembler::executableCopy", + "src", + "dst", + ), + ( + "__memcpy_avx_unaligned", + 1, + "js::jit::X86Encoding::BaseAssembler::executableCopy", + "src", + "dst", + ), + ("__memmove_avx_unaligned_erms", 1, "arena_t::RallocSmallOrLarge", "aPtr", "ret"), + ("__memcpy_avx_unaligned", 1, "arena_t::RallocSmallOrLarge", "aPtr", "ret"), + ( + "mozilla::detail::VectorImpl<.*>::new_<.*>", + 3, + "mozilla::Vector<.*>::convertToHeapStorage", + "beginNoCheck()", + "newBuf", + ), + ( + "__memmove_avx_unaligned_erms", + 1, + "js::jit::AssemblerBufferWithConstantPools", + "&cur->instructions[0]", + "dest", + ), + ( + "__memcpy_sse2_unaligned", + 1, + "js::jit::AssemblerBufferWithConstantPools", + "&cur->instructions[0]", + "dest", + ), + ( + "__memcpy_sse2_unaligned", + 2, + "js::jit::AssemblerX86Shared::executableCopy", + "masm.m_formatter.m_buffer.m_buffer.mBegin", + "buffer", + ), + ("__memcpy_sse2_unaligned", 1, "arena_t::RallocSmallOrLarge", "aPtr", "ret"), +] class JitSource(gdb.Command): @@ -77,12 +109,11 @@ def next_address(self, old): return None def runback(self, address): - b = gdb.Breakpoint("*" + address, - type=gdb.BP_WATCHPOINT, - wp_class=gdb.WP_WRITE, - internal=True) + b = gdb.Breakpoint( + "*" + address, type=gdb.BP_WATCHPOINT, wp_class=gdb.WP_WRITE, internal=True + ) while b.hit_count == 0: - gdb.execute('rc', to_string=True) + gdb.execute("rc", to_string=True) b.delete() def invoke(self, arg, from_tty): diff --git a/js/src/gdb/mozilla/jsop.py b/js/src/gdb/mozilla/jsop.py index e87d71cac50820..ae21aad5f638b8 100644 --- a/js/src/gdb/mozilla/jsop.py +++ b/js/src/gdb/mozilla/jsop.py @@ -16,7 +16,7 @@ class JSOpTypeCache(object): # Cache information about the JSOp type for this objfile. 
def __init__(self, cache): - self.tJSOp = gdb.lookup_type('JSOp') + self.tJSOp = gdb.lookup_type("JSOp") @classmethod def get_or_create(cls, cache): @@ -25,7 +25,7 @@ def get_or_create(cls, cache): return cache.mod_JSOp -@pretty_printer('JSOp') +@pretty_printer("JSOp") class JSOp(object): def __init__(self, value, cache): self.value = value @@ -45,7 +45,7 @@ def to_string(self): return "(JSOp) {:d}".format(idx) -@ptr_pretty_printer('jsbytecode') +@ptr_pretty_printer("jsbytecode") class JSBytecodePtr(mozilla.prettyprinters.Pointer): def __init__(self, value, cache): super(JSBytecodePtr, self).__init__(value, cache) @@ -55,5 +55,5 @@ def to_string(self): try: opcode = str(self.value.dereference().cast(self.jotc.tJSOp)) except Exception: - opcode = 'bad pc' - return '{} ({})'.format(self.value.cast(self.cache.void_ptr_t), opcode) + opcode = "bad pc" + return "{} ({})".format(self.value.cast(self.cache.void_ptr_t), opcode) diff --git a/js/src/gdb/mozilla/jsval.py b/js/src/gdb/mozilla/jsval.py index 64a31cf5d93ce7..022e59b3631dbe 100644 --- a/js/src/gdb/mozilla/jsval.py +++ b/js/src/gdb/mozilla/jsval.py @@ -84,17 +84,20 @@ def __init__(self, asBits, jtc): self.asBits = self.asBits + (1 << 64) # Return this value's type tag. - def tag(self): raise NotImplementedError + def tag(self): + raise NotImplementedError # Return this value as a 32-bit integer, double, or address. - def as_uint32(self): raise NotImplementedError + def as_uint32(self): + raise NotImplementedError def as_double(self): packed = struct.pack("q", self.asBits) (unpacked,) = struct.unpack("d", packed) return unpacked - def as_address(self): raise NotImplementedError + def as_address(self): + raise NotImplementedError class Punbox(Box): @@ -106,8 +109,8 @@ class Punbox(Box): TAG_SHIFT = 47 PAYLOAD_MASK = (1 << TAG_SHIFT) - 1 TAG_MASK = (1 << (FULL_WIDTH - TAG_SHIFT)) - 1 - TAG_MAX_DOUBLE = 0x1fff0 - TAG_TYPE_MASK = 0x0000f + TAG_MAX_DOUBLE = 0x1FFF0 + TAG_TYPE_MASK = 0x0000F def tag(self): tag = self.asBits >> Punbox.TAG_SHIFT @@ -116,16 +119,18 @@ def tag(self): else: return tag & Punbox.TAG_TYPE_MASK - def as_uint32(self): return int(self.asBits & ((1 << 32) - 1)) + def as_uint32(self): + return int(self.asBits & ((1 << 32) - 1)) - def as_address(self): return gdb.Value(self.asBits & Punbox.PAYLOAD_MASK) + def as_address(self): + return gdb.Value(self.asBits & Punbox.PAYLOAD_MASK) class Nunbox(Box): TAG_SHIFT = 32 - TAG_CLEAR = 0xffff0000 - PAYLOAD_MASK = 0xffffffff - TAG_TYPE_MASK = 0x0000000f + TAG_CLEAR = 0xFFFF0000 + PAYLOAD_MASK = 0xFFFFFFFF + TAG_TYPE_MASK = 0x0000000F def tag(self): tag = self.asBits >> Nunbox.TAG_SHIFT @@ -133,9 +138,11 @@ def tag(self): return self.jtc.DOUBLE return tag & Nunbox.TAG_TYPE_MASK - def as_uint32(self): return int(self.asBits & Nunbox.PAYLOAD_MASK) + def as_uint32(self): + return int(self.asBits & Nunbox.PAYLOAD_MASK) - def as_address(self): return gdb.Value(self.asBits & Nunbox.PAYLOAD_MASK) + def as_address(self): + return gdb.Value(self.asBits & Nunbox.PAYLOAD_MASK) class JSValueTypeCache(object): @@ -143,7 +150,7 @@ class JSValueTypeCache(object): def __init__(self, cache): # Capture the tag values. - d = gdb.types.make_enum_dict(gdb.lookup_type('JSValueType')) + d = gdb.types.make_enum_dict(gdb.lookup_type("JSValueType")) # The enum keys are prefixed when building with some compilers (clang at # a minimum), so use a helper function to handle either key format. 
@@ -151,22 +158,22 @@ def get(key): val = d.get(key) if val is not None: return val - return d['JSValueType::' + key] - - self.DOUBLE = get('JSVAL_TYPE_DOUBLE') - self.INT32 = get('JSVAL_TYPE_INT32') - self.UNDEFINED = get('JSVAL_TYPE_UNDEFINED') - self.BOOLEAN = get('JSVAL_TYPE_BOOLEAN') - self.MAGIC = get('JSVAL_TYPE_MAGIC') - self.STRING = get('JSVAL_TYPE_STRING') - self.SYMBOL = get('JSVAL_TYPE_SYMBOL') - self.BIGINT = get('JSVAL_TYPE_BIGINT') - self.NULL = get('JSVAL_TYPE_NULL') - self.OBJECT = get('JSVAL_TYPE_OBJECT') + return d["JSValueType::" + key] + + self.DOUBLE = get("JSVAL_TYPE_DOUBLE") + self.INT32 = get("JSVAL_TYPE_INT32") + self.UNDEFINED = get("JSVAL_TYPE_UNDEFINED") + self.BOOLEAN = get("JSVAL_TYPE_BOOLEAN") + self.MAGIC = get("JSVAL_TYPE_MAGIC") + self.STRING = get("JSVAL_TYPE_STRING") + self.SYMBOL = get("JSVAL_TYPE_SYMBOL") + self.BIGINT = get("JSVAL_TYPE_BIGINT") + self.NULL = get("JSVAL_TYPE_NULL") + self.OBJECT = get("JSVAL_TYPE_OBJECT") # Let self.magic_names be an array whose i'th element is the name of # the i'th magic value. - d = gdb.types.make_enum_dict(gdb.lookup_type('JSWhyMagic')) + d = gdb.types.make_enum_dict(gdb.lookup_type("JSWhyMagic")) self.magic_names = list(range(max(d.values()) + 1)) for (k, v) in d.items(): self.magic_names[v] = k @@ -175,7 +182,7 @@ def get(key): self.boxer = Punbox if cache.void_ptr_t.sizeof == 8 else Nunbox -@pretty_printer('JS::Value') +@pretty_printer("JS::Value") class JSValue(object): def __init__(self, value, cache): # Save the generic typecache, and create our own, if we haven't already. @@ -185,32 +192,32 @@ def __init__(self, value, cache): self.jtc = cache.mod_JS_Value self.value = value - self.box = self.jtc.boxer(value['asBits_'], self.jtc) + self.box = self.jtc.boxer(value["asBits_"], self.jtc) def to_string(self): tag = self.box.tag() if tag == self.jtc.UNDEFINED: - return '$JS::UndefinedValue()' + return "$JS::UndefinedValue()" if tag == self.jtc.NULL: - return '$JS::NullValue()' + return "$JS::NullValue()" if tag == self.jtc.BOOLEAN: - return '$JS::BooleanValue(%s)' % str(self.box.as_uint32() != 0).lower() + return "$JS::BooleanValue(%s)" % str(self.box.as_uint32() != 0).lower() if tag == self.jtc.MAGIC: value = self.box.as_uint32() if 0 <= value and value < len(self.jtc.magic_names): - return '$JS::MagicValue(%s)' % (self.jtc.magic_names[value],) + return "$JS::MagicValue(%s)" % (self.jtc.magic_names[value],) else: - return '$JS::MagicValue(%d)' % (value,) + return "$JS::MagicValue(%d)" % (value,) if tag == self.jtc.INT32: value = self.box.as_uint32() signbit = 1 << 31 value = (value ^ signbit) - signbit - return '$JS::Int32Value(%s)' % value + return "$JS::Int32Value(%s)" % value if tag == self.jtc.DOUBLE: - return '$JS::DoubleValue(%s)' % self.box.as_double() + return "$JS::DoubleValue(%s)" % self.box.as_double() if tag == self.jtc.STRING: value = self.box.as_address().cast(self.cache.JSString_ptr_t) @@ -219,7 +226,7 @@ def to_string(self): elif tag == self.jtc.SYMBOL: value = self.box.as_address().cast(self.cache.JSSymbol_ptr_t) elif tag == self.jtc.BIGINT: - return '$JS::BigIntValue()' + return "$JS::BigIntValue()" else: - value = 'unrecognized!' - return '$JS::Value(%s)' % (value,) + value = "unrecognized!" 
+ return "$JS::Value(%s)" % (value,) diff --git a/js/src/gdb/mozilla/prettyprinters.py b/js/src/gdb/mozilla/prettyprinters.py index c7754e13fbcb0d..2296068936c623 100644 --- a/js/src/gdb/mozilla/prettyprinters.py +++ b/js/src/gdb/mozilla/prettyprinters.py @@ -20,7 +20,7 @@ def check_for_reused_pretty_printer(fn): # 'enable/disable/info pretty-printer' commands are simply stored as # properties of the function objects themselves, so a single function # object can't carry the 'enabled' flags for two different printers.) - if hasattr(fn, 'enabled'): + if hasattr(fn, "enabled"): raise RuntimeError("pretty-printer function %r registered more than once" % fn) @@ -37,6 +37,7 @@ def add(fn): add_to_subprinter_list(fn, type_name) printers_by_tag[type_name] = fn return fn + return add @@ -54,6 +55,7 @@ def add(fn): add_to_subprinter_list(fn, "ptr-to-" + type_name) ptr_printers_by_tag[type_name] = fn return fn + return add @@ -71,6 +73,7 @@ def add(fn): add_to_subprinter_list(fn, "ref-to-" + type_name) ref_printers_by_tag[type_name] = fn return fn + return add @@ -85,9 +88,10 @@ def add(fn): def template_pretty_printer(template_name): def add(fn): check_for_reused_pretty_printer(fn) - add_to_subprinter_list(fn, 'instantiations-of-' + template_name) + add_to_subprinter_list(fn, "instantiations-of-" + template_name) template_printers_by_tag[template_name] = fn return fn + return add @@ -110,8 +114,10 @@ def add(fn): add_to_subprinter_list(fn, name) printers_by_regexp.append((compiled, fn)) return fn + return add + # Forget all pretty-printer lookup functions defined in the module name # |module_name|, if any exist. Use this at the top of each pretty-printer # module like this: @@ -166,18 +172,21 @@ def add_to_subprinter_list(subprinter, name): subprinter.enabled = True subprinters.append(subprinter) + # Remove |subprinter| from our list of all SpiderMonkey subprinters. def remove_from_subprinter_list(subprinter): subprinters.remove(subprinter) + # An exception class meaning, "This objfile has no SpiderMonkey in it." class NotSpiderMonkeyObjfileError(TypeError): pass + # TypeCache: a cache for frequently used information about an objfile. # # When a new SpiderMonkey objfile is loaded, we construct an instance of @@ -202,13 +211,13 @@ def __init__(self, objfile): # the objfile in whose scope lookups should occur. But simply # knowing that we need to lookup the types afresh is probably # enough. - self.void_t = gdb.lookup_type('void') + self.void_t = gdb.lookup_type("void") self.void_ptr_t = self.void_t.pointer() - self.uintptr_t = gdb.lookup_type('uintptr_t') + self.uintptr_t = gdb.lookup_type("uintptr_t") try: - self.JSString_ptr_t = gdb.lookup_type('JSString').pointer() - self.JSSymbol_ptr_t = gdb.lookup_type('JS::Symbol').pointer() - self.JSObject_ptr_t = gdb.lookup_type('JSObject').pointer() + self.JSString_ptr_t = gdb.lookup_type("JSString").pointer() + self.JSSymbol_ptr_t = gdb.lookup_type("JS::Symbol").pointer() + self.JSObject_ptr_t = gdb.lookup_type("JSObject").pointer() except gdb.error: raise NotSpiderMonkeyObjfileError @@ -221,6 +230,7 @@ def __init__(self, objfile): self.mod_ExecutableAllocator = None self.mod_IonGraph = None + # Yield a series of all the types that |t| implements, by following typedefs # and iterating over base classes. Specifically: # - |t| itself is the first value yielded. 
@@ -271,6 +281,7 @@ def is_struct_or_union(t): def is_struct_or_union_or_enum(t): return t.code in (gdb.TYPE_CODE_STRUCT, gdb.TYPE_CODE_UNION, gdb.TYPE_CODE_ENUM) + # Construct and return a pretty-printer lookup function for objfile, or # return None if the objfile doesn't contain SpiderMonkey code # (specifically, definitions for SpiderMonkey types). @@ -282,8 +293,10 @@ def lookup_for_objfile(objfile): cache = TypeCache(objfile) except NotSpiderMonkeyObjfileError: if gdb.parameter("verbose"): - gdb.write("objfile '%s' has no SpiderMonkey code; not registering pretty-printers\n" - % (objfile.filename,)) + gdb.write( + "objfile '%s' has no SpiderMonkey code; not registering pretty-printers\n" + % (objfile.filename,) + ) return None # Return a pretty-printer for |value|, if we have one. This is the lookup @@ -351,6 +364,7 @@ def check_table_by_type_name(table, t): return lookup + # A base class for pretty-printers for pointer values that handles null # pointers, by declining to construct a pretty-printer for them at all. # Derived classes may simply assume that self.value is non-null. @@ -385,19 +399,19 @@ def __init__(self, value, cache): def to_string(self): # See comment above. - assert not hasattr(self, 'display_hint') or self.display_hint() != 'string' + assert not hasattr(self, "display_hint") or self.display_hint() != "string" concrete_type = self.value.type.strip_typedefs() if concrete_type.code == gdb.TYPE_CODE_PTR: address = self.value.cast(self.cache.void_ptr_t) elif concrete_type.code == gdb.TYPE_CODE_REF: - address = '@' + str(self.value.address.cast(self.cache.void_ptr_t)) + address = "@" + str(self.value.address.cast(self.cache.void_ptr_t)) else: assert not "mozilla.prettyprinters.Pointer applied to bad value type" try: summary = self.summary() except gdb.MemoryError as r: summary = str(r) - v = '(%s) %s %s' % (self.value.type, address, summary) + v = "(%s) %s %s" % (self.value.type, address, summary) return v def summary(self): @@ -419,8 +433,14 @@ def enum_value(t, name): f = t[name] # Monkey-patching is a-okay in polyfills! Just because. if not field_enum_value: - if hasattr(f, 'enumval'): - def field_enum_value(f): return f.enumval + if hasattr(f, "enumval"): + + def field_enum_value(f): + return f.enumval + else: - def field_enum_value(f): return f.bitpos + + def field_enum_value(f): + return f.bitpos + return field_enum_value(f) diff --git a/js/src/gdb/mozilla/unwind.py b/js/src/gdb/mozilla/unwind.py index cb546d62d2cb62..8dd0ec83118bd1 100644 --- a/js/src/gdb/mozilla/unwind.py +++ b/js/src/gdb/mozilla/unwind.py @@ -40,18 +40,18 @@ def debug(something): # Maps frametype enum base names to corresponding class. 
SizeOfFramePrefix = { - 'FrameType::IonJS': 'ExitFrameLayout', - 'FrameType::BaselineJS': 'JitFrameLayout', - 'FrameType::BaselineStub': 'BaselineStubFrameLayout', - 'FrameType::IonStub': 'JitStubFrameLayout', - 'FrameType::CppToJSJit': 'JitFrameLayout', - 'FrameType::WasmToJSJit': 'JitFrameLayout', - 'FrameType::JSJitToWasm': 'JitFrameLayout', - 'FrameType::Rectifier': 'RectifierFrameLayout', - 'FrameType::IonAccessorIC': 'IonAccessorICFrameLayout', - 'FrameType::IonICCall': 'IonICCallFrameLayout', - 'FrameType::Exit': 'ExitFrameLayout', - 'FrameType::Bailout': 'JitFrameLayout', + "FrameType::IonJS": "ExitFrameLayout", + "FrameType::BaselineJS": "JitFrameLayout", + "FrameType::BaselineStub": "BaselineStubFrameLayout", + "FrameType::IonStub": "JitStubFrameLayout", + "FrameType::CppToJSJit": "JitFrameLayout", + "FrameType::WasmToJSJit": "JitFrameLayout", + "FrameType::JSJitToWasm": "JitFrameLayout", + "FrameType::Rectifier": "RectifierFrameLayout", + "FrameType::IonAccessorIC": "IonAccessorICFrameLayout", + "FrameType::IonICCall": "IonICCallFrameLayout", + "FrameType::Exit": "ExitFrameLayout", + "FrameType::Bailout": "JitFrameLayout", } @@ -88,54 +88,55 @@ def value(self, name): return long(gdb.lookup_symbol(name)[0].value()) def jit_value(self, name): - return self.value('js::jit::' + name) + return self.value("js::jit::" + name) def initialize(self): self.d = {} - self.d['FRAMETYPE_MASK'] = (1 << self.jit_value('FRAMETYPE_BITS')) - 1 - self.d['FRAMESIZE_SHIFT'] = self.jit_value('FRAMESIZE_SHIFT') - self.d['FRAME_HEADER_SIZE_SHIFT'] = self.jit_value('FRAME_HEADER_SIZE_SHIFT') - self.d['FRAME_HEADER_SIZE_MASK'] = self.jit_value('FRAME_HEADER_SIZE_MASK') + self.d["FRAMETYPE_MASK"] = (1 << self.jit_value("FRAMETYPE_BITS")) - 1 + self.d["FRAMESIZE_SHIFT"] = self.jit_value("FRAMESIZE_SHIFT") + self.d["FRAME_HEADER_SIZE_SHIFT"] = self.jit_value("FRAME_HEADER_SIZE_SHIFT") + self.d["FRAME_HEADER_SIZE_MASK"] = self.jit_value("FRAME_HEADER_SIZE_MASK") self.compute_frame_info() - commonFrameLayout = gdb.lookup_type('js::jit::CommonFrameLayout') - self.d['typeCommonFrameLayout'] = commonFrameLayout - self.d['typeCommonFrameLayoutPointer'] = commonFrameLayout.pointer() - self.d['per_tls_context'] = gdb.lookup_global_symbol('js::TlsContext') + commonFrameLayout = gdb.lookup_type("js::jit::CommonFrameLayout") + self.d["typeCommonFrameLayout"] = commonFrameLayout + self.d["typeCommonFrameLayoutPointer"] = commonFrameLayout.pointer() + self.d["per_tls_context"] = gdb.lookup_global_symbol("js::TlsContext") - self.d['void_starstar'] = gdb.lookup_type('void').pointer().pointer() + self.d["void_starstar"] = gdb.lookup_type("void").pointer().pointer() jitframe = gdb.lookup_type("js::jit::JitFrameLayout") - self.d['jitFrameLayoutPointer'] = jitframe.pointer() - - self.d['CalleeToken_Function'] = self.jit_value("CalleeToken_Function") - self.d['CalleeToken_FunctionConstructing'] = self.jit_value( - "CalleeToken_FunctionConstructing") - self.d['CalleeToken_Script'] = self.jit_value("CalleeToken_Script") - self.d['JSFunction'] = gdb.lookup_type("JSFunction").pointer() - self.d['JSScript'] = gdb.lookup_type("JSScript").pointer() - self.d['Value'] = gdb.lookup_type("JS::Value") - - self.d['SOURCE_SLOT'] = self.value('js::ScriptSourceObject::SOURCE_SLOT') - self.d['NativeObject'] = gdb.lookup_type("js::NativeObject").pointer() - self.d['HeapSlot'] = gdb.lookup_type("js::HeapSlot").pointer() - self.d['ScriptSource'] = gdb.lookup_type("js::ScriptSource").pointer() + self.d["jitFrameLayoutPointer"] = jitframe.pointer() + 
+ self.d["CalleeToken_Function"] = self.jit_value("CalleeToken_Function") + self.d["CalleeToken_FunctionConstructing"] = self.jit_value( + "CalleeToken_FunctionConstructing" + ) + self.d["CalleeToken_Script"] = self.jit_value("CalleeToken_Script") + self.d["JSFunction"] = gdb.lookup_type("JSFunction").pointer() + self.d["JSScript"] = gdb.lookup_type("JSScript").pointer() + self.d["Value"] = gdb.lookup_type("JS::Value") + + self.d["SOURCE_SLOT"] = self.value("js::ScriptSourceObject::SOURCE_SLOT") + self.d["NativeObject"] = gdb.lookup_type("js::NativeObject").pointer() + self.d["HeapSlot"] = gdb.lookup_type("js::HeapSlot").pointer() + self.d["ScriptSource"] = gdb.lookup_type("js::ScriptSource").pointer() # ProcessExecutableMemory, used to identify if a pc is in the section # pre-allocated by the JIT. - self.d['MaxCodeBytesPerProcess'] = self.jit_value('MaxCodeBytesPerProcess') - self.d['execMemory'] = gdb.lookup_symbol('::execMemory')[0].value() + self.d["MaxCodeBytesPerProcess"] = self.jit_value("MaxCodeBytesPerProcess") + self.d["execMemory"] = gdb.lookup_symbol("::execMemory")[0].value() # Compute maps related to jit frames. def compute_frame_info(self): - t = gdb.lookup_type('enum js::jit::FrameType') + t = gdb.lookup_type("enum js::jit::FrameType") for field in t.fields(): # Strip off "js::jit::", remains: "FrameType::*". name = field.name[9:] enumval = long(field.enumval) self.d[name] = enumval self.frame_enum_names[enumval] = name - class_type = gdb.lookup_type('js::jit::' + SizeOfFramePrefix[name]) + class_type = gdb.lookup_type("js::jit::" + SizeOfFramePrefix[name]) self.frame_class_types[enumval] = class_type.pointer() @@ -164,21 +165,23 @@ def __init__(self, base, info, cache): self.cache = cache def _decode_jitframe(self, this_frame): - calleetoken = long(this_frame['calleeToken_']) + calleetoken = long(this_frame["calleeToken_"]) tag = calleetoken & 3 calleetoken = calleetoken ^ tag function = None script = None - if (tag == self.cache.CalleeToken_Function or - tag == self.cache.CalleeToken_FunctionConstructing): + if ( + tag == self.cache.CalleeToken_Function + or tag == self.cache.CalleeToken_FunctionConstructing + ): fptr = gdb.Value(calleetoken).cast(self.cache.JSFunction) try: - atom = fptr['atom_'] + atom = fptr["atom_"] if atom: function = str(atom) except gdb.MemoryError: function = "(could not read function name)" - script = fptr['u']['scripted']['s']['script_'] + script = fptr["u"]["scripted"]["s"]["script_"] elif tag == self.cache.CalleeToken_Script: script = gdb.Value(calleetoken).cast(self.cache.JSScript) return {"function": function, "script": script} @@ -204,21 +207,23 @@ def filename(self): if gdb.types.has_field(this_frame.type.target(), "calleeToken_"): script = self._decode_jitframe(this_frame)["script"] if script is not None: - obj = script['sourceObject_']['value'] + obj = script["sourceObject_"]["value"] # Verify that this is a ScriptSource object. # FIXME should also deal with wrappers here. nativeobj = obj.cast(self.cache.NativeObject) # See bug 987069 and despair. At least this # approach won't give exceptions. 
- class_name = nativeobj['group_']['value']['clasp_']['name'].string( - "ISO-8859-1") + class_name = nativeobj["group_"]["value"]["clasp_"]["name"].string( + "ISO-8859-1" + ) if class_name != "ScriptSource": return FrameDecorator.filename(self) - scriptsourceobj = ( - nativeobj + 1).cast(self.cache.HeapSlot)[self.cache.SOURCE_SLOT] - scriptsource = scriptsourceobj['value']['asBits_'] << 1 + scriptsourceobj = (nativeobj + 1).cast(self.cache.HeapSlot)[ + self.cache.SOURCE_SLOT + ] + scriptsource = scriptsourceobj["value"]["asBits_"] << 1 scriptsource = scriptsource.cast(self.cache.ScriptSource) - return scriptsource['filename_']['mTuple']['mFirstA'].string() + return scriptsource["filename_"]["mTuple"]["mFirstA"].string() return FrameDecorator.filename(self) def frame_args(self): @@ -242,9 +247,9 @@ def frame_args(self): # Synthesize names, since there doesn't seem to be # anything better to do. if i == 0: - name = 'this' + name = "this" else: - name = 'arg%d' % i + name = "arg%d" % i result.append(FrameSymbol(name, args_ptr[i])) return result @@ -329,7 +334,7 @@ def add_frame(self, sp, name=None, this_frame=None): # See whether |pc| is claimed by the Jit. def is_jit_address(self, pc): execMem = self.typecache.execMemory - base = long(execMem['base_']) + base = long(execMem["base_"]) length = self.typecache.MaxCodeBytesPerProcess # If the base pointer is null, then no memory got allocated yet. @@ -345,7 +350,7 @@ def check(self): # Essentially js::TlsContext.get(). def get_tls_context(self): - return self.typecache.per_tls_context.value()['mValue'] + return self.typecache.per_tls_context.value()["mValue"] # |common| is a pointer to a CommonFrameLayout object. Return a # tuple (local_size, header_size, frame_type), where |size| is the @@ -353,10 +358,11 @@ def get_tls_context(self): # the size of this frame's header; and |frame_type| is an integer # representing the previous frame's type. def unpack_descriptor(self, common): - value = long(common['descriptor_']) + value = long(common["descriptor_"]) local_size = value >> self.typecache.FRAMESIZE_SHIFT - header_size = ((value >> self.typecache.FRAME_HEADER_SIZE_SHIFT) & - self.typecache.FRAME_HEADER_SIZE_MASK) + header_size = ( + value >> self.typecache.FRAME_HEADER_SIZE_SHIFT + ) & self.typecache.FRAME_HEADER_SIZE_MASK header_size = header_size * self.typecache.void_starstar.sizeof frame_type = long(value & self.typecache.FRAMETYPE_MASK) if frame_type == self.typecache.frame_type.CppToJSJit: @@ -386,7 +392,7 @@ def create_frame(self, pc, sp, frame, frame_type, pending_frame): # This lets us unwind the necessary registers for the next # frame, and also update our internal state to match. common = frame.cast(self.typecache.typeCommonFrameLayoutPointer) - next_pc = common['returnAddress_'] + next_pc = common["returnAddress_"] (local_size, header_size, next_type) = self.unpack_descriptor(common) next_sp = frame + header_size + local_size @@ -415,8 +421,9 @@ def create_frame(self, pc, sp, frame, frame_type, pending_frame): # other than enter and exit frames. Returns the newly-created # unwind info for gdb. def unwind_ordinary(self, pc, pending_frame): - return self.create_frame(pc, self.next_sp, self.next_sp, - self.next_type, pending_frame) + return self.create_frame( + pc, self.next_sp, self.next_sp, self.next_type, pending_frame + ) # Unwind an exit frame. Returns None if this cannot be done; # otherwise returns the newly-created unwind info for gdb. 
@@ -426,11 +433,11 @@ def unwind_exit_frame(self, pc, pending_frame): return None elif self.activation is None: cx = self.get_tls_context() - self.activation = cx['jitActivation']['value'] + self.activation = cx["jitActivation"]["value"] else: - self.activation = self.activation['prevJitActivation_'] + self.activation = self.activation["prevJitActivation_"] - packedExitFP = self.activation['packedExitFP_'] + packedExitFP = self.activation["packedExitFP_"] if packedExitFP == 0: return None @@ -443,7 +450,7 @@ def unwind_exit_frame(self, pc, pending_frame): def unwind_entry_frame(self, pc, pending_frame): sp = self.next_sp # Notify the frame filter. - self.add_frame(sp, name='FrameType::CppToJSJit') + self.add_frame(sp, name="FrameType::CppToJSJit") # Make an unwind_info for the per-architecture code to fill in. frame_id = SpiderMonkeyFrameId(sp, pc) unwind_info = pending_frame.create_unwind_info(frame_id) @@ -476,13 +483,13 @@ def unwind(self, pending_frame): class x64UnwinderState(UnwinderState): "The UnwinderState subclass for x86-64." - SP_REGISTER = 'rsp' - PC_REGISTER = 'rip' + SP_REGISTER = "rsp" + PC_REGISTER = "rip" # A register unique to this architecture, that is also likely to # have been saved in any frame. The best thing to use here is # some arch-specific name for PC or SP. - SENTINEL_REGISTER = 'rip' + SENTINEL_REGISTER = "rip" # Must be in sync with Trampoline-x64.cpp:generateEnterJIT. Note # that rip isn't pushed there explicitly, but rather by the @@ -517,8 +524,10 @@ def __init__(self, typecache): # Disabled by default until we figure out issues in gdb. self.enabled = False - gdb.write("SpiderMonkey unwinder is disabled by default, to enable it type:\n" + - "\tenable unwinder .* SpiderMonkey\n") + gdb.write( + "SpiderMonkey unwinder is disabled by default, to enable it type:\n" + + "\tenable unwinder .* SpiderMonkey\n" + ) # Some versions of gdb did not flush the internal frame cache # when enabling or disabling an unwinder. This was fixed in # the same release of gdb that added the breakpoint_created diff --git a/js/src/gdb/progressbar.py b/js/src/gdb/progressbar.py index 7962ab4a54bd74..39c3628b18585a 100644 --- a/js/src/gdb/progressbar.py +++ b/js/src/gdb/progressbar.py @@ -19,33 +19,35 @@ def __init__(self, label, limit, label_width=12): self.fullwidth = None self.barlen = 64 - self.label_width - self.fmt = '\r%-' + str(label_width) + 's %3d%% %-' + str(self.barlen) + 's| %6.1fs' + self.fmt = ( + "\r%-" + str(label_width) + "s %3d%% %-" + str(self.barlen) + "s| %6.1fs" + ) def update(self, value): self.cur = value pct = int(100.0 * self.cur / self.limit) barlen = int(1.0 * self.barlen * self.cur / self.limit) - 1 - bar = '='*barlen + '>' + bar = "=" * barlen + ">" dt = datetime.datetime.now() - self.t0 dt = dt.seconds + dt.microseconds * 1e-6 - line = self.fmt % (self.label[:self.label_width], pct, bar, dt) + line = self.fmt % (self.label[: self.label_width], pct, bar, dt) self.fullwidth = len(line) sys.stdout.write(line) sys.stdout.flush() # Clear the current bar and leave the cursor at the start of the line. 
def clear(self): - if (self.fullwidth): - sys.stdout.write('\r' + ' ' * self.fullwidth + '\r') + if self.fullwidth: + sys.stdout.write("\r" + " " * self.fullwidth + "\r") self.fullwidth = None def finish(self): self.update(self.limit) - sys.stdout.write('\n') + sys.stdout.write("\n") -if __name__ == '__main__': - pb = ProgressBar('test', 12) +if __name__ == "__main__": + pb = ProgressBar("test", 12) for i in range(12): pb.update(i) time.sleep(0.5) diff --git a/js/src/gdb/run-tests.py b/js/src/gdb/run-tests.py index 6a0c0609e95c76..da8d38cfc182fc 100644 --- a/js/src/gdb/run-tests.py +++ b/js/src/gdb/run-tests.py @@ -32,7 +32,7 @@ def _relpath(path, start=None): # Work out how much of the filepath is shared by start and path. i = len(os.path.commonprefix([start_list, path_list])) - rel_list = [os.pardir] * (len(start_list)-i) + path_list[i:] + rel_list = [os.pardir] * (len(start_list) - i) + path_list[i:] if not rel_list: return os.curdir return os.path.join(*rel_list) @@ -41,7 +41,7 @@ def _relpath(path, start=None): os.path.relpath = _relpath # Characters that need to be escaped when used in shell words. -shell_need_escapes = re.compile('[^\w\d%+,-./:=@\'"]', re.DOTALL) +shell_need_escapes = re.compile("[^\w\d%+,-./:=@'\"]", re.DOTALL) # Characters that need to be escaped within double-quoted strings. shell_dquote_escapes = re.compile('[^\w\d%+,-./:=@"]', re.DOTALL) @@ -51,31 +51,30 @@ def quote(s): if shell_need_escapes.search(s): if s.find("'") < 0: return "'" + s + "'" - return '"' + shell_dquote_escapes.sub('\\g<0>', s) + '"' + return '"' + shell_dquote_escapes.sub("\\g<0>", s) + '"' return s - return ' '.join([quote(_) for _ in l]) + return " ".join([quote(_) for _ in l]) # An instance of this class collects the lists of passing, failing, and # timing-out tests, runs the progress bar, and prints a summary at the end. class Summary(object): - class SummaryBar(progressbar.ProgressBar): def __init__(self, limit): - super(Summary.SummaryBar, self).__init__('', limit, 24) + super(Summary.SummaryBar, self).__init__("", limit, 24) def start(self): - self.label = '[starting ]' + self.label = "[starting ]" self.update(0) def counts(self, run, failures, timeouts): - self.label = '[%4d|%4d|%4d|%4d]' % (run - failures, failures, timeouts, run) + self.label = "[%4d|%4d|%4d|%4d]" % (run - failures, failures, timeouts, run) self.update(run) def __init__(self, num_tests): self.run = 0 - self.failures = [] # kind of judgemental; "unexpecteds"? + self.failures = [] # kind of judgemental; "unexpecteds"? self.timeouts = [] if not OPTIONS.hide_progress: self.bar = Summary.SummaryBar(num_tests) @@ -88,6 +87,7 @@ def start(self): def update(self): if not OPTIONS.hide_progress: self.bar.counts(self.run, len(self.failures), len(self.timeouts)) + # Call 'thunk' to show some output, while getting the progress bar out of the way. 
def interleave_output(self, thunk): @@ -124,10 +124,11 @@ def finish(self): try: with open(OPTIONS.worklist) as out: for test in self.failures: - out.write(test.name + '\n') + out.write(test.name + "\n") except IOError as err: - sys.stderr.write("Error writing worklist file '%s': %s" - % (OPTIONS.worklist, err)) + sys.stderr.write( + "Error writing worklist file '%s': %s" % (OPTIONS.worklist, err) + ) sys.exit(1) if OPTIONS.write_failures: @@ -136,8 +137,10 @@ def finish(self): for test in self.failures: test.show(out) except IOError as err: - sys.stderr.write("Error writing worklist file '%s': %s" - % (OPTIONS.write_failures, err)) + sys.stderr.write( + "Error writing worklist file '%s': %s" + % (OPTIONS.write_failures, err) + ) sys.exit(1) if self.timeouts: @@ -152,7 +155,7 @@ def finish(self): class Test(TaskPool.Task): def __init__(self, path, summary): super(Test, self).__init__() - self.test_path = path # path to .py test file + self.test_path = path # path to .py test file self.summary = summary # test.name is the name of the test relative to the top of the test @@ -160,22 +163,31 @@ def __init__(self, path, summary): # and when writing test lists. self.name = os.path.relpath(self.test_path, OPTIONS.testdir) - self.stdout = '' - self.stderr = '' + self.stdout = "" + self.stderr = "" self.returncode = None def cmd(self): - testlibdir = os.path.normpath(os.path.join(OPTIONS.testdir, '..', 'lib-for-tests')) - return [OPTIONS.gdb_executable, - '-nw', # Don't create a window (unnecessary?) - '-nx', # Don't read .gdbinit. - '--ex', 'add-auto-load-safe-path %s' % (OPTIONS.bindir,), - '--ex', 'set env LD_LIBRARY_PATH %s' % (OPTIONS.bindir,), - '--ex', 'file %s' % (os.path.join(OPTIONS.bindir, 'gdb-tests'),), - '--eval-command', 'python testlibdir=%r' % (testlibdir,), - '--eval-command', 'python testscript=%r' % (self.test_path,), - '--eval-command', 'python exec(open(%r).read())' % os.path.join(testlibdir, - 'catcher.py')] + testlibdir = os.path.normpath( + os.path.join(OPTIONS.testdir, "..", "lib-for-tests") + ) + return [ + OPTIONS.gdb_executable, + "-nw", # Don't create a window (unnecessary?) + "-nx", # Don't read .gdbinit. 
+ "--ex", + "add-auto-load-safe-path %s" % (OPTIONS.bindir,), + "--ex", + "set env LD_LIBRARY_PATH %s" % (OPTIONS.bindir,), + "--ex", + "file %s" % (os.path.join(OPTIONS.bindir, "gdb-tests"),), + "--eval-command", + "python testlibdir=%r" % (testlibdir,), + "--eval-command", + "python testscript=%r" % (self.test_path,), + "--eval-command", + "python exec(open(%r).read())" % os.path.join(testlibdir, "catcher.py"), + ] def start(self, pipe, deadline): super(Test, self).start(pipe, deadline) @@ -205,27 +217,27 @@ def show_cmd(self, out): def show_output(self, out): if self.stdout: - out.write('Standard output:') - out.write('\n' + self.stdout + '\n') + out.write("Standard output:") + out.write("\n" + self.stdout + "\n") if self.stderr: - out.write('Standard error:') - out.write('\n' + self.stderr + '\n') + out.write("Standard error:") + out.write("\n" + self.stderr + "\n") def show(self, out): - out.write(self.name + '\n') + out.write(self.name + "\n") if OPTIONS.write_failure_output: self.show_cmd(out) self.show_output(out) - out.write('GDB exit code: %r\n' % (self.returncode,)) + out.write("GDB exit code: %r\n" % (self.returncode,)) def find_tests(dir, substring=None): ans = [] for dirpath, _, filenames in os.walk(dir): - if dirpath == '.': + if dirpath == ".": continue for filename in filenames: - if not filename.endswith('.py'): + if not filename.endswith(".py"): continue test = os.path.join(dirpath, filename) if substring is None or substring in os.path.relpath(test, dir): @@ -234,19 +246,26 @@ def find_tests(dir, substring=None): def build_test_exec(builddir): - subprocess.check_call(['make'], cwd=builddir) + subprocess.check_call(["make"], cwd=builddir) def run_tests(tests, summary): jobs = OPTIONS.workercount # python 3.3 fixed a bug with concurrently writing .pyc files. # https://bugs.python.org/issue13146 - embedded_version = subprocess.check_output([ - OPTIONS.gdb_executable, - '--batch', - '--ex', 'python import sys; print(sys.hexversion)' - ]).decode('ascii').strip() - if hex(int(embedded_version)) < '0x3030000': + embedded_version = ( + subprocess.check_output( + [ + OPTIONS.gdb_executable, + "--batch", + "--ex", + "python import sys; print(sys.hexversion)", + ] + ) + .decode("ascii") + .strip() + ) + if hex(int(embedded_version)) < "0x3030000": jobs = 1 pool = TaskPool(tests, job_limit=jobs, timeout=OPTIONS.timeout) @@ -267,43 +286,104 @@ def main(argv): # The [TESTS] optional arguments are paths of test files relative # to the jit-test/tests directory. 
from optparse import OptionParser - op = OptionParser(usage='%prog [options] OBJDIR [TESTS...]') - op.add_option('-s', '--show-cmd', dest='show_cmd', action='store_true', - help='show GDB shell command run') - op.add_option('-o', '--show-output', dest='show_output', action='store_true', - help='show output from GDB') - op.add_option('-x', '--exclude', dest='exclude', action='append', - help='exclude given test dir or path') - op.add_option('-t', '--timeout', dest='timeout', type=float, default=150.0, - help='set test timeout in seconds') - op.add_option('-j', '--worker-count', dest='workercount', type=int, - help='Run [WORKERCOUNT] tests at a time') - op.add_option('--no-progress', dest='hide_progress', action='store_true', - help='hide progress bar') - op.add_option('--worklist', dest='worklist', metavar='FILE', - help='Read tests to run from [FILE] (or run all if [FILE] not found);\n' - 'write failures back to [FILE]') - op.add_option('-r', '--read-tests', dest='read_tests', metavar='FILE', - help='Run test files listed in [FILE]') - op.add_option('-w', '--write-failures', dest='write_failures', metavar='FILE', - help='Write failing tests to [FILE]') - op.add_option('--write-failure-output', dest='write_failure_output', action='store_true', - help='With --write-failures=FILE, additionally write the output of failed ' - 'tests to [FILE]') - op.add_option('--gdb', dest='gdb_executable', metavar='EXECUTABLE', default='gdb', - help='Run tests with [EXECUTABLE], rather than plain \'gdb\'.') - op.add_option('--srcdir', dest='srcdir', - default=os.path.abspath(os.path.join(script_dir, '..')), - help='Use SpiderMonkey sources in [SRCDIR].') - op.add_option('--testdir', dest='testdir', default=os.path.join(script_dir, 'tests'), - help='Find tests in [TESTDIR].') - op.add_option('--builddir', dest='builddir', - help='Build test executable from [BUILDDIR].') - op.add_option('--bindir', dest='bindir', - help='Run test executable from [BINDIR].') + + op = OptionParser(usage="%prog [options] OBJDIR [TESTS...]") + op.add_option( + "-s", + "--show-cmd", + dest="show_cmd", + action="store_true", + help="show GDB shell command run", + ) + op.add_option( + "-o", + "--show-output", + dest="show_output", + action="store_true", + help="show output from GDB", + ) + op.add_option( + "-x", + "--exclude", + dest="exclude", + action="append", + help="exclude given test dir or path", + ) + op.add_option( + "-t", + "--timeout", + dest="timeout", + type=float, + default=150.0, + help="set test timeout in seconds", + ) + op.add_option( + "-j", + "--worker-count", + dest="workercount", + type=int, + help="Run [WORKERCOUNT] tests at a time", + ) + op.add_option( + "--no-progress", + dest="hide_progress", + action="store_true", + help="hide progress bar", + ) + op.add_option( + "--worklist", + dest="worklist", + metavar="FILE", + help="Read tests to run from [FILE] (or run all if [FILE] not found);\n" + "write failures back to [FILE]", + ) + op.add_option( + "-r", + "--read-tests", + dest="read_tests", + metavar="FILE", + help="Run test files listed in [FILE]", + ) + op.add_option( + "-w", + "--write-failures", + dest="write_failures", + metavar="FILE", + help="Write failing tests to [FILE]", + ) + op.add_option( + "--write-failure-output", + dest="write_failure_output", + action="store_true", + help="With --write-failures=FILE, additionally write the output of failed " + "tests to [FILE]", + ) + op.add_option( + "--gdb", + dest="gdb_executable", + metavar="EXECUTABLE", + default="gdb", + help="Run tests with 
[EXECUTABLE], rather than plain 'gdb'.", + ) + op.add_option( + "--srcdir", + dest="srcdir", + default=os.path.abspath(os.path.join(script_dir, "..")), + help="Use SpiderMonkey sources in [SRCDIR].", + ) + op.add_option( + "--testdir", + dest="testdir", + default=os.path.join(script_dir, "tests"), + help="Find tests in [TESTDIR].", + ) + op.add_option( + "--builddir", dest="builddir", help="Build test executable from [BUILDDIR]." + ) + op.add_option("--bindir", dest="bindir", help="Run test executable from [BINDIR].") (OPTIONS, args) = op.parse_args(argv) if len(args) < 1: - op.error('missing OBJDIR argument') + op.error("missing OBJDIR argument") OPTIONS.objdir = os.path.abspath(args[0]) test_args = args[1:] @@ -314,9 +394,9 @@ def main(argv): # Compute defaults for OPTIONS.builddir and OPTIONS.bindir now, since we've # computed OPTIONS.objdir. if not OPTIONS.builddir: - OPTIONS.builddir = os.path.join(OPTIONS.objdir, 'js', 'src', 'gdb') + OPTIONS.builddir = os.path.join(OPTIONS.objdir, "js", "src", "gdb") if not OPTIONS.bindir: - OPTIONS.bindir = os.path.join(OPTIONS.objdir, 'dist', 'bin') + OPTIONS.bindir = os.path.join(OPTIONS.objdir, "dist", "bin") test_set = set() @@ -328,21 +408,24 @@ def main(argv): try: with open(OPTIONS.worklist) as f: for line in f: - test_set.update(os.path.join(OPTIONS.testdir, line.strip('\n'))) + test_set.update(os.path.join(OPTIONS.testdir, line.strip("\n"))) except IOError: # With worklist, a missing file means to start the process with # the complete list of tests. - sys.stderr.write("Couldn't read worklist file '%s'; running all tests\n" - % (OPTIONS.worklist,)) + sys.stderr.write( + "Couldn't read worklist file '%s'; running all tests\n" + % (OPTIONS.worklist,) + ) test_set = set(find_tests(OPTIONS.testdir)) if OPTIONS.read_tests: try: with open(OPTIONS.read_tests) as f: for line in f: - test_set.update(os.path.join(OPTIONS.testdir, line.strip('\n'))) + test_set.update(os.path.join(OPTIONS.testdir, line.strip("\n"))) except IOError as err: - sys.stderr.write("Error trying to read test file '%s': %s\n" - % (OPTIONS.read_tests, err)) + sys.stderr.write( + "Error trying to read test file '%s': %s\n" % (OPTIONS.read_tests, err) + ) sys.exit(1) # If none of the above options were passed, and no tests were listed @@ -383,5 +466,5 @@ def main(argv): sys.exit(0) -if __name__ == '__main__': +if __name__ == "__main__": main(sys.argv[1:]) diff --git a/js/src/gdb/taskpool.py b/js/src/gdb/taskpool.py index 5b28b68278ae55..363031183d57ff 100644 --- a/js/src/gdb/taskpool.py +++ b/js/src/gdb/taskpool.py @@ -68,7 +68,7 @@ def onTimeout(self): class TerminateTask(Exception): pass - def __init__(self, tasks, cwd='.', job_limit=4, timeout=150): + def __init__(self, tasks, cwd=".", job_limit=4, timeout=150): self.pending = iter(tasks) self.cwd = cwd self.job_limit = job_limit @@ -78,13 +78,18 @@ def __init__(self, tasks, cwd='.', job_limit=4, timeout=150): def run_all(self): # The currently running tasks: a set of Task instances. running = set() - with open(os.devnull, 'r') as devnull: + with open(os.devnull, "r") as devnull: while True: while len(running) < self.job_limit and self.next_pending: task = self.next_pending - p = Popen(task.cmd(), bufsize=16384, - stdin=devnull, stdout=PIPE, stderr=PIPE, - cwd=self.cwd) + p = Popen( + task.cmd(), + bufsize=16384, + stdin=devnull, + stdout=PIPE, + stderr=PIPE, + cwd=self.cwd, + ) # Put the stdout and stderr pipes in non-blocking mode. See # the post-'select' code below for details. 
@@ -107,10 +112,12 @@ def run_all(self): secs_to_next_deadline = max(min([t.deadline for t in running]) - now, 0) # Wait for output or a timeout. - stdouts_and_stderrs = ([t.pipe.stdout for t in running] - + [t.pipe.stderr for t in running]) - (readable, w, x) = select.select(stdouts_and_stderrs, [], [], - secs_to_next_deadline) + stdouts_and_stderrs = [t.pipe.stdout for t in running] + [ + t.pipe.stderr for t in running + ] + (readable, w, x) = select.select( + stdouts_and_stderrs, [], [], secs_to_next_deadline + ) finished = set() terminate = set() for t in running: @@ -124,14 +131,14 @@ def run_all(self): output = t.pipe.stdout.read(16384) if len(output): try: - t.onStdout(output.decode('utf-8')) + t.onStdout(output.decode("utf-8")) except TerminateTask: terminate.add(t) if t.pipe.stderr in readable: output = t.pipe.stderr.read(16384) if len(output): try: - t.onStderr(output.decode('utf-8')) + t.onStderr(output.decode("utf-8")) except TerminateTask: terminate.add(t) else: @@ -173,13 +180,14 @@ def get_cpu_count(): # Python 2.6+ try: import multiprocessing + return multiprocessing.cpu_count() except (ImportError, NotImplementedError): pass # POSIX try: - res = int(os.sysconf('SC_NPROCESSORS_ONLN')) + res = int(os.sysconf("SC_NPROCESSORS_ONLN")) if res > 0: return res except (AttributeError, ValueError): @@ -187,7 +195,7 @@ def get_cpu_count(): # Windows try: - res = int(os.environ['NUMBER_OF_PROCESSORS']) + res = int(os.environ["NUMBER_OF_PROCESSORS"]) if res > 0: return res except (KeyError, ValueError): @@ -196,7 +204,7 @@ def get_cpu_count(): return 1 -if __name__ == '__main__': +if __name__ == "__main__": # Test TaskPool by using it to implement the unique 'sleep sort' algorithm. def sleep_sort(ns, timeout): sorted = [] @@ -210,20 +218,20 @@ def start(self, pipe, deadline): super(SortableTask, self).start(pipe, deadline) def cmd(self): - return ['sh', '-c', 'echo out; sleep %d; echo err>&2' % (self.n,)] + return ["sh", "-c", "echo out; sleep %d; echo err>&2" % (self.n,)] def onStdout(self, text): - print('%d stdout: %r' % (self.n, text)) + print("%d stdout: %r" % (self.n, text)) def onStderr(self, text): - print('%d stderr: %r' % (self.n, text)) + print("%d stderr: %r" % (self.n, text)) def onFinished(self, returncode): - print('%d (rc=%d)' % (self.n, returncode)) + print("%d (rc=%d)" % (self.n, returncode)) sorted.append(self.n) def onTimeout(self): - print('%d timed out' % (self.n,)) + print("%d timed out" % (self.n,)) p = TaskPool([SortableTask(_) for _ in ns], job_limit=len(ns), timeout=timeout) p.run_all() diff --git a/js/src/gdb/tests/enum-printers.py b/js/src/gdb/tests/enum-printers.py index cdd0c61633f13f..ff2c088dc8abdf 100644 --- a/js/src/gdb/tests/enum-printers.py +++ b/js/src/gdb/tests/enum-printers.py @@ -4,44 +4,44 @@ import mozilla.prettyprinters -@mozilla.prettyprinters.pretty_printer('unscoped_no_storage') +@mozilla.prettyprinters.pretty_printer("unscoped_no_storage") class my_typedef(object): def __init__(self, value, cache): pass def to_string(self): - return 'unscoped_no_storage::success' + return "unscoped_no_storage::success" -@mozilla.prettyprinters.pretty_printer('unscoped_with_storage') +@mozilla.prettyprinters.pretty_printer("unscoped_with_storage") class my_typedef(object): def __init__(self, value, cache): pass def to_string(self): - return 'unscoped_with_storage::success' + return "unscoped_with_storage::success" -@mozilla.prettyprinters.pretty_printer('scoped_no_storage') +@mozilla.prettyprinters.pretty_printer("scoped_no_storage") class 
my_typedef(object): def __init__(self, value, cache): pass def to_string(self): - return 'scoped_no_storage::success' + return "scoped_no_storage::success" -@mozilla.prettyprinters.pretty_printer('scoped_with_storage') +@mozilla.prettyprinters.pretty_printer("scoped_with_storage") class my_typedef(object): def __init__(self, value, cache): pass def to_string(self): - return 'scoped_with_storage::success' + return "scoped_with_storage::success" -run_fragment('enum_printers.one') -assert_pretty('i1', 'unscoped_no_storage::success') -assert_pretty('i2', 'unscoped_with_storage::success') -assert_pretty('i3', 'scoped_no_storage::success') -assert_pretty('i4', 'scoped_with_storage::success') +run_fragment("enum_printers.one") +assert_pretty("i1", "unscoped_no_storage::success") +assert_pretty("i2", "unscoped_with_storage::success") +assert_pretty("i3", "scoped_no_storage::success") +assert_pretty("i4", "scoped_with_storage::success") diff --git a/js/src/gdb/tests/test-ExecutableAllocator.py b/js/src/gdb/tests/test-ExecutableAllocator.py index 88dc292d0c8254..bec2dda62307b0 100644 --- a/js/src/gdb/tests/test-ExecutableAllocator.py +++ b/js/src/gdb/tests/test-ExecutableAllocator.py @@ -3,19 +3,20 @@ # As it caused by the way we instanciate this file # flake8: noqa: F821 -assert_subprinter_registered('SpiderMonkey', 'JS::GCCellPtr') +assert_subprinter_registered("SpiderMonkey", "JS::GCCellPtr") -run_fragment('ExecutableAllocator.empty') +run_fragment("ExecutableAllocator.empty") -assert_pretty('execAlloc', 'ExecutableAllocator([])') +assert_pretty("execAlloc", "ExecutableAllocator([])") -run_fragment('ExecutableAllocator.onepool') +run_fragment("ExecutableAllocator.onepool") -reExecPool = 'ExecutablePool [a-f0-9]{8,}-[a-f0-9]{8,}' -assert_regexp_pretty('pool', reExecPool) -assert_regexp_pretty('execAlloc', 'ExecutableAllocator\(\[' + reExecPool + '\]\)') +reExecPool = "ExecutablePool [a-f0-9]{8,}-[a-f0-9]{8,}" +assert_regexp_pretty("pool", reExecPool) +assert_regexp_pretty("execAlloc", "ExecutableAllocator\(\[" + reExecPool + "\]\)") -run_fragment('ExecutableAllocator.twopools') +run_fragment("ExecutableAllocator.twopools") assert_regexp_pretty( - 'execAlloc', 'ExecutableAllocator\(\[' + reExecPool + ', ' + reExecPool + '\]\)') + "execAlloc", "ExecutableAllocator\(\[" + reExecPool + ", " + reExecPool + "\]\)" +) diff --git a/js/src/gdb/tests/test-GCCellPtr.py b/js/src/gdb/tests/test-GCCellPtr.py index 05d9194ce000af..56207c88637747 100644 --- a/js/src/gdb/tests/test-GCCellPtr.py +++ b/js/src/gdb/tests/test-GCCellPtr.py @@ -1,18 +1,18 @@ # Tests for GCCellPtr pretty-printing # flake8: noqa: F821 -assert_subprinter_registered('SpiderMonkey', 'JS::GCCellPtr') +assert_subprinter_registered("SpiderMonkey", "JS::GCCellPtr") -run_fragment('GCCellPtr.simple') +run_fragment("GCCellPtr.simple") -assert_pretty('nulll', 'JS::GCCellPtr(nullptr)') -assert_pretty('object', 'JS::GCCellPtr((JSObject*) )') -assert_pretty('string', 'JS::GCCellPtr((JSString*) )') -assert_pretty('symbol', 'JS::GCCellPtr((JS::Symbol*) )') -assert_pretty('bigint', 'JS::GCCellPtr((JS::BigInt*) )') -assert_pretty('shape', 'JS::GCCellPtr((js::Shape*) )') -assert_pretty('objectGroup', 'JS::GCCellPtr((js::ObjectGroup*) )') -assert_pretty('baseShape', 'JS::GCCellPtr((js::BaseShape*) )') -assert_pretty('script', 'JS::GCCellPtr((js::BaseScript*) )') -assert_pretty('scope', 'JS::GCCellPtr((js::Scope*) )') -assert_pretty('regExpShared', 'JS::GCCellPtr((js::RegExpShared*) )') +assert_pretty("nulll", "JS::GCCellPtr(nullptr)") 
+assert_pretty("object", "JS::GCCellPtr((JSObject*) )") +assert_pretty("string", "JS::GCCellPtr((JSString*) )") +assert_pretty("symbol", "JS::GCCellPtr((JS::Symbol*) )") +assert_pretty("bigint", "JS::GCCellPtr((JS::BigInt*) )") +assert_pretty("shape", "JS::GCCellPtr((js::Shape*) )") +assert_pretty("objectGroup", "JS::GCCellPtr((js::ObjectGroup*) )") +assert_pretty("baseShape", "JS::GCCellPtr((js::BaseShape*) )") +assert_pretty("script", "JS::GCCellPtr((js::BaseScript*) )") +assert_pretty("scope", "JS::GCCellPtr((js::Scope*) )") +assert_pretty("regExpShared", "JS::GCCellPtr((js::RegExpShared*) )") diff --git a/js/src/gdb/tests/test-Interpreter.py b/js/src/gdb/tests/test-Interpreter.py index 83d03c54a1b43f..7a0279dc4d6501 100644 --- a/js/src/gdb/tests/test-Interpreter.py +++ b/js/src/gdb/tests/test-Interpreter.py @@ -3,15 +3,21 @@ # As it caused by the way we instanciate this file # flake8: noqa: F821 -assert_subprinter_registered('SpiderMonkey', 'js::InterpreterRegs') +assert_subprinter_registered("SpiderMonkey", "js::InterpreterRegs") -run_fragment('Interpreter.Regs') +run_fragment("Interpreter.Regs") -assert_pretty('regs', '{ fp_ = , sp = fp_.slots() + 2, pc = (JSOp::True) }') +assert_pretty("regs", "{ fp_ = , sp = fp_.slots() + 2, pc = (JSOp::True) }") -run_fragment('Interpreter.AbstractFramePtr') +run_fragment("Interpreter.AbstractFramePtr") -assert_pretty('ifptr', 'AbstractFramePtr ((js::InterpreterFrame *) ) = {ptr_ = 146464513}') -assert_pretty('bfptr', 'AbstractFramePtr ((js::jit::BaselineFrame *) ) = {ptr_ = 3135025122}') assert_pretty( - 'rfptr', 'AbstractFramePtr ((js::jit::RematerializedFrame *) ) = {ptr_ = 3669732611}') + "ifptr", "AbstractFramePtr ((js::InterpreterFrame *) ) = {ptr_ = 146464513}" +) +assert_pretty( + "bfptr", "AbstractFramePtr ((js::jit::BaselineFrame *) ) = {ptr_ = 3135025122}" +) +assert_pretty( + "rfptr", + "AbstractFramePtr ((js::jit::RematerializedFrame *) ) = {ptr_ = 3669732611}", +) diff --git a/js/src/gdb/tests/test-JSObject-null.py b/js/src/gdb/tests/test-JSObject-null.py index e94cc67bba12d2..fcda681aea45b3 100644 --- a/js/src/gdb/tests/test-JSObject-null.py +++ b/js/src/gdb/tests/test-JSObject-null.py @@ -1,8 +1,8 @@ # flake8: noqa: F821 -gdb.execute('set print address on') +gdb.execute("set print address on") -run_fragment('JSObject.null') +run_fragment("JSObject.null") -assert_pretty('null', '0x0') -assert_pretty('nullRaw', '0x0') +assert_pretty("null", "0x0") +assert_pretty("nullRaw", "0x0") diff --git a/js/src/gdb/tests/test-JSObject.py b/js/src/gdb/tests/test-JSObject.py index 2cd8746d76618f..a9a5ca41a62516 100644 --- a/js/src/gdb/tests/test-JSObject.py +++ b/js/src/gdb/tests/test-JSObject.py @@ -1,23 +1,23 @@ # Printing JSObjects. # flake8: noqa: F821 -assert_subprinter_registered('SpiderMonkey', 'ptr-to-JSObject') -assert_subprinter_registered('SpiderMonkey', 'ref-to-JSObject') +assert_subprinter_registered("SpiderMonkey", "ptr-to-JSObject") +assert_subprinter_registered("SpiderMonkey", "ref-to-JSObject") -run_fragment('JSObject.simple') +run_fragment("JSObject.simple") # These patterns look a little strange because of prologue.py's 'set print # address off', which avoids putting varying addresses in the output. After # the '(JSObject *) ', there is a 'void *' value printing as the empty # string. 
-assert_pretty('glob', '(JSObject *) [object global] delegate') -assert_pretty('plain', '(JSObject *) [object Object]') -assert_pretty('func', '(JSObject *) [object Function "dys"]') -assert_pretty('anon', '(JSObject *) [object Function ]') -assert_pretty('funcPtr', '(JSFunction *) [object Function "formFollows"]') +assert_pretty("glob", "(JSObject *) [object global] delegate") +assert_pretty("plain", "(JSObject *) [object Object]") +assert_pretty("func", '(JSObject *) [object Function "dys"]') +assert_pretty("anon", "(JSObject *) [object Function ]") +assert_pretty("funcPtr", '(JSFunction *) [object Function "formFollows"]') -assert_pretty('badClassName', '(JSObject *) [object \\307X]') +assert_pretty("badClassName", "(JSObject *) [object \\307X]") -assert_pretty('plainRef', '(JSObject &) @ [object Object]') -assert_pretty('funcRef', '(JSFunction &) @ [object Function "formFollows"]') +assert_pretty("plainRef", "(JSObject &) @ [object Object]") +assert_pretty("funcRef", '(JSFunction &) @ [object Function "formFollows"]') diff --git a/js/src/gdb/tests/test-JSString-null.py b/js/src/gdb/tests/test-JSString-null.py index 7be3932096dbcd..26bbfcf4287f88 100644 --- a/js/src/gdb/tests/test-JSString-null.py +++ b/js/src/gdb/tests/test-JSString-null.py @@ -1,8 +1,8 @@ # flake8: noqa: F821 -gdb.execute('set print address on') +gdb.execute("set print address on") -run_fragment('JSString.null') +run_fragment("JSString.null") -assert_pretty('null', '0x0') -assert_pretty('nullRaw', '0x0') +assert_pretty("null", "0x0") +assert_pretty("nullRaw", "0x0") diff --git a/js/src/gdb/tests/test-JSString-subclasses.py b/js/src/gdb/tests/test-JSString-subclasses.py index 55c3bc336ad3a9..b685f46aff21f0 100644 --- a/js/src/gdb/tests/test-JSString-subclasses.py +++ b/js/src/gdb/tests/test-JSString-subclasses.py @@ -2,6 +2,6 @@ # We can print pointers to subclasses of JSString. -run_fragment('JSString.subclasses') +run_fragment("JSString.subclasses") -assert_pretty('linear', '"Hi!"') +assert_pretty("linear", '"Hi!"') diff --git a/js/src/gdb/tests/test-JSString.py b/js/src/gdb/tests/test-JSString.py index 58b9a679bde888..99cebc4ab5ee21 100644 --- a/js/src/gdb/tests/test-JSString.py +++ b/js/src/gdb/tests/test-JSString.py @@ -1,24 +1,24 @@ # Printing JSStrings. # flake8: noqa: F821 -assert_subprinter_registered('SpiderMonkey', 'ptr-to-JSString') -run_fragment('JSString.simple') +assert_subprinter_registered("SpiderMonkey", "ptr-to-JSString") +run_fragment("JSString.simple") -assert_pretty('empty', '""') -assert_pretty('x', '"x"') -assert_pretty('z', '"z"') -assert_pretty('xz', '"xz"') +assert_pretty("empty", '""') +assert_pretty("x", '"x"') +assert_pretty("z", '"z"') +assert_pretty("xz", '"xz"') -stars = gdb.parse_and_eval('stars') +stars = gdb.parse_and_eval("stars") assert_eq(str(stars), "'*' ") -doubleStars = gdb.parse_and_eval('doubleStars') +doubleStars = gdb.parse_and_eval("doubleStars") assert_eq(str(doubleStars), "'*' ") -assert_pretty('xRaw', '"x"') +assert_pretty("xRaw", '"x"') # JSAtom * -run_fragment('JSString.atom') +run_fragment("JSString.atom") -assert_pretty('molybdenum', '"molybdenum"') +assert_pretty("molybdenum", '"molybdenum"') diff --git a/js/src/gdb/tests/test-JSSymbol.py b/js/src/gdb/tests/test-JSSymbol.py index 52506335e64071..a0dba5c036bdea 100644 --- a/js/src/gdb/tests/test-JSSymbol.py +++ b/js/src/gdb/tests/test-JSSymbol.py @@ -1,11 +1,11 @@ # Printing JS::Symbols. 
# flake8: noqa: F821 -assert_subprinter_registered('SpiderMonkey', 'ptr-to-JS::Symbol') +assert_subprinter_registered("SpiderMonkey", "ptr-to-JS::Symbol") -run_fragment('JSSymbol.simple') +run_fragment("JSSymbol.simple") -assert_pretty('unique', 'Symbol()') -assert_pretty('unique_with_desc', 'Symbol("Hello!")') -assert_pretty('registry', 'Symbol.for("Hello!")') -assert_pretty('well_known', 'Symbol.iterator') +assert_pretty("unique", "Symbol()") +assert_pretty("unique_with_desc", 'Symbol("Hello!")') +assert_pretty("registry", 'Symbol.for("Hello!")') +assert_pretty("well_known", "Symbol.iterator") diff --git a/js/src/gdb/tests/test-Root-null.py b/js/src/gdb/tests/test-Root-null.py index 8566d8c998865a..839fa069c5b0ae 100644 --- a/js/src/gdb/tests/test-Root-null.py +++ b/js/src/gdb/tests/test-Root-null.py @@ -14,8 +14,8 @@ # mistake. # flake8: noqa: F821 -gdb.execute('set print address on') +gdb.execute("set print address on") -run_fragment('Root.null') +run_fragment("Root.null") -assert_pretty('null', '0x0') +assert_pretty("null", "0x0") diff --git a/js/src/gdb/tests/test-Root.py b/js/src/gdb/tests/test-Root.py index 6b9e8e44325d89..f3b72b9fdacd7a 100644 --- a/js/src/gdb/tests/test-Root.py +++ b/js/src/gdb/tests/test-Root.py @@ -3,28 +3,28 @@ # As it caused by the way we instanciate this file # flake8: noqa: F821 -assert_subprinter_registered('SpiderMonkey', 'instantiations-of-JS::Rooted') -assert_subprinter_registered('SpiderMonkey', 'instantiations-of-JS::Handle') -assert_subprinter_registered('SpiderMonkey', 'instantiations-of-JS::MutableHandle') -assert_subprinter_registered('SpiderMonkey', 'instantiations-of-js::BarrieredBase') +assert_subprinter_registered("SpiderMonkey", "instantiations-of-JS::Rooted") +assert_subprinter_registered("SpiderMonkey", "instantiations-of-JS::Handle") +assert_subprinter_registered("SpiderMonkey", "instantiations-of-JS::MutableHandle") +assert_subprinter_registered("SpiderMonkey", "instantiations-of-js::BarrieredBase") -run_fragment('Root.handle') +run_fragment("Root.handle") -assert_pretty('obj', '(JSObject * const) [object global] delegate') -assert_pretty('mutableObj', '(JSObject *) [object global] delegate') +assert_pretty("obj", "(JSObject * const) [object global] delegate") +assert_pretty("mutableObj", "(JSObject *) [object global] delegate") -run_fragment('Root.HeapSlot') +run_fragment("Root.HeapSlot") # This depends on implementation details of arrays, but since HeapSlot is # not a public type, I'm not sure how to avoid doing *something* ugly. 
-assert_pretty('((js::NativeObject *) array.ptr)->elements_[0]', '$JS::Value("plinth")') +assert_pretty("((js::NativeObject *) array.ptr)->elements_[0]", '$JS::Value("plinth")') -run_fragment('Root.barriers') +run_fragment("Root.barriers") -assert_pretty('prebarriered', '(JSObject *) [object Object]') -assert_pretty('heapptr', '(JSObject *) [object Object]') -assert_pretty('relocatable', '(JSObject *) [object Object]') -assert_pretty('val', '$JS::Value((JSObject *) [object Object])') -assert_pretty('heapValue', '$JS::Value((JSObject *) [object Object])') -assert_pretty('prebarrieredValue', '$JS::Value((JSObject *) [object Object])') -assert_pretty('relocatableValue', '$JS::Value((JSObject *) [object Object])') +assert_pretty("prebarriered", "(JSObject *) [object Object]") +assert_pretty("heapptr", "(JSObject *) [object Object]") +assert_pretty("relocatable", "(JSObject *) [object Object]") +assert_pretty("val", "$JS::Value((JSObject *) [object Object])") +assert_pretty("heapValue", "$JS::Value((JSObject *) [object Object])") +assert_pretty("prebarrieredValue", "$JS::Value((JSObject *) [object Object])") +assert_pretty("relocatableValue", "$JS::Value((JSObject *) [object Object])") diff --git a/js/src/gdb/tests/test-asmjs.py b/js/src/gdb/tests/test-asmjs.py index a0797476482a3a..9e3ed7fc8915de 100644 --- a/js/src/gdb/tests/test-asmjs.py +++ b/js/src/gdb/tests/test-asmjs.py @@ -5,12 +5,12 @@ # handling that signal. # flake8: noqa: F821 -run_fragment('asmjs.segfault') +run_fragment("asmjs.segfault") # If SIGSEGV handling is broken, GDB would have stopped at the SIGSEGV signal. # The breakpoint would not have hit, and run_fragment would have thrown. # # So if we get here, and the asm.js code actually ran, we win. -assert_pretty('ok', 'true') -assert_pretty('rval', '$JS::Value("ok")') +assert_pretty("ok", "true") +assert_pretty("rval", '$JS::Value("ok")') diff --git a/js/src/gdb/tests/test-jsbytecode.py b/js/src/gdb/tests/test-jsbytecode.py index e7df00d2cdf45f..e69732d4c7af8f 100644 --- a/js/src/gdb/tests/test-jsbytecode.py +++ b/js/src/gdb/tests/test-jsbytecode.py @@ -1,9 +1,9 @@ # Basic unit tests for jsbytecode* pretty-printer. 
# flake8: noqa: F821 -assert_subprinter_registered('SpiderMonkey', 'ptr-to-jsbytecode') +assert_subprinter_registered("SpiderMonkey", "ptr-to-jsbytecode") -run_fragment('jsbytecode.simple') +run_fragment("jsbytecode.simple") -assert_pretty('ok', 'true') -assert_pretty('code', ' (JSOp::Debugger)') +assert_pretty("ok", "true") +assert_pretty("code", " (JSOp::Debugger)") diff --git a/js/src/gdb/tests/test-jsid.py b/js/src/gdb/tests/test-jsid.py index 81f46377920862..7945c12f4c74d9 100644 --- a/js/src/gdb/tests/test-jsid.py +++ b/js/src/gdb/tests/test-jsid.py @@ -1,20 +1,20 @@ # Tests for jsid pretty-printing # flake8: noqa: F821 -assert_subprinter_registered('SpiderMonkey', 'JS::PropertyKey') +assert_subprinter_registered("SpiderMonkey", "JS::PropertyKey") -run_fragment('jsid.simple') +run_fragment("jsid.simple") -assert_pretty('string_id', '$jsid("moon")') -assert_pretty('int_id', '$jsid(1729)') -unique_symbol_pretty = str(gdb.parse_and_eval('unique_symbol_id')).split('@')[0] +assert_pretty("string_id", '$jsid("moon")') +assert_pretty("int_id", "$jsid(1729)") +unique_symbol_pretty = str(gdb.parse_and_eval("unique_symbol_id")).split("@")[0] assert_eq(unique_symbol_pretty, '$jsid(Symbol("moon"))') -assert_pretty('registry_symbol_id', '$jsid(Symbol.for("moon"))') -assert_pretty('well_known_symbol_id', '$jsid(Symbol.iterator)') -assert_pretty('void_id', 'JSID_VOID') -assert_pretty('empty_id', 'JSID_EMPTY') +assert_pretty("registry_symbol_id", '$jsid(Symbol.for("moon"))') +assert_pretty("well_known_symbol_id", "$jsid(Symbol.iterator)") +assert_pretty("void_id", "JSID_VOID") +assert_pretty("empty_id", "JSID_EMPTY") -run_fragment('jsid.handles') +run_fragment("jsid.handles") -assert_pretty('jsid_handle', '$jsid("shovel")') -assert_pretty('mutable_jsid_handle', '$jsid("shovel")') +assert_pretty("jsid_handle", '$jsid("shovel")') +assert_pretty("mutable_jsid_handle", '$jsid("shovel")') diff --git a/js/src/gdb/tests/test-jsop.py b/js/src/gdb/tests/test-jsop.py index 9c1c2ba8169e99..fea0da6059394f 100644 --- a/js/src/gdb/tests/test-jsop.py +++ b/js/src/gdb/tests/test-jsop.py @@ -1,9 +1,9 @@ # Basic unit tests for JSOp pretty-printer. # flake8: noqa: F821 -assert_subprinter_registered('SpiderMonkey', 'JSOp') +assert_subprinter_registered("SpiderMonkey", "JSOp") -run_fragment('jsop.simple') +run_fragment("jsop.simple") -assert_pretty('undefined', 'JSOp::Undefined') -assert_pretty('debugger', 'JSOp::Debugger') +assert_pretty("undefined", "JSOp::Undefined") +assert_pretty("debugger", "JSOp::Debugger") diff --git a/js/src/gdb/tests/test-jsval.py b/js/src/gdb/tests/test-jsval.py index 4a20fb57d54e17..a7befa917cfa76 100644 --- a/js/src/gdb/tests/test-jsval.py +++ b/js/src/gdb/tests/test-jsval.py @@ -1,21 +1,23 @@ # Basic unit tests for jsval pretty-printer. 
# flake8: noqa: F821 -assert_subprinter_registered('SpiderMonkey', 'JS::Value') +assert_subprinter_registered("SpiderMonkey", "JS::Value") -run_fragment('jsval.simple') +run_fragment("jsval.simple") -assert_pretty('fortytwo', '$JS::Int32Value(42)') -assert_pretty('fortytwoD', '$JS::DoubleValue(42.0)') -assert_pretty('negone', '$JS::Int32Value(-1)') -assert_pretty('undefined', '$JS::UndefinedValue()') -assert_pretty('null', '$JS::NullValue()') -assert_pretty('js_true', '$JS::BooleanValue(true)') -assert_pretty('js_false', '$JS::BooleanValue(false)') -assert_pretty('elements_hole', '$JS::MagicValue(JS_ELEMENTS_HOLE)') -assert_pretty('empty_string', '$JS::Value("")') -assert_pretty('friendly_string', '$JS::Value("Hello!")') -assert_pretty('symbol', '$JS::Value(Symbol.for("Hello!"))') -assert_pretty('bi', '$JS::BigIntValue()') -assert_pretty('global', '$JS::Value((JSObject *) [object global] delegate)') -assert_pretty('onehundredthirtysevenonehundredtwentyeighths', '$JS::DoubleValue(1.0703125)') +assert_pretty("fortytwo", "$JS::Int32Value(42)") +assert_pretty("fortytwoD", "$JS::DoubleValue(42.0)") +assert_pretty("negone", "$JS::Int32Value(-1)") +assert_pretty("undefined", "$JS::UndefinedValue()") +assert_pretty("null", "$JS::NullValue()") +assert_pretty("js_true", "$JS::BooleanValue(true)") +assert_pretty("js_false", "$JS::BooleanValue(false)") +assert_pretty("elements_hole", "$JS::MagicValue(JS_ELEMENTS_HOLE)") +assert_pretty("empty_string", '$JS::Value("")') +assert_pretty("friendly_string", '$JS::Value("Hello!")') +assert_pretty("symbol", '$JS::Value(Symbol.for("Hello!"))') +assert_pretty("bi", "$JS::BigIntValue()") +assert_pretty("global", "$JS::Value((JSObject *) [object global] delegate)") +assert_pretty( + "onehundredthirtysevenonehundredtwentyeighths", "$JS::DoubleValue(1.0703125)" +) diff --git a/js/src/gdb/tests/test-prettyprinters.py b/js/src/gdb/tests/test-prettyprinters.py index 3b52c96a865f08..f3329c028875e5 100644 --- a/js/src/gdb/tests/test-prettyprinters.py +++ b/js/src/gdb/tests/test-prettyprinters.py @@ -4,7 +4,7 @@ import mozilla.prettyprinters -run_fragment('prettyprinters.implemented_types') +run_fragment("prettyprinters.implemented_types") def implemented_type_names(expr): @@ -13,13 +13,13 @@ def implemented_type_names(expr): return [str(_) for _ in it] -assert_eq(implemented_type_names('i'), ['int']) -assert_eq(implemented_type_names('a'), ['A', 'int']) -assert_eq(implemented_type_names('b'), ['B', 'A', 'int']) -assert_eq(implemented_type_names('c'), ['C']) -assert_eq(implemented_type_names('c_'), ['C_', 'C']) -assert_eq(implemented_type_names('e'), ['E', 'C', 'D']) -assert_eq(implemented_type_names('e_'), ['E_', 'E', 'C', 'D']) +assert_eq(implemented_type_names("i"), ["int"]) +assert_eq(implemented_type_names("a"), ["A", "int"]) +assert_eq(implemented_type_names("b"), ["B", "A", "int"]) +assert_eq(implemented_type_names("c"), ["C"]) +assert_eq(implemented_type_names("c_"), ["C_", "C"]) +assert_eq(implemented_type_names("e"), ["E", "C", "D"]) +assert_eq(implemented_type_names("e_"), ["E_", "E", "C", "D"]) # Some compilers strip trivial typedefs in the debuginfo from classes' base # classes. Sometimes this can be fixed with -fno-eliminate-unused-debug-types, @@ -27,14 +27,14 @@ def implemented_type_names(expr): # # It would probably be better to figure out how to make the compiler emit them, # since I think this test is here for a reason. -if gdb.lookup_type('F').fields()[0].name == 'C_': +if gdb.lookup_type("F").fields()[0].name == "C_": # We have the typedef info. 
- assert_eq(implemented_type_names('f'), ['F', 'C_', 'D_', 'C', 'D']) - assert_eq(implemented_type_names('h'), ['H', 'F', 'G', 'C_', 'D_', 'C', 'D']) + assert_eq(implemented_type_names("f"), ["F", "C_", "D_", "C", "D"]) + assert_eq(implemented_type_names("h"), ["H", "F", "G", "C_", "D_", "C", "D"]) else: - assert_eq(implemented_type_names('f'), ['F', 'C', 'D']) - assert_eq(implemented_type_names('h'), ['H', 'F', 'G', 'C', 'D']) + assert_eq(implemented_type_names("f"), ["F", "C", "D"]) + assert_eq(implemented_type_names("h"), ["H", "F", "G", "C", "D"]) # Check that our pretty-printers aren't interfering with printing other types. -assert_pretty('10', '10') -assert_pretty('(void*) 0', '') # Because of 'set print address off' +assert_pretty("10", "10") +assert_pretty("(void*) 0", "") # Because of 'set print address off' diff --git a/js/src/gdb/tests/test-unwind.py b/js/src/gdb/tests/test-unwind.py index 841e5ccbee58af..d12c4b0f6ed33a 100644 --- a/js/src/gdb/tests/test-unwind.py +++ b/js/src/gdb/tests/test-unwind.py @@ -7,9 +7,10 @@ def do_unwinder_test(): # The unwinder is disabled by default for the moment. Turn it on to check # that the unwinder works as expected. import gdb + gdb.execute("enable unwinder .* SpiderMonkey") - run_fragment('unwind.simple', 'Something') + run_fragment("unwind.simple", "Something") first = True # The unwinder is a bit flaky still but should at least be able to @@ -50,10 +51,11 @@ def do_unwinder_test(): # Only on the right platforms. -if platform.machine() == 'x86_64' and platform.system() == 'Linux': +if platform.machine() == "x86_64" and platform.system() == "Linux": # Only test when gdb has the unwinder feature. try: import gdb.unwinder # NOQA: F401 + do_unwinder_test() except Exception: pass diff --git a/js/src/gdb/tests/typedef-printers.py b/js/src/gdb/tests/typedef-printers.py index d4df7cd5b46a35..888379ee637807 100644 --- a/js/src/gdb/tests/typedef-printers.py +++ b/js/src/gdb/tests/typedef-printers.py @@ -5,14 +5,14 @@ import mozilla.prettyprinters -@mozilla.prettyprinters.pretty_printer('my_typedef') +@mozilla.prettyprinters.pretty_printer("my_typedef") class my_typedef(object): def __init__(self, value, cache): pass def to_string(self): - return 'huzzah' + return "huzzah" -run_fragment('typedef_printers.one') -assert_pretty('i', 'huzzah') +run_fragment("typedef_printers.one") +assert_pretty("i", "huzzah") diff --git a/js/src/irregexp/import-irregexp.py b/js/src/irregexp/import-irregexp.py index 691adab8800c58..f136b33fbbf2cd 100755 --- a/js/src/irregexp/import-irregexp.py +++ b/js/src/irregexp/import-irregexp.py @@ -36,27 +36,29 @@ def get_hash(path): # Get the hash for the current git revision cwd = os.getcwd() os.chdir(path) - command = ['git', 'rev-parse', 'HEAD'] - result = subprocess.check_output(command, encoding='utf-8') + command = ["git", "rev-parse", "HEAD"] + result = subprocess.check_output(command, encoding="utf-8") os.chdir(cwd) return result.rstrip() def copy_and_update_includes(src_path, dst_path): # List of header files that need to include the shim header - need_shim = ['property-sequences.h', - 'regexp-ast.h', - 'regexp-bytecode-peephole.h', - 'regexp-bytecodes.h', - 'regexp-dotprinter.h', - 'regexp-error.h', - 'regexp.h', - 'regexp-macro-assembler.h', - 'regexp-stack.h', - 'special-case.h'] - - src = open(str(src_path), 'r') - dst = open(str(dst_path), 'w') + need_shim = [ + "property-sequences.h", + "regexp-ast.h", + "regexp-bytecode-peephole.h", + "regexp-bytecodes.h", + "regexp-dotprinter.h", + "regexp-error.h", + "regexp.h", + 
"regexp-macro-assembler.h", + "regexp-stack.h", + "special-case.h", + ] + + src = open(str(src_path), "r") + dst = open(str(dst_path), "w") # 1. Rewrite includes of V8 regexp headers: regexp_include = re.compile('#include "src/regexp') @@ -77,7 +79,7 @@ def copy_and_update_includes(src_path, dst_path): for line in src: if adding_shim_now: - if line == '\n': + if line == "\n": dst.write('#include "irregexp/RegExpShim.h"\n') need_to_add_shim = False adding_shim_now = False @@ -92,11 +94,13 @@ def copy_and_update_includes(src_path, dst_path): def import_from(srcdir, dstdir): - excluded = ['OWNERS', - 'regexp.cc', - 'regexp-utils.cc', - 'regexp-utils.h', - 'regexp-macro-assembler-arch.h'] + excluded = [ + "OWNERS", + "regexp.cc", + "regexp-utils.cc", + "regexp-utils.h", + "regexp-macro-assembler-arch.h", + ] for file in srcdir.iterdir(): if file.is_dir(): @@ -107,38 +111,37 @@ def import_from(srcdir, dstdir): # Update IRREGEXP_VERSION file hash = get_hash(srcdir) - version_file = open(str(dstdir / 'IRREGEXP_VERSION'), 'w') - version_file.write('Imported using import-irregexp.py from:\n') - version_file.write('https://github.com/v8/v8/tree/%s/src/regexp\n' % hash) + version_file = open(str(dstdir / "IRREGEXP_VERSION"), "w") + version_file.write("Imported using import-irregexp.py from:\n") + version_file.write("https://github.com/v8/v8/tree/%s/src/regexp\n" % hash) -if __name__ == '__main__': +if __name__ == "__main__": import argparse import tempfile # This script should be run from js/src/irregexp to work correctly. current_path = Path(os.getcwd()) - expected_path = 'js/src/irregexp' + expected_path = "js/src/irregexp" if not current_path.match(expected_path): - raise RuntimeError('%s must be run from %s' % (sys.argv[0], - expected_path)) + raise RuntimeError("%s must be run from %s" % (sys.argv[0], expected_path)) - parser = argparse.ArgumentParser(description='Import irregexp from v8') - parser.add_argument('-p', '--path', help='path to v8/src/regexp') + parser = argparse.ArgumentParser(description="Import irregexp from v8") + parser.add_argument("-p", "--path", help="path to v8/src/regexp") args = parser.parse_args() if args.path: src_path = Path(args.path) - if not (src_path / 'regexp.h').exists(): - print('Usage:\n import-irregexp.py --path ') + if not (src_path / "regexp.h").exists(): + print("Usage:\n import-irregexp.py --path ") sys.exit(1) import_from(src_path, current_path) sys.exit(0) with tempfile.TemporaryDirectory() as tempdir: - v8_git = 'https://github.com/v8/v8.git' - clone = 'git clone --depth 1 %s %s' % (v8_git, tempdir) + v8_git = "https://github.com/v8/v8.git" + clone = "git clone --depth 1 %s %s" % (v8_git, tempdir) os.system(clone) - src_path = Path(tempdir) / 'src/regexp' + src_path = Path(tempdir) / "src/regexp" import_from(src_path, current_path) diff --git a/js/src/irregexp/moz.build b/js/src/irregexp/moz.build index c65f1085e8a257..9891a0bf3d68af 100644 --- a/js/src/irregexp/moz.build +++ b/js/src/irregexp/moz.build @@ -4,44 +4,40 @@ # License, v. 2.0. If a copy of the MPL was not distributed with this # file, You can obtain one at http://mozilla.org/MPL/2.0/. -FINAL_LIBRARY = 'js' +FINAL_LIBRARY = "js" # Includes should be relative to parent path -LOCAL_INCLUDES += [ - '!..', - '..' 
-] +LOCAL_INCLUDES += ["!..", ".."] -include('../js-config.mozbuild') -include('../js-cxxflags.mozbuild') +include("../js-config.mozbuild") +include("../js-cxxflags.mozbuild") UNIFIED_SOURCES += [ - 'imported/regexp-ast.cc', - 'imported/regexp-bytecode-generator.cc', - 'imported/regexp-bytecode-peephole.cc', - 'imported/regexp-bytecodes.cc', - 'imported/regexp-compiler-tonode.cc', - 'imported/regexp-dotprinter.cc', - 'imported/regexp-interpreter.cc', - 'imported/regexp-macro-assembler-tracer.cc', - 'imported/regexp-macro-assembler.cc', - 'imported/regexp-parser.cc', - 'imported/regexp-stack.cc', - 'RegExpAPI.cpp', - 'RegExpShim.cpp', - 'util/UnicodeShim.cpp' + "imported/regexp-ast.cc", + "imported/regexp-bytecode-generator.cc", + "imported/regexp-bytecode-peephole.cc", + "imported/regexp-bytecodes.cc", + "imported/regexp-compiler-tonode.cc", + "imported/regexp-dotprinter.cc", + "imported/regexp-interpreter.cc", + "imported/regexp-macro-assembler-tracer.cc", + "imported/regexp-macro-assembler.cc", + "imported/regexp-parser.cc", + "imported/regexp-stack.cc", + "RegExpAPI.cpp", + "RegExpShim.cpp", + "util/UnicodeShim.cpp", ] SOURCES += [ - 'imported/regexp-compiler.cc', # Bug 1643693 - 'RegExpNativeMacroAssembler.cpp' + "imported/regexp-compiler.cc", # Bug 1643693 + "RegExpNativeMacroAssembler.cpp", ] -if CONFIG['JS_HAS_INTL_API']: - CXXFLAGS += ['-DV8_INTL_SUPPORT'] - UNIFIED_SOURCES += ['imported/property-sequences.cc', - 'imported/special-case.cc'] +if CONFIG["JS_HAS_INTL_API"]: + CXXFLAGS += ["-DV8_INTL_SUPPORT"] + UNIFIED_SOURCES += ["imported/property-sequences.cc", "imported/special-case.cc"] # Make sure all irregexp code is built with libfuzzer # coverage instrumentation in FUZZING mode. -if CONFIG['FUZZING_INTERFACES'] and CONFIG['LIBFUZZER']: - include('/tools/fuzzing/libfuzzer-config.mozbuild') +if CONFIG["FUZZING_INTERFACES"] and CONFIG["LIBFUZZER"]: + include("/tools/fuzzing/libfuzzer-config.mozbuild") diff --git a/js/src/jit-test/jit_test.py b/js/src/jit-test/jit_test.py index 7ba148f8e3bcd6..914a0400473ff5 100755 --- a/js/src/jit-test/jit_test.py +++ b/js/src/jit-test/jit_test.py @@ -22,9 +22,10 @@ def add_tests_dir_to_path(): from os.path import dirname, exists, join, realpath + js_src_dir = dirname(dirname(realpath(sys.argv[0]))) - assert exists(join(js_src_dir, 'jsapi.h')) - sys.path.insert(0, join(js_src_dir, 'tests')) + assert exists(join(js_src_dir, "jsapi.h")) + sys.path.insert(0, join(js_src_dir, "tests")) add_tests_dir_to_path() @@ -56,18 +57,18 @@ def choose_item(jobs, max_items, display): # Don't present a choice if there are too many tests if job_count > max_items: - raise Exception('Too many jobs.') + raise Exception("Too many jobs.") for i, job in enumerate(jobs, 1): print("{}) {}".format(i, display(job))) - item = read_input('Which one:\n') + item = read_input("Which one:\n") try: item = int(item) if item > job_count or item < 1: - raise Exception('Input isn\'t between 1 and {}'.format(job_count)) + raise Exception("Input isn't between 1 and {}".format(job_count)) except ValueError: - raise Exception('Unrecognized input') + raise Exception("Unrecognized input") return jobs[item - 1] @@ -76,134 +77,302 @@ def main(argv): # The [TESTS] optional arguments are paths of test files relative # to the jit-test/tests directory. 
import argparse - op = argparse.ArgumentParser(description='Run jit-test JS shell tests') - op.add_argument('-s', '--show-cmd', dest='show_cmd', action='store_true', - help='show js shell command run') - op.add_argument('-f', '--show-failed-cmd', dest='show_failed', - action='store_true', - help='show command lines of failed tests') - op.add_argument('-o', '--show-output', dest='show_output', - action='store_true', - help='show output from js shell') - op.add_argument('-F', '--failed-only', dest='failed_only', - action='store_true', - help="if --show-output is given, only print output for" - " failed tests") - op.add_argument('--no-show-failed', dest='no_show_failed', - action='store_true', - help="don't print output for failed tests" - " (no-op with --show-output)") - op.add_argument('-x', '--exclude', dest='exclude', - default=[], action='append', - help='exclude given test dir or path') - op.add_argument('--exclude-from', dest='exclude_from', type=str, - help='exclude each test dir or path in FILE') - op.add_argument('--slow', dest='run_slow', action='store_true', - help='also run tests marked as slow') - op.add_argument('--no-slow', dest='run_slow', action='store_false', - help='do not run tests marked as slow (the default)') - op.add_argument('-t', '--timeout', dest='timeout', type=float, default=150.0, - help='set test timeout in seconds') - op.add_argument('--no-progress', dest='hide_progress', action='store_true', - help='hide progress bar') - op.add_argument('--tinderbox', dest='format', action='store_const', - const='automation', - help='Use automation-parseable output format') - op.add_argument('--format', dest='format', default='none', - choices=('automation', 'none'), - help='Output format (default %(default)s).') - op.add_argument('--args', dest='shell_args', metavar='ARGS', default='', - help='extra args to pass to the JS shell') - op.add_argument('--feature-args', dest='feature_args', metavar='ARGS', - default='', - help='even more args to pass to the JS shell ' - '(for compatibility with jstests.py)') - op.add_argument('-w', '--write-failures', dest='write_failures', - metavar='FILE', - help='Write a list of failed tests to [FILE]') - op.add_argument('-C', '--check-output', action='store_true', dest='check_output', - help='Run tests to check output for different jit-flags') - op.add_argument('-r', '--read-tests', dest='read_tests', metavar='FILE', - help='Run test files listed in [FILE]') - op.add_argument('-R', '--retest', dest='retest', metavar='FILE', - help='Retest using test list file [FILE]') - op.add_argument('-g', '--debug', action='store_const', const='gdb', dest='debugger', - help='Run a single test under the gdb debugger') - op.add_argument('-G', '--debug-rr', action='store_const', const='rr', dest='debugger', - help='Run a single test under the rr debugger') - op.add_argument('--debugger', type=str, - help='Run a single test under the specified debugger') - op.add_argument('--valgrind', dest='valgrind', action='store_true', - help='Enable the |valgrind| flag, if valgrind is in $PATH.') - op.add_argument('--unusable-error-status', action='store_true', - help='Ignore incorrect exit status on tests that should return nonzero.') - op.add_argument('--valgrind-all', dest='valgrind_all', action='store_true', - help='Run all tests with valgrind, if valgrind is in $PATH.') - op.add_argument('--avoid-stdio', dest='avoid_stdio', action='store_true', - help='Use js-shell file indirection instead of piping stdio.') - op.add_argument('--write-failure-output', 
dest='write_failure_output', - action='store_true', - help='With --write-failures=FILE, additionally write the' - ' output of failed tests to [FILE]') - op.add_argument('--jitflags', dest='jitflags', default='none', - choices=valid_jitflags(), - help='IonMonkey option combinations (default %(default)s).') - op.add_argument('--ion', dest='jitflags', action='store_const', const='ion', - help='Run tests once with --ion-eager and once with' - ' --baseline-eager (equivalent to --jitflags=ion)') - op.add_argument('--tbpl', dest='jitflags', action='store_const', const='all', - help='Run tests with all IonMonkey option combinations' - ' (equivalent to --jitflags=all)') - op.add_argument('-j', '--worker-count', dest='max_jobs', type=int, - default=max(1, get_cpu_count()), - help='Number of tests to run in parallel (default %(default)s).') - op.add_argument('--remote', action='store_true', - help='Run tests on a remote device') - op.add_argument('--deviceIP', action='store', - type=str, dest='device_ip', - help='IP address of remote device to test') - op.add_argument('--devicePort', action='store', - type=int, dest='device_port', default=20701, - help='port of remote device to test') - op.add_argument('--deviceSerial', action='store', - type=str, dest='device_serial', default=None, - help='ADB device serial number of remote device to test') - op.add_argument('--remoteTestRoot', dest='remote_test_root', action='store', - type=str, default='/data/local/tmp/test_root', - help='The remote directory to use as test root' - ' (e.g. %(default)s)') - op.add_argument('--localLib', dest='local_lib', action='store', - type=str, - help='The location of libraries to push -- preferably' - ' stripped') - op.add_argument('--repeat', type=int, default=1, - help='Repeat tests the given number of times.') - op.add_argument('--this-chunk', type=int, default=1, - help='The test chunk to run.') - op.add_argument('--total-chunks', type=int, default=1, - help='The total number of test chunks.') - op.add_argument('--ignore-timeouts', dest='ignore_timeouts', metavar='FILE', - help='Ignore timeouts of tests listed in [FILE]') - op.add_argument('--test-reflect-stringify', dest="test_reflect_stringify", - help="instead of running tests, use them to test the " - "Reflect.stringify code in specified file") + + op = argparse.ArgumentParser(description="Run jit-test JS shell tests") + op.add_argument( + "-s", + "--show-cmd", + dest="show_cmd", + action="store_true", + help="show js shell command run", + ) + op.add_argument( + "-f", + "--show-failed-cmd", + dest="show_failed", + action="store_true", + help="show command lines of failed tests", + ) + op.add_argument( + "-o", + "--show-output", + dest="show_output", + action="store_true", + help="show output from js shell", + ) + op.add_argument( + "-F", + "--failed-only", + dest="failed_only", + action="store_true", + help="if --show-output is given, only print output for" " failed tests", + ) + op.add_argument( + "--no-show-failed", + dest="no_show_failed", + action="store_true", + help="don't print output for failed tests" " (no-op with --show-output)", + ) + op.add_argument( + "-x", + "--exclude", + dest="exclude", + default=[], + action="append", + help="exclude given test dir or path", + ) + op.add_argument( + "--exclude-from", + dest="exclude_from", + type=str, + help="exclude each test dir or path in FILE", + ) + op.add_argument( + "--slow", + dest="run_slow", + action="store_true", + help="also run tests marked as slow", + ) + op.add_argument( + "--no-slow", + dest="run_slow", 
+ action="store_false", + help="do not run tests marked as slow (the default)", + ) + op.add_argument( + "-t", + "--timeout", + dest="timeout", + type=float, + default=150.0, + help="set test timeout in seconds", + ) + op.add_argument( + "--no-progress", + dest="hide_progress", + action="store_true", + help="hide progress bar", + ) + op.add_argument( + "--tinderbox", + dest="format", + action="store_const", + const="automation", + help="Use automation-parseable output format", + ) + op.add_argument( + "--format", + dest="format", + default="none", + choices=("automation", "none"), + help="Output format (default %(default)s).", + ) + op.add_argument( + "--args", + dest="shell_args", + metavar="ARGS", + default="", + help="extra args to pass to the JS shell", + ) + op.add_argument( + "--feature-args", + dest="feature_args", + metavar="ARGS", + default="", + help="even more args to pass to the JS shell " + "(for compatibility with jstests.py)", + ) + op.add_argument( + "-w", + "--write-failures", + dest="write_failures", + metavar="FILE", + help="Write a list of failed tests to [FILE]", + ) + op.add_argument( + "-C", + "--check-output", + action="store_true", + dest="check_output", + help="Run tests to check output for different jit-flags", + ) + op.add_argument( + "-r", + "--read-tests", + dest="read_tests", + metavar="FILE", + help="Run test files listed in [FILE]", + ) + op.add_argument( + "-R", + "--retest", + dest="retest", + metavar="FILE", + help="Retest using test list file [FILE]", + ) + op.add_argument( + "-g", + "--debug", + action="store_const", + const="gdb", + dest="debugger", + help="Run a single test under the gdb debugger", + ) + op.add_argument( + "-G", + "--debug-rr", + action="store_const", + const="rr", + dest="debugger", + help="Run a single test under the rr debugger", + ) + op.add_argument( + "--debugger", type=str, help="Run a single test under the specified debugger" + ) + op.add_argument( + "--valgrind", + dest="valgrind", + action="store_true", + help="Enable the |valgrind| flag, if valgrind is in $PATH.", + ) + op.add_argument( + "--unusable-error-status", + action="store_true", + help="Ignore incorrect exit status on tests that should return nonzero.", + ) + op.add_argument( + "--valgrind-all", + dest="valgrind_all", + action="store_true", + help="Run all tests with valgrind, if valgrind is in $PATH.", + ) + op.add_argument( + "--avoid-stdio", + dest="avoid_stdio", + action="store_true", + help="Use js-shell file indirection instead of piping stdio.", + ) + op.add_argument( + "--write-failure-output", + dest="write_failure_output", + action="store_true", + help="With --write-failures=FILE, additionally write the" + " output of failed tests to [FILE]", + ) + op.add_argument( + "--jitflags", + dest="jitflags", + default="none", + choices=valid_jitflags(), + help="IonMonkey option combinations (default %(default)s).", + ) + op.add_argument( + "--ion", + dest="jitflags", + action="store_const", + const="ion", + help="Run tests once with --ion-eager and once with" + " --baseline-eager (equivalent to --jitflags=ion)", + ) + op.add_argument( + "--tbpl", + dest="jitflags", + action="store_const", + const="all", + help="Run tests with all IonMonkey option combinations" + " (equivalent to --jitflags=all)", + ) + op.add_argument( + "-j", + "--worker-count", + dest="max_jobs", + type=int, + default=max(1, get_cpu_count()), + help="Number of tests to run in parallel (default %(default)s).", + ) + op.add_argument( + "--remote", action="store_true", help="Run tests on a remote 
device" + ) + op.add_argument( + "--deviceIP", + action="store", + type=str, + dest="device_ip", + help="IP address of remote device to test", + ) + op.add_argument( + "--devicePort", + action="store", + type=int, + dest="device_port", + default=20701, + help="port of remote device to test", + ) + op.add_argument( + "--deviceSerial", + action="store", + type=str, + dest="device_serial", + default=None, + help="ADB device serial number of remote device to test", + ) + op.add_argument( + "--remoteTestRoot", + dest="remote_test_root", + action="store", + type=str, + default="/data/local/tmp/test_root", + help="The remote directory to use as test root" " (e.g. %(default)s)", + ) + op.add_argument( + "--localLib", + dest="local_lib", + action="store", + type=str, + help="The location of libraries to push -- preferably" " stripped", + ) + op.add_argument( + "--repeat", type=int, default=1, help="Repeat tests the given number of times." + ) + op.add_argument("--this-chunk", type=int, default=1, help="The test chunk to run.") + op.add_argument( + "--total-chunks", type=int, default=1, help="The total number of test chunks." + ) + op.add_argument( + "--ignore-timeouts", + dest="ignore_timeouts", + metavar="FILE", + help="Ignore timeouts of tests listed in [FILE]", + ) + op.add_argument( + "--test-reflect-stringify", + dest="test_reflect_stringify", + help="instead of running tests, use them to test the " + "Reflect.stringify code in specified file", + ) # --enable-webrender is ignored as it is not relevant for JIT # tests, but is required for harness compatibility. - op.add_argument('--enable-webrender', action='store_true', - dest="enable_webrender", default=False, - help=argparse.SUPPRESS) - op.add_argument('js_shell', metavar='JS_SHELL', help='JS shell to run tests with') + op.add_argument( + "--enable-webrender", + action="store_true", + dest="enable_webrender", + default=False, + help=argparse.SUPPRESS, + ) + op.add_argument("js_shell", metavar="JS_SHELL", help="JS shell to run tests with") options, test_args = op.parse_known_args(argv) js_shell = which(options.js_shell) test_environment = get_environment_overlay(js_shell) if not (os.path.isfile(js_shell) and os.access(js_shell, os.X_OK)): - if (platform.system() != 'Windows' or - os.path.isfile(js_shell) or not - os.path.isfile(js_shell + ".exe") or not - os.access(js_shell + ".exe", os.X_OK)): - op.error('shell is not executable: ' + js_shell) + if ( + platform.system() != "Windows" + or os.path.isfile(js_shell) + or not os.path.isfile(js_shell + ".exe") + or not os.access(js_shell + ".exe", os.X_OK) + ): + op.error("shell is not executable: " + js_shell) if jittests.stdio_might_be_broken(): # Prefer erring on the side of caution and not using stdio if @@ -232,17 +401,18 @@ def main(argv): try: f = open(options.read_tests) for line in f: - test_list.append(os.path.join(jittests.TEST_DIR, - line.strip('\n'))) + test_list.append(os.path.join(jittests.TEST_DIR, line.strip("\n"))) f.close() except IOError: if options.retest: read_all = True else: - sys.stderr.write("Exception thrown trying to read test file" - " '{}'\n".format(options.read_tests)) + sys.stderr.write( + "Exception thrown trying to read test file" + " '{}'\n".format(options.read_tests) + ) traceback.print_exc() - sys.stderr.write('---\n') + sys.stderr.write("---\n") if read_all: test_list = jittests.find_tests() @@ -258,12 +428,10 @@ def main(argv): exclude_list = [] for exclude in options.exclude: exclude_list += jittests.find_tests(exclude) - test_list = [test for test in 
test_list - if test not in set(exclude_list)] + test_list = [test for test in test_list if test not in set(exclude_list)] if not test_list: - print("No tests found matching command line arguments.", - file=sys.stderr) + print("No tests found matching command line arguments.", file=sys.stderr) sys.exit(0) test_list = [jittests.JitTest.from_file(_, options) for _ in test_list] @@ -285,8 +453,10 @@ def main(argv): test_list = test_list[start:end] if not test_list: - print("No tests found matching command line arguments after filtering.", - file=sys.stderr) + print( + "No tests found matching command line arguments after filtering.", + file=sys.stderr, + ) sys.exit(0) # The full test list is ready. Now create copies for each JIT configuration. @@ -307,7 +477,7 @@ def main(argv): with open(options.ignore_timeouts) as f: ignore = set() for line in f.readlines(): - path = line.strip('\n') + path = line.strip("\n") ignore.add(path) options.ignore_timeouts = ignore except IOError: @@ -315,25 +485,30 @@ def main(argv): else: options.ignore_timeouts = set() - prefix = [js_shell] + shlex.split(options.shell_args) + shlex.split(options.feature_args) - prologue = os.path.join(jittests.LIB_DIR, 'prologue.js') + prefix = ( + [js_shell] + shlex.split(options.shell_args) + shlex.split(options.feature_args) + ) + prologue = os.path.join(jittests.LIB_DIR, "prologue.js") if options.remote: - prologue = posixpath.join(options.remote_test_root, - 'tests', 'tests', 'lib', 'prologue.js') + prologue = posixpath.join( + options.remote_test_root, "tests", "tests", "lib", "prologue.js" + ) - prefix += ['-f', prologue] + prefix += ["-f", prologue] if options.debugger: if job_count > 1: - print('Multiple tests match command line' - ' arguments, debugger can only run one') + print( + "Multiple tests match command line" + " arguments, debugger can only run one" + ) jobs = list(job_list) def display_job(job): flags = "" if len(job.jitflags) != 0: - flags = "({})".format(' '.join(job.jitflags)) - return '{} {}'.format(job.path, flags) + flags = "({})".format(" ".join(job.jitflags)) + return "{} {}".format(job.path, flags) try: tc = choose_item(jobs, max_items=50, display=display_job) @@ -342,23 +517,28 @@ def display_job(job): else: tc = next(job_list) - if options.debugger == 'gdb': - debug_cmd = ['gdb', '--args'] - elif options.debugger == 'lldb': - debug_cmd = ['lldb', '--'] - elif options.debugger == 'rr': - debug_cmd = ['rr', 'record'] + if options.debugger == "gdb": + debug_cmd = ["gdb", "--args"] + elif options.debugger == "lldb": + debug_cmd = ["lldb", "--"] + elif options.debugger == "rr": + debug_cmd = ["rr", "record"] else: debug_cmd = options.debugger.split() with change_env(test_environment): - if options.debugger == 'rr': - subprocess.call(debug_cmd + - tc.command(prefix, jittests.LIB_DIR, jittests.MODULE_DIR)) - os.execvp('rr', ['rr', 'replay']) + if options.debugger == "rr": + subprocess.call( + debug_cmd + + tc.command(prefix, jittests.LIB_DIR, jittests.MODULE_DIR) + ) + os.execvp("rr", ["rr", "replay"]) else: - os.execvp(debug_cmd[0], debug_cmd + - tc.command(prefix, jittests.LIB_DIR, jittests.MODULE_DIR)) + os.execvp( + debug_cmd[0], + debug_cmd + + tc.command(prefix, jittests.LIB_DIR, jittests.MODULE_DIR), + ) sys.exit() try: @@ -372,12 +552,14 @@ def display_job(job): sys.exit(2) except OSError: if not os.path.exists(prefix[0]): - print("JS shell argument: file does not exist:" - " '{}'".format(prefix[0]), file=sys.stderr) + print( + "JS shell argument: file does not exist:" " '{}'".format(prefix[0]), + 
file=sys.stderr, + ) sys.exit(1) else: raise -if __name__ == '__main__': +if __name__ == "__main__": main(sys.argv[1:]) diff --git a/js/src/jit/GenerateCacheIRFiles.py b/js/src/jit/GenerateCacheIRFiles.py index 1ce56286ca07b3..c500641a7f3e94 100644 --- a/js/src/jit/GenerateCacheIRFiles.py +++ b/js/src/jit/GenerateCacheIRFiles.py @@ -27,19 +27,22 @@ def generate_header(c_out, includeguard, contents): - c_out.write(HEADER_TEMPLATE % { - 'includeguard': includeguard, - 'contents': contents, - }) + c_out.write( + HEADER_TEMPLATE + % { + "includeguard": includeguard, + "contents": contents, + } + ) def load_yaml(yaml_path): # First invoke preprocessor.py so that we can use #ifdef JS_SIMULATOR in # the YAML file. pp = Preprocessor() - pp.context.update(buildconfig.defines['ALLDEFINES']) + pp.context.update(buildconfig.defines["ALLDEFINES"]) pp.out = six.StringIO() - pp.do_filter('substitution') + pp.do_filter("substitution") pp.do_include(yaml_path) contents = pp.out.getvalue() @@ -52,6 +55,7 @@ class OrderedLoader(yaml.Loader): def construct_mapping(loader, node): loader.flatten_mapping(node) return OrderedDict(loader.construct_pairs(node)) + tag = yaml.resolver.BaseResolver.DEFAULT_MAPPING_TAG OrderedLoader.add_constructor(tag, construct_mapping) return yaml.load(contents, OrderedLoader) @@ -60,45 +64,43 @@ def construct_mapping(loader, node): # Information for generating CacheIRWriter code for a single argument. Tuple # stores the C++ argument type and the CacheIRWriter method to call. arg_writer_info = { - 'ValId': ('ValOperandId', 'writeOperandId'), - 'ObjId': ('ObjOperandId', 'writeOperandId'), - 'StringId': ('StringOperandId', 'writeOperandId'), - 'SymbolId': ('SymbolOperandId', 'writeOperandId'), - 'BooleanId': ('BooleanOperandId', 'writeOperandId'), - 'Int32Id': ('Int32OperandId', 'writeOperandId'), - 'NumberId': ('NumberOperandId', 'writeOperandId'), - 'BigIntId': ('BigIntOperandId', 'writeOperandId'), - 'ValueTagId': ('ValueTagOperandId', 'writeOperandId'), - 'RawId': ('OperandId', 'writeOperandId'), - - 'ShapeField': ('Shape*', 'writeShapeField'), - 'GroupField': ('ObjectGroup*', 'writeGroupField'), - 'ObjectField': ('JSObject*', 'writeObjectField'), - 'StringField': ('JSString*', 'writeStringField'), - 'AtomField': ('JSAtom*', 'writeStringField'), - 'PropertyNameField': ('PropertyName*', 'writeStringField'), - 'SymbolField': ('JS::Symbol*', 'writeSymbolField'), - 'BaseScriptField': ('BaseScript*', 'writeBaseScriptField'), - 'RawInt32Field': ('uint32_t', 'writeRawInt32Field'), - 'RawPointerField': ('const void*', 'writeRawPointerField'), - 'IdField': ('jsid', 'writeIdField'), - 'ValueField': ('const Value&', 'writeValueField'), - 'RawInt64Field': ('uint64_t', 'writeRawInt64Field'), - - 'JSOpImm': ('JSOp', 'writeJSOpImm'), - 'BoolImm': ('bool', 'writeBoolImm'), - 'ByteImm': ('uint32_t', 'writeByteImm'), # uint32_t to enable fits-in-byte asserts. 
- 'GuardClassKindImm': ('GuardClassKind', 'writeGuardClassKindImm'), - 'ValueTypeImm': ('ValueType', 'writeValueTypeImm'), - 'JSWhyMagicImm': ('JSWhyMagic', 'writeJSWhyMagicImm'), - 'CallFlagsImm': ('CallFlags', 'writeCallFlagsImm'), - 'ScalarTypeImm': ('Scalar::Type', 'writeScalarTypeImm'), - 'MetaTwoByteKindImm': ('MetaTwoByteKind', 'writeMetaTwoByteKindImm'), - 'UnaryMathFunctionImm': ('UnaryMathFunction', 'writeUnaryMathFunctionImm'), - 'Int32Imm': ('int32_t', 'writeInt32Imm'), - 'UInt32Imm': ('uint32_t', 'writeUInt32Imm'), - 'JSNativeImm': ('JSNative', 'writeJSNativeImm'), - 'StaticStringImm': ('const char*', 'writeStaticStringImm'), + "ValId": ("ValOperandId", "writeOperandId"), + "ObjId": ("ObjOperandId", "writeOperandId"), + "StringId": ("StringOperandId", "writeOperandId"), + "SymbolId": ("SymbolOperandId", "writeOperandId"), + "BooleanId": ("BooleanOperandId", "writeOperandId"), + "Int32Id": ("Int32OperandId", "writeOperandId"), + "NumberId": ("NumberOperandId", "writeOperandId"), + "BigIntId": ("BigIntOperandId", "writeOperandId"), + "ValueTagId": ("ValueTagOperandId", "writeOperandId"), + "RawId": ("OperandId", "writeOperandId"), + "ShapeField": ("Shape*", "writeShapeField"), + "GroupField": ("ObjectGroup*", "writeGroupField"), + "ObjectField": ("JSObject*", "writeObjectField"), + "StringField": ("JSString*", "writeStringField"), + "AtomField": ("JSAtom*", "writeStringField"), + "PropertyNameField": ("PropertyName*", "writeStringField"), + "SymbolField": ("JS::Symbol*", "writeSymbolField"), + "BaseScriptField": ("BaseScript*", "writeBaseScriptField"), + "RawInt32Field": ("uint32_t", "writeRawInt32Field"), + "RawPointerField": ("const void*", "writeRawPointerField"), + "IdField": ("jsid", "writeIdField"), + "ValueField": ("const Value&", "writeValueField"), + "RawInt64Field": ("uint64_t", "writeRawInt64Field"), + "JSOpImm": ("JSOp", "writeJSOpImm"), + "BoolImm": ("bool", "writeBoolImm"), + "ByteImm": ("uint32_t", "writeByteImm"), # uint32_t to enable fits-in-byte asserts. + "GuardClassKindImm": ("GuardClassKind", "writeGuardClassKindImm"), + "ValueTypeImm": ("ValueType", "writeValueTypeImm"), + "JSWhyMagicImm": ("JSWhyMagic", "writeJSWhyMagicImm"), + "CallFlagsImm": ("CallFlags", "writeCallFlagsImm"), + "ScalarTypeImm": ("Scalar::Type", "writeScalarTypeImm"), + "MetaTwoByteKindImm": ("MetaTwoByteKind", "writeMetaTwoByteKindImm"), + "UnaryMathFunctionImm": ("UnaryMathFunction", "writeUnaryMathFunctionImm"), + "Int32Imm": ("int32_t", "writeInt32Imm"), + "UInt32Imm": ("uint32_t", "writeUInt32Imm"), + "JSNativeImm": ("JSNative", "writeJSNativeImm"), + "StaticStringImm": ("const char*", "writeStaticStringImm"), } @@ -121,34 +123,34 @@ def gen_writer_method(name, args, custom_writer): # Method names start with a lowercase letter. 
method_name = name[0].lower() + name[1:] if custom_writer: - method_name += '_' + method_name += "_" method_args = [] - ret_type = 'void' - args_code = '' + ret_type = "void" + args_code = "" if args: for arg_name, arg_type in six.iteritems(args): cpp_type, write_method = arg_writer_info[arg_type] - if arg_name == 'result': + if arg_name == "result": ret_type = cpp_type - args_code += ' {} result(newOperandId());\\\n'.format(cpp_type) - args_code += ' writeOperandId(result);\\\n' + args_code += " {} result(newOperandId());\\\n".format(cpp_type) + args_code += " writeOperandId(result);\\\n" else: - method_args.append('{} {}'.format(cpp_type, arg_name)) - args_code += ' {}({});\\\n'.format(write_method, arg_name) + method_args.append("{} {}".format(cpp_type, arg_name)) + args_code += " {}({});\\\n".format(write_method, arg_name) - code = '' + code = "" if custom_writer: - code += 'private:\\\n' - code += '{} {}({}) {{\\\n'.format(ret_type, method_name, ', '.join(method_args)) - code += ' writeOp(CacheOp::{});\\\n'.format(name) + code += "private:\\\n" + code += "{} {}({}) {{\\\n".format(ret_type, method_name, ", ".join(method_args)) + code += " writeOp(CacheOp::{});\\\n".format(name) code += args_code - code += ' assertLengthMatches();\\\n' - if ret_type != 'void': - code += ' return result;\\\n' - code += '}' + code += " assertLengthMatches();\\\n" + if ret_type != "void": + code += " return result;\\\n" + code += "}" if custom_writer: - code += '\\\npublic:' + code += "\\\npublic:" return code @@ -156,45 +158,43 @@ def gen_writer_method(name, args, custom_writer): # Tuple stores the C++ type, the suffix used for arguments/variables of this # type, and the expression to read this type from CacheIRReader. arg_reader_info = { - 'ValId': ('ValOperandId', 'Id', 'reader.valOperandId()'), - 'ObjId': ('ObjOperandId', 'Id', 'reader.objOperandId()'), - 'StringId': ('StringOperandId', 'Id', 'reader.stringOperandId()'), - 'SymbolId': ('SymbolOperandId', 'Id', 'reader.symbolOperandId()'), - 'BooleanId': ('BooleanOperandId', 'Id', 'reader.booleanOperandId()'), - 'Int32Id': ('Int32OperandId', 'Id', 'reader.int32OperandId()'), - 'NumberId': ('NumberOperandId', 'Id', 'reader.numberOperandId()'), - 'BigIntId': ('BigIntOperandId', 'Id', 'reader.bigIntOperandId()'), - 'ValueTagId': ('ValueTagOperandId', 'Id', 'reader.valueTagOperandId()'), - 'RawId': ('uint32_t', 'Id', 'reader.rawOperandId()'), - - 'ShapeField': ('uint32_t', 'Offset', 'reader.stubOffset()'), - 'GroupField': ('uint32_t', 'Offset', 'reader.stubOffset()'), - 'ObjectField': ('uint32_t', 'Offset', 'reader.stubOffset()'), - 'StringField': ('uint32_t', 'Offset', 'reader.stubOffset()'), - 'AtomField': ('uint32_t', 'Offset', 'reader.stubOffset()'), - 'PropertyNameField': ('uint32_t', 'Offset', 'reader.stubOffset()'), - 'SymbolField': ('uint32_t', 'Offset', 'reader.stubOffset()'), - 'BaseScriptField': ('uint32_t', 'Offset', 'reader.stubOffset()'), - 'RawInt32Field': ('uint32_t', 'Offset', 'reader.stubOffset()'), - 'RawPointerField': ('uint32_t', 'Offset', 'reader.stubOffset()'), - 'IdField': ('uint32_t', 'Offset', 'reader.stubOffset()'), - 'ValueField': ('uint32_t', 'Offset', 'reader.stubOffset()'), - 'RawInt64Field': ('uint32_t', 'Offset', 'reader.stubOffset()'), - - 'JSOpImm': ('JSOp', '', 'reader.jsop()'), - 'BoolImm': ('bool', '', 'reader.readBool()'), - 'ByteImm': ('uint8_t', '', 'reader.readByte()'), - 'GuardClassKindImm': ('GuardClassKind', '', 'reader.guardClassKind()'), - 'ValueTypeImm': ('ValueType', '', 'reader.valueType()'), - 
'JSWhyMagicImm': ('JSWhyMagic', '', 'reader.whyMagic()'), - 'CallFlagsImm': ('CallFlags', '', 'reader.callFlags()'), - 'ScalarTypeImm': ('Scalar::Type', '', 'reader.scalarType()'), - 'MetaTwoByteKindImm': ('MetaTwoByteKind', '', 'reader.metaKind()'), - 'UnaryMathFunctionImm': ('UnaryMathFunction', '', 'reader.unaryMathFunction()'), - 'Int32Imm': ('int32_t', '', 'reader.int32Immediate()'), - 'UInt32Imm': ('uint32_t', '', 'reader.uint32Immediate()'), - 'JSNativeImm': ('JSNative', '', 'reinterpret_cast<JSNative>(reader.pointer())'), - 'StaticStringImm': ('const char*', '', 'reinterpret_cast<const char*>(reader.pointer())'), + "ValId": ("ValOperandId", "Id", "reader.valOperandId()"), + "ObjId": ("ObjOperandId", "Id", "reader.objOperandId()"), + "StringId": ("StringOperandId", "Id", "reader.stringOperandId()"), + "SymbolId": ("SymbolOperandId", "Id", "reader.symbolOperandId()"), + "BooleanId": ("BooleanOperandId", "Id", "reader.booleanOperandId()"), + "Int32Id": ("Int32OperandId", "Id", "reader.int32OperandId()"), + "NumberId": ("NumberOperandId", "Id", "reader.numberOperandId()"), + "BigIntId": ("BigIntOperandId", "Id", "reader.bigIntOperandId()"), + "ValueTagId": ("ValueTagOperandId", "Id", "reader.valueTagOperandId()"), + "RawId": ("uint32_t", "Id", "reader.rawOperandId()"), + "ShapeField": ("uint32_t", "Offset", "reader.stubOffset()"), + "GroupField": ("uint32_t", "Offset", "reader.stubOffset()"), + "ObjectField": ("uint32_t", "Offset", "reader.stubOffset()"), + "StringField": ("uint32_t", "Offset", "reader.stubOffset()"), + "AtomField": ("uint32_t", "Offset", "reader.stubOffset()"), + "PropertyNameField": ("uint32_t", "Offset", "reader.stubOffset()"), + "SymbolField": ("uint32_t", "Offset", "reader.stubOffset()"), + "BaseScriptField": ("uint32_t", "Offset", "reader.stubOffset()"), + "RawInt32Field": ("uint32_t", "Offset", "reader.stubOffset()"), + "RawPointerField": ("uint32_t", "Offset", "reader.stubOffset()"), + "IdField": ("uint32_t", "Offset", "reader.stubOffset()"), + "ValueField": ("uint32_t", "Offset", "reader.stubOffset()"), + "RawInt64Field": ("uint32_t", "Offset", "reader.stubOffset()"), + "JSOpImm": ("JSOp", "", "reader.jsop()"), + "BoolImm": ("bool", "", "reader.readBool()"), + "ByteImm": ("uint8_t", "", "reader.readByte()"), + "GuardClassKindImm": ("GuardClassKind", "", "reader.guardClassKind()"), + "ValueTypeImm": ("ValueType", "", "reader.valueType()"), + "JSWhyMagicImm": ("JSWhyMagic", "", "reader.whyMagic()"), + "CallFlagsImm": ("CallFlags", "", "reader.callFlags()"), + "ScalarTypeImm": ("Scalar::Type", "", "reader.scalarType()"), + "MetaTwoByteKindImm": ("MetaTwoByteKind", "", "reader.metaKind()"), + "UnaryMathFunctionImm": ("UnaryMathFunction", "", "reader.unaryMathFunction()"), + "Int32Imm": ("int32_t", "", "reader.int32Immediate()"), + "UInt32Imm": ("uint32_t", "", "reader.uint32Immediate()"), + "JSNativeImm": ("JSNative", "", "reinterpret_cast<JSNative>(reader.pointer())"), + "StaticStringImm": ("const char*", "", "reinterpret_cast<const char*>(reader.pointer())"), } @@ -202,7 +202,7 @@ def gen_compiler_method(name, args): """Generates CacheIRCompiler or WarpCacheIRTranspiler header code for a single opcode.""" - method_name = 'emit' + name + method_name = "emit" + name # We generate the signature of the method that needs to be implemented and a # separate function forwarding to it.
For example: @@ -215,75 +215,73 @@ def gen_compiler_method(name, args): # } cpp_args = [] method_args = [] - args_code = '' + args_code = "" if args: for arg_name, arg_type in six.iteritems(args): cpp_type, suffix, readexpr = arg_reader_info[arg_type] cpp_name = arg_name + suffix cpp_args.append(cpp_name) - method_args.append('{} {}'.format(cpp_type, cpp_name)) - args_code += ' {} {} = {};\\\n'.format(cpp_type, cpp_name, readexpr) + method_args.append("{} {}".format(cpp_type, cpp_name)) + args_code += " {} {} = {};\\\n".format(cpp_type, cpp_name, readexpr) # Generate signature. - code = 'MOZ_MUST_USE bool {}({});\\\n'.format(method_name, ', '.join(method_args)) + code = "MOZ_MUST_USE bool {}({});\\\n".format(method_name, ", ".join(method_args)) # Generate the method forwarding to it. - code += 'MOZ_MUST_USE bool {}(CacheIRReader& reader) {{\\\n'.format(method_name) + code += "MOZ_MUST_USE bool {}(CacheIRReader& reader) {{\\\n".format(method_name) code += args_code - code += ' return {}({});\\\n'.format(method_name, ', '.join(cpp_args)) - code += '}\\\n' + code += " return {}({});\\\n".format(method_name, ", ".join(cpp_args)) + code += "}\\\n" return code # For each argument type, the method name for printing it. arg_spewer_method = { - 'ValId': 'spewOperandId', - 'ObjId': 'spewOperandId', - 'StringId': 'spewOperandId', - 'SymbolId': 'spewOperandId', - 'BooleanId': 'spewOperandId', - 'Int32Id': 'spewOperandId', - 'NumberId': 'spewOperandId', - 'BigIntId': 'spewOperandId', - 'ValueTagId': 'spewOperandId', - 'RawId': 'spewRawOperandId', - - 'ShapeField': 'spewField', - 'GroupField': 'spewField', - 'ObjectField': 'spewField', - 'StringField': 'spewField', - 'AtomField': 'spewField', - 'PropertyNameField': 'spewField', - 'SymbolField': 'spewField', - 'BaseScriptField': 'spewField', - 'RawInt32Field': 'spewField', - 'RawPointerField': 'spewField', - 'IdField': 'spewField', - 'ValueField': 'spewField', - 'RawInt64Field': 'spewField', - - 'JSOpImm': 'spewJSOpImm', - 'BoolImm': 'spewBoolImm', - 'ByteImm': 'spewByteImm', - 'GuardClassKindImm': 'spewGuardClassKindImm', - 'ValueTypeImm': 'spewValueTypeImm', - 'JSWhyMagicImm': 'spewJSWhyMagicImm', - 'CallFlagsImm': 'spewCallFlagsImm', - 'ScalarTypeImm': 'spewScalarTypeImm', - 'MetaTwoByteKindImm': 'spewMetaTwoByteKindImm', - 'UnaryMathFunctionImm': 'spewUnaryMathFunctionImm', - 'Int32Imm': 'spewInt32Imm', - 'UInt32Imm': 'spewUInt32Imm', - 'JSNativeImm': 'spewJSNativeImm', - 'StaticStringImm': 'spewStaticStringImm', + "ValId": "spewOperandId", + "ObjId": "spewOperandId", + "StringId": "spewOperandId", + "SymbolId": "spewOperandId", + "BooleanId": "spewOperandId", + "Int32Id": "spewOperandId", + "NumberId": "spewOperandId", + "BigIntId": "spewOperandId", + "ValueTagId": "spewOperandId", + "RawId": "spewRawOperandId", + "ShapeField": "spewField", + "GroupField": "spewField", + "ObjectField": "spewField", + "StringField": "spewField", + "AtomField": "spewField", + "PropertyNameField": "spewField", + "SymbolField": "spewField", + "BaseScriptField": "spewField", + "RawInt32Field": "spewField", + "RawPointerField": "spewField", + "IdField": "spewField", + "ValueField": "spewField", + "RawInt64Field": "spewField", + "JSOpImm": "spewJSOpImm", + "BoolImm": "spewBoolImm", + "ByteImm": "spewByteImm", + "GuardClassKindImm": "spewGuardClassKindImm", + "ValueTypeImm": "spewValueTypeImm", + "JSWhyMagicImm": "spewJSWhyMagicImm", + "CallFlagsImm": "spewCallFlagsImm", + "ScalarTypeImm": "spewScalarTypeImm", + "MetaTwoByteKindImm": "spewMetaTwoByteKindImm", + 
"UnaryMathFunctionImm": "spewUnaryMathFunctionImm", + "Int32Imm": "spewInt32Imm", + "UInt32Imm": "spewUInt32Imm", + "JSNativeImm": "spewJSNativeImm", + "StaticStringImm": "spewStaticStringImm", } def gen_spewer_method(name, args): """Generates spewer code for a single opcode.""" - method_name = 'spew' + name + method_name = "spew" + name # Generate code like this: # @@ -294,7 +292,7 @@ def gen_spewer_method(name, args): # spewField("shapeOffset", reader.stubOffset()); # spewOpEnd(); # } - args_code = '' + args_code = "" if args: is_first = True for arg_name, arg_type in six.iteritems(args): @@ -302,15 +300,15 @@ def gen_spewer_method(name, args): arg_name += suffix spew_method = arg_spewer_method[arg_type] if not is_first: - args_code += ' spewArgSeparator();\\\n' + args_code += " spewArgSeparator();\\\n" args_code += ' {}("{}", {});\\\n'.format(spew_method, arg_name, readexpr) is_first = False - code = 'void {}(CacheIRReader& reader) {{\\\n'.format(method_name) - code += ' spewOp(CacheOp::{});\\\n'.format(name) + code = "void {}(CacheIRReader& reader) {{\\\n".format(method_name) + code += " spewOp(CacheOp::{});\\\n".format(name) code += args_code - code += ' spewOpEnd();\\\n' - code += '}\\\n' + code += " spewOpEnd();\\\n" + code += "}\\\n" return code @@ -318,7 +316,7 @@ def gen_spewer_method(name, args): def gen_clone_method(name, args): """Generates code for cloning a single opcode.""" - method_name = 'clone' + name + method_name = "clone" + name # Generate code like this: # @@ -332,36 +330,37 @@ def gen_clone_method(name, args): # writer.assertLengthMatches(); # } - args_code = '' + args_code = "" if args: for arg_name, arg_type in six.iteritems(args): - if arg_type == 'RawId': - arg_type = 'ValId' + if arg_type == "RawId": + arg_type = "ValId" read_type, suffix, readexpr = arg_reader_info[arg_type] read_name = arg_name + suffix value_name = read_name - args_code += ' {} {} = {};\\\n'.format(read_type, read_name, readexpr) + args_code += " {} {} = {};\\\n".format(read_type, read_name, readexpr) write_type, write_method = arg_writer_info[arg_type] - if arg_name == 'result': - args_code += ' writer.newOperandId();\\\n' - if suffix == 'Offset': + if arg_name == "result": + args_code += " writer.newOperandId();\\\n" + if suffix == "Offset": # If the write function takes T&, the intermediate variable # should be of type T. - if write_type.endswith('&'): + if write_type.endswith("&"): write_type = write_type[:-1] value_name = arg_name - args_code += ' {} {} = get{}({});\\\n'.format(write_type, value_name, - arg_type, read_name) - args_code += ' writer.{}({});\\\n'.format(write_method, value_name) - - code = 'void {}'.format(method_name) - code += '(CacheIRReader& reader, CacheIRWriter& writer) {{\\\n' - code += ' writer.writeOp(CacheOp::{});\\\n'.format(name) + args_code += " {} {} = get{}({});\\\n".format( + write_type, value_name, arg_type, read_name + ) + args_code += " writer.{}({});\\\n".format(write_method, value_name) + + code = "void {}".format(method_name) + code += "(CacheIRReader& reader, CacheIRWriter& writer) {{\\\n" + code += " writer.writeOp(CacheOp::{});\\\n".format(name) code += args_code - code += ' writer.assertLengthMatches();\\\n' - code += '}}\\\n' + code += " writer.assertLengthMatches();\\\n" + code += "}}\\\n" return code @@ -370,47 +369,43 @@ def gen_clone_method(name, args): # This is used to generate the CacheIROpArgLengths array. CacheIRWriter asserts # the number of bytes written matches the value in that array. 
arg_length = { - 'ValId': 1, - 'ObjId': 1, - 'StringId': 1, - 'SymbolId': 1, - 'BooleanId': 1, - 'Int32Id': 1, - 'NumberId': 1, - 'BigIntId': 1, - 'ValueTagId': 1, - 'RawId': 1, - - 'ShapeField': 1, - 'GroupField': 1, - 'ObjectField': 1, - 'StringField': 1, - 'AtomField': 1, - 'PropertyNameField': 1, - 'SymbolField': 1, - 'BaseScriptField': 1, - 'RawInt32Field': 1, - 'RawPointerField': 1, - 'RawInt64Field': 1, - 'IdField': 1, - 'ValueField': 1, - - 'ByteImm': 1, - 'BoolImm': 1, - 'CallFlagsImm': 1, - 'ScalarTypeImm': 1, - 'MetaTwoByteKindImm': 1, - 'UnaryMathFunctionImm': 1, - 'JSOpImm': 1, - 'ValueTypeImm': 1, - 'GuardClassKindImm': 1, - 'JSWhyMagicImm': 1, - - 'Int32Imm': 4, - 'UInt32Imm': 4, - - 'JSNativeImm': 'sizeof(uintptr_t)', - 'StaticStringImm': 'sizeof(uintptr_t)', + "ValId": 1, + "ObjId": 1, + "StringId": 1, + "SymbolId": 1, + "BooleanId": 1, + "Int32Id": 1, + "NumberId": 1, + "BigIntId": 1, + "ValueTagId": 1, + "RawId": 1, + "ShapeField": 1, + "GroupField": 1, + "ObjectField": 1, + "StringField": 1, + "AtomField": 1, + "PropertyNameField": 1, + "SymbolField": 1, + "BaseScriptField": 1, + "RawInt32Field": 1, + "RawPointerField": 1, + "RawInt64Field": 1, + "IdField": 1, + "ValueField": 1, + "ByteImm": 1, + "BoolImm": 1, + "CallFlagsImm": 1, + "ScalarTypeImm": 1, + "MetaTwoByteKindImm": 1, + "UnaryMathFunctionImm": 1, + "JSOpImm": 1, + "ValueTypeImm": 1, + "GuardClassKindImm": 1, + "JSWhyMagicImm": 1, + "Int32Imm": 4, + "UInt32Imm": 4, + "JSNativeImm": "sizeof(uintptr_t)", + "StaticStringImm": "sizeof(uintptr_t)", } @@ -445,32 +440,33 @@ def generate_cacheirops_header(c_out, yaml_path): clone_methods = [] for op in data: - name = op['name'] + name = op["name"] - args = op['args'] + args = op["args"] assert args is None or isinstance(args, OrderedDict) - shared = op['shared'] + shared = op["shared"] assert isinstance(shared, bool) - transpile = op['transpile'] + transpile = op["transpile"] assert isinstance(transpile, bool) # Unscored Ops default to UINT32_MAX - cost_estimate = op.get('cost_estimate', int(0xffffffff)) + cost_estimate = op.get("cost_estimate", int(0xFFFFFFFF)) assert isinstance(cost_estimate, int) - custom_writer = op.get('custom_writer', False) + custom_writer = op.get("custom_writer", False) assert isinstance(custom_writer, bool) if args: - args_length = ' + '.join([str(arg_length[v]) for v in args.values()]) + args_length = " + ".join([str(arg_length[v]) for v in args.values()]) else: - args_length = '0' + args_length = "0" - transpile_str = ('true' if transpile else 'false') - ops_items.append('_({}, {}, {}, {})'.format( - name, args_length, transpile_str, cost_estimate)) + transpile_str = "true" if transpile else "false" + ops_items.append( + "_({}, {}, {}, {})".format(name, args_length, transpile_str, cost_estimate) + ) writer_methods.append(gen_writer_method(name, args, custom_writer)) @@ -481,42 +477,42 @@ def generate_cacheirops_header(c_out, yaml_path): if transpile: transpiler_methods.append(gen_compiler_method(name, args)) - transpiler_ops.append('_({})'.format(name)) + transpiler_ops.append("_({})".format(name)) spewer_methods.append(gen_spewer_method(name, args)) clone_methods.append(gen_clone_method(name, args)) - contents = '#define CACHE_IR_OPS(_)\\\n' - contents += '\\\n'.join(ops_items) - contents += '\n\n' + contents = "#define CACHE_IR_OPS(_)\\\n" + contents += "\\\n".join(ops_items) + contents += "\n\n" - contents += '#define CACHE_IR_WRITER_GENERATED \\\n' - contents += '\\\n'.join(writer_methods) - contents += '\n\n' + contents += "#define 
CACHE_IR_WRITER_GENERATED \\\n" + contents += "\\\n".join(writer_methods) + contents += "\n\n" - contents += '#define CACHE_IR_COMPILER_SHARED_GENERATED \\\n' - contents += '\\\n'.join(compiler_shared_methods) - contents += '\n\n' + contents += "#define CACHE_IR_COMPILER_SHARED_GENERATED \\\n" + contents += "\\\n".join(compiler_shared_methods) + contents += "\n\n" - contents += '#define CACHE_IR_COMPILER_UNSHARED_GENERATED \\\n' - contents += '\\\n'.join(compiler_unshared_methods) - contents += '\n\n' + contents += "#define CACHE_IR_COMPILER_UNSHARED_GENERATED \\\n" + contents += "\\\n".join(compiler_unshared_methods) + contents += "\n\n" - contents += '#define CACHE_IR_TRANSPILER_GENERATED \\\n' - contents += '\\\n'.join(transpiler_methods) - contents += '\n\n' + contents += "#define CACHE_IR_TRANSPILER_GENERATED \\\n" + contents += "\\\n".join(transpiler_methods) + contents += "\n\n" - contents += '#define CACHE_IR_TRANSPILER_OPS(_)\\\n' - contents += '\\\n'.join(transpiler_ops) - contents += '\n\n' + contents += "#define CACHE_IR_TRANSPILER_OPS(_)\\\n" + contents += "\\\n".join(transpiler_ops) + contents += "\n\n" - contents += '#define CACHE_IR_SPEWER_GENERATED \\\n' - contents += '\\\n'.join(spewer_methods) - contents += '\n\n' + contents += "#define CACHE_IR_SPEWER_GENERATED \\\n" + contents += "\\\n".join(spewer_methods) + contents += "\n\n" - contents += '#define CACHE_IR_CLONE_GENERATED \\\n' - contents += '\\\n'.join(clone_methods) - contents += '\n\n' + contents += "#define CACHE_IR_CLONE_GENERATED \\\n" + contents += "\\\n".join(clone_methods) + contents += "\n\n" - generate_header(c_out, 'jit_CacheIROpsGenerated_h', contents) + generate_header(c_out, "jit_CacheIROpsGenerated_h", contents) diff --git a/js/src/jit/GenerateOpcodeFiles.py b/js/src/jit/GenerateOpcodeFiles.py index a6057b211caa5f..be41361d78e892 100644 --- a/js/src/jit/GenerateOpcodeFiles.py +++ b/js/src/jit/GenerateOpcodeFiles.py @@ -32,7 +32,7 @@ def get_opcodes(inputs, pat): for line in open(inputfile): match = pat.match(line) if match: - op = match.group('name') + op = match.group("name") if op in ops_set: raise Exception("Duplicate opcode {} in {}".format(op, inputfile)) ops.append(op) @@ -43,19 +43,24 @@ def get_opcodes(inputs, pat): def generate_header(c_out, inputs, pat, includeguard, listname): ops = get_opcodes(inputs, pat) - ops_string = '\\\n'.join(['_(' + op + ')' for op in ops]) - c_out.write(HEADER_TEMPLATE % { - 'ops': ops_string, - 'includeguard': includeguard, - 'listname': listname, - }) + ops_string = "\\\n".join(["_(" + op + ")" for op in ops]) + c_out.write( + HEADER_TEMPLATE + % { + "ops": ops_string, + "includeguard": includeguard, + "listname": listname, + } + ) def generate_mir_header(c_out, *inputs): - pat = re.compile(r"^\s*INSTRUCTION_HEADER(_WITHOUT_TYPEPOLICY)?\((?P<name>\w+)\);?$") - generate_header(c_out, inputs, pat, 'jit_MOpcodesGenerated_h', 'MIR_OPCODE_LIST') + pat = re.compile( + r"^\s*INSTRUCTION_HEADER(_WITHOUT_TYPEPOLICY)?\((?P<name>\w+)\);?$" + ) + generate_header(c_out, inputs, pat, "jit_MOpcodesGenerated_h", "MIR_OPCODE_LIST") def generate_lir_header(c_out, *inputs): pat = re.compile(r"^\s*LIR_HEADER\((?P<name>\w+)\);?$") - generate_header(c_out, inputs, pat, 'jit_LOpcodesGenerated_h', 'LIR_OPCODE_LIST') + generate_header(c_out, inputs, pat, "jit_LOpcodesGenerated_h", "LIR_OPCODE_LIST") diff --git a/js/src/jit/arm/gen-double-encoder-table.py b/js/src/jit/arm/gen-double-encoder-table.py index 93ece8a9602625..fd622da82e19d4 100644 --- a/js/src/jit/arm/gen-double-encoder-table.py +++
b/js/src/jit/arm/gen-double-encoder-table.py @@ -26,10 +26,10 @@ def encodeDouble(value): a = value >> 7 b = (value >> 6) & 1 B = int(b == 0) - cdefgh = value & 0x3f + cdefgh = value & 0x3F return (a << 31) | (B << 30) | (rep(b, 8) << 22) | cdefgh << 16 -print('/* THIS FILE IS AUTOMATICALLY GENERATED BY gen-double-encode-table.py. */') +print("/* THIS FILE IS AUTOMATICALLY GENERATED BY gen-double-encode-table.py. */") for i in range(256): - print(' { 0x%08x, { %d, %d, 0 } },' % (encodeDouble(i), i & 0xf, i >> 4)) + print(" { 0x%08x, { %d, %d, 0 } }," % (encodeDouble(i), i & 0xF, i >> 4)) diff --git a/js/src/jit/moz.build b/js/src/jit/moz.build index 7f895a230e4ef3..b5b4a96b322e62 100644 --- a/js/src/jit/moz.build +++ b/js/src/jit/moz.build @@ -4,267 +4,262 @@ # License, v. 2.0. If a copy of the MPL was not distributed with this # file, You can obtain one at http://mozilla.org/MPL/2.0/. -FINAL_LIBRARY = 'js' +FINAL_LIBRARY = "js" # Includes should be relative to parent path -LOCAL_INCLUDES += [ - '!..', - '..' -] +LOCAL_INCLUDES += ["!..", ".."] -include('../js-config.mozbuild') -include('../js-cxxflags.mozbuild') +include("../js-config.mozbuild") +include("../js-cxxflags.mozbuild") -lir_inputs = ['LIR.h', 'shared/LIR-shared.h'] +lir_inputs = ["LIR.h", "shared/LIR-shared.h"] UNIFIED_SOURCES += [ - 'AliasAnalysis.cpp', - 'AlignmentMaskAnalysis.cpp', - 'AutoDetectInvalidation.cpp', - 'BacktrackingAllocator.cpp', - 'Bailouts.cpp', - 'BaselineBailouts.cpp', - 'BaselineCacheIRCompiler.cpp', - 'BaselineCodeGen.cpp', - 'BaselineDebugModeOSR.cpp', - 'BaselineFrame.cpp', - 'BaselineFrameInfo.cpp', - 'BaselineIC.cpp', - 'BaselineInspector.cpp', - 'BaselineJIT.cpp', - 'BitSet.cpp', - 'BytecodeAnalysis.cpp', - 'CacheIR.cpp', - 'CacheIRCompiler.cpp', - 'CacheIRHealth.cpp', - 'CacheIRSpewer.cpp', - 'CodeGenerator.cpp', - 'CompileWrappers.cpp', - 'Disassemble.cpp', - 'EdgeCaseAnalysis.cpp', - 'EffectiveAddressAnalysis.cpp', - 'ExecutableAllocator.cpp', - 'FoldLinearArithConstants.cpp', - 'InlinableNatives.cpp', - 'InstructionReordering.cpp', - 'Ion.cpp', - 'IonAnalysis.cpp', - 'IonBuilder.cpp', - 'IonCacheIRCompiler.cpp', - 'IonCompileTask.cpp', - 'IonIC.cpp', - 'IonOptimizationLevels.cpp', - 'Jit.cpp', - 'JitcodeMap.cpp', - 'JitContext.cpp', - 'JitFrames.cpp', - 'JitOptions.cpp', - 'JitScript.cpp', - 'JitSpewer.cpp', - 'JSJitFrameIter.cpp', - 'JSONSpewer.cpp', - 'KnownClass.cpp', - 'Label.cpp', - 'LICM.cpp', - 'Linker.cpp', - 'LIR.cpp', - 'Lowering.cpp', - 'MacroAssembler.cpp', - 'MCallOptimize.cpp', - 'MIR.cpp', - 'MIRGraph.cpp', - 'MoveResolver.cpp', - 'PerfSpewer.cpp', - 'ProcessExecutableMemory.cpp', - 'RangeAnalysis.cpp', - 'Recover.cpp', - 'RegisterAllocator.cpp', - 'RematerializedFrame.cpp', - 'SafepointIndex.cpp', - 'Safepoints.cpp', - 'ScalarReplacement.cpp', - 'shared/CodeGenerator-shared.cpp', - 'shared/Disassembler-shared.cpp', - 'shared/Lowering-shared.cpp', - 'Sink.cpp', - 'Snapshots.cpp', - 'TIOracle.cpp', - 'TrialInlining.cpp', - 'TypePolicy.cpp', - 'ValueNumbering.cpp', - 'VMFunctions.cpp', - 'WarpBuilder.cpp', - 'WarpBuilderShared.cpp', - 'WarpCacheIRTranspiler.cpp', - 'WarpOracle.cpp', - 'WarpSnapshot.cpp', - 'WasmBCE.cpp', - 'XrayJitInfo.cpp', + "AliasAnalysis.cpp", + "AlignmentMaskAnalysis.cpp", + "AutoDetectInvalidation.cpp", + "BacktrackingAllocator.cpp", + "Bailouts.cpp", + "BaselineBailouts.cpp", + "BaselineCacheIRCompiler.cpp", + "BaselineCodeGen.cpp", + "BaselineDebugModeOSR.cpp", + "BaselineFrame.cpp", + "BaselineFrameInfo.cpp", + "BaselineIC.cpp", + "BaselineInspector.cpp", + 
"BaselineJIT.cpp", + "BitSet.cpp", + "BytecodeAnalysis.cpp", + "CacheIR.cpp", + "CacheIRCompiler.cpp", + "CacheIRHealth.cpp", + "CacheIRSpewer.cpp", + "CodeGenerator.cpp", + "CompileWrappers.cpp", + "Disassemble.cpp", + "EdgeCaseAnalysis.cpp", + "EffectiveAddressAnalysis.cpp", + "ExecutableAllocator.cpp", + "FoldLinearArithConstants.cpp", + "InlinableNatives.cpp", + "InstructionReordering.cpp", + "Ion.cpp", + "IonAnalysis.cpp", + "IonBuilder.cpp", + "IonCacheIRCompiler.cpp", + "IonCompileTask.cpp", + "IonIC.cpp", + "IonOptimizationLevels.cpp", + "Jit.cpp", + "JitcodeMap.cpp", + "JitContext.cpp", + "JitFrames.cpp", + "JitOptions.cpp", + "JitScript.cpp", + "JitSpewer.cpp", + "JSJitFrameIter.cpp", + "JSONSpewer.cpp", + "KnownClass.cpp", + "Label.cpp", + "LICM.cpp", + "Linker.cpp", + "LIR.cpp", + "Lowering.cpp", + "MacroAssembler.cpp", + "MCallOptimize.cpp", + "MIR.cpp", + "MIRGraph.cpp", + "MoveResolver.cpp", + "PerfSpewer.cpp", + "ProcessExecutableMemory.cpp", + "RangeAnalysis.cpp", + "Recover.cpp", + "RegisterAllocator.cpp", + "RematerializedFrame.cpp", + "SafepointIndex.cpp", + "Safepoints.cpp", + "ScalarReplacement.cpp", + "shared/CodeGenerator-shared.cpp", + "shared/Disassembler-shared.cpp", + "shared/Lowering-shared.cpp", + "Sink.cpp", + "Snapshots.cpp", + "TIOracle.cpp", + "TrialInlining.cpp", + "TypePolicy.cpp", + "ValueNumbering.cpp", + "VMFunctions.cpp", + "WarpBuilder.cpp", + "WarpBuilderShared.cpp", + "WarpCacheIRTranspiler.cpp", + "WarpOracle.cpp", + "WarpSnapshot.cpp", + "WasmBCE.cpp", + "XrayJitInfo.cpp", ] -if CONFIG['JS_CODEGEN_NONE']: - lir_inputs += ['none/LIR-none.h'] - UNIFIED_SOURCES += [ - 'none/Trampoline-none.cpp' - ] -elif CONFIG['JS_CODEGEN_X86'] or CONFIG['JS_CODEGEN_X64']: - lir_inputs += ['x86-shared/LIR-x86-shared.h'] +if CONFIG["JS_CODEGEN_NONE"]: + lir_inputs += ["none/LIR-none.h"] + UNIFIED_SOURCES += ["none/Trampoline-none.cpp"] +elif CONFIG["JS_CODEGEN_X86"] or CONFIG["JS_CODEGEN_X64"]: + lir_inputs += ["x86-shared/LIR-x86-shared.h"] UNIFIED_SOURCES += [ - 'shared/AtomicOperations-shared-jit.cpp', - 'x86-shared/Architecture-x86-shared.cpp', - 'x86-shared/Assembler-x86-shared.cpp', - 'x86-shared/AssemblerBuffer-x86-shared.cpp', - 'x86-shared/CodeGenerator-x86-shared.cpp', - 'x86-shared/Lowering-x86-shared.cpp', - 'x86-shared/MacroAssembler-x86-shared-SIMD-unused.cpp', - 'x86-shared/MacroAssembler-x86-shared-SIMD.cpp', - 'x86-shared/MacroAssembler-x86-shared.cpp', - 'x86-shared/MoveEmitter-x86-shared.cpp', + "shared/AtomicOperations-shared-jit.cpp", + "x86-shared/Architecture-x86-shared.cpp", + "x86-shared/Assembler-x86-shared.cpp", + "x86-shared/AssemblerBuffer-x86-shared.cpp", + "x86-shared/CodeGenerator-x86-shared.cpp", + "x86-shared/Lowering-x86-shared.cpp", + "x86-shared/MacroAssembler-x86-shared-SIMD-unused.cpp", + "x86-shared/MacroAssembler-x86-shared-SIMD.cpp", + "x86-shared/MacroAssembler-x86-shared.cpp", + "x86-shared/MoveEmitter-x86-shared.cpp", ] - if CONFIG['JS_CODEGEN_X64']: - lir_inputs += ['x64/LIR-x64.h'] + if CONFIG["JS_CODEGEN_X64"]: + lir_inputs += ["x64/LIR-x64.h"] UNIFIED_SOURCES += [ - 'x64/Assembler-x64.cpp', - 'x64/Bailouts-x64.cpp', - 'x64/CodeGenerator-x64.cpp', - 'x64/Lowering-x64.cpp', - 'x64/MacroAssembler-x64.cpp', - 'x64/Trampoline-x64.cpp', + "x64/Assembler-x64.cpp", + "x64/Bailouts-x64.cpp", + "x64/CodeGenerator-x64.cpp", + "x64/Lowering-x64.cpp", + "x64/MacroAssembler-x64.cpp", + "x64/Trampoline-x64.cpp", ] else: - lir_inputs += ['x86/LIR-x86.h'] + lir_inputs += ["x86/LIR-x86.h"] UNIFIED_SOURCES += [ - 
'x86/Assembler-x86.cpp', - 'x86/Bailouts-x86.cpp', - 'x86/CodeGenerator-x86.cpp', - 'x86/Lowering-x86.cpp', - 'x86/MacroAssembler-x86.cpp', - 'x86/Trampoline-x86.cpp', + "x86/Assembler-x86.cpp", + "x86/Bailouts-x86.cpp", + "x86/CodeGenerator-x86.cpp", + "x86/Lowering-x86.cpp", + "x86/MacroAssembler-x86.cpp", + "x86/Trampoline-x86.cpp", ] -elif CONFIG['JS_CODEGEN_ARM']: - lir_inputs += ['arm/LIR-arm.h'] +elif CONFIG["JS_CODEGEN_ARM"]: + lir_inputs += ["arm/LIR-arm.h"] UNIFIED_SOURCES += [ - 'arm/Architecture-arm.cpp', - 'arm/Assembler-arm.cpp', - 'arm/Bailouts-arm.cpp', - 'arm/CodeGenerator-arm.cpp', - 'arm/disasm/Constants-arm.cpp', - 'arm/disasm/Disasm-arm.cpp', - 'arm/Lowering-arm.cpp', - 'arm/MacroAssembler-arm.cpp', - 'arm/MoveEmitter-arm.cpp', - 'arm/Trampoline-arm.cpp', - 'shared/AtomicOperations-shared-jit.cpp', + "arm/Architecture-arm.cpp", + "arm/Assembler-arm.cpp", + "arm/Bailouts-arm.cpp", + "arm/CodeGenerator-arm.cpp", + "arm/disasm/Constants-arm.cpp", + "arm/disasm/Disasm-arm.cpp", + "arm/Lowering-arm.cpp", + "arm/MacroAssembler-arm.cpp", + "arm/MoveEmitter-arm.cpp", + "arm/Trampoline-arm.cpp", + "shared/AtomicOperations-shared-jit.cpp", ] - if CONFIG['JS_SIMULATOR_ARM']: - UNIFIED_SOURCES += [ - 'arm/Simulator-arm.cpp' - ] - elif CONFIG['OS_ARCH'] == 'Darwin': + if CONFIG["JS_SIMULATOR_ARM"]: + UNIFIED_SOURCES += ["arm/Simulator-arm.cpp"] + elif CONFIG["OS_ARCH"] == "Darwin": SOURCES += [ - 'arm/llvm-compiler-rt/arm/aeabi_idivmod.S', - 'arm/llvm-compiler-rt/arm/aeabi_uidivmod.S', + "arm/llvm-compiler-rt/arm/aeabi_idivmod.S", + "arm/llvm-compiler-rt/arm/aeabi_uidivmod.S", ] -elif CONFIG['JS_CODEGEN_ARM64']: - lir_inputs += ['arm64/LIR-arm64.h'] +elif CONFIG["JS_CODEGEN_ARM64"]: + lir_inputs += ["arm64/LIR-arm64.h"] UNIFIED_SOURCES += [ - 'arm64/Architecture-arm64.cpp', - 'arm64/Assembler-arm64.cpp', - 'arm64/Bailouts-arm64.cpp', - 'arm64/CodeGenerator-arm64.cpp', - 'arm64/Lowering-arm64.cpp', - 'arm64/MacroAssembler-arm64.cpp', - 'arm64/MoveEmitter-arm64.cpp', - 'arm64/Trampoline-arm64.cpp', - 'arm64/vixl/Assembler-vixl.cpp', - 'arm64/vixl/Cpu-Features-vixl.cpp', - 'arm64/vixl/Cpu-vixl.cpp', - 'arm64/vixl/Decoder-vixl.cpp', - 'arm64/vixl/Instructions-vixl.cpp', - 'arm64/vixl/MacroAssembler-vixl.cpp', - 'arm64/vixl/MozAssembler-vixl.cpp', - 'arm64/vixl/MozCpu-vixl.cpp', - 'arm64/vixl/MozInstructions-vixl.cpp', - 'arm64/vixl/Utils-vixl.cpp', - 'shared/AtomicOperations-shared-jit.cpp', + "arm64/Architecture-arm64.cpp", + "arm64/Assembler-arm64.cpp", + "arm64/Bailouts-arm64.cpp", + "arm64/CodeGenerator-arm64.cpp", + "arm64/Lowering-arm64.cpp", + "arm64/MacroAssembler-arm64.cpp", + "arm64/MoveEmitter-arm64.cpp", + "arm64/Trampoline-arm64.cpp", + "arm64/vixl/Assembler-vixl.cpp", + "arm64/vixl/Cpu-Features-vixl.cpp", + "arm64/vixl/Cpu-vixl.cpp", + "arm64/vixl/Decoder-vixl.cpp", + "arm64/vixl/Instructions-vixl.cpp", + "arm64/vixl/MacroAssembler-vixl.cpp", + "arm64/vixl/MozAssembler-vixl.cpp", + "arm64/vixl/MozCpu-vixl.cpp", + "arm64/vixl/MozInstructions-vixl.cpp", + "arm64/vixl/Utils-vixl.cpp", + "shared/AtomicOperations-shared-jit.cpp", ] vixl_werror_sources = [ - 'arm64/vixl/Disasm-vixl.cpp', - 'arm64/vixl/Instrument-vixl.cpp', + "arm64/vixl/Disasm-vixl.cpp", + "arm64/vixl/Instrument-vixl.cpp", ] SOURCES += vixl_werror_sources - if CONFIG['CC_TYPE'] == 'clang-cl': + if CONFIG["CC_TYPE"] == "clang-cl": for f in vixl_werror_sources: - SOURCES[f].flags += ['-Wno-c++11-narrowing'] - if CONFIG['JS_SIMULATOR_ARM64']: + SOURCES[f].flags += ["-Wno-c++11-narrowing"] + if 
CONFIG["JS_SIMULATOR_ARM64"]: UNIFIED_SOURCES += [ - 'arm64/vixl/Debugger-vixl.cpp', - 'arm64/vixl/Logic-vixl.cpp', - 'arm64/vixl/MozSimulator-vixl.cpp', - 'arm64/vixl/Simulator-vixl.cpp' + "arm64/vixl/Debugger-vixl.cpp", + "arm64/vixl/Logic-vixl.cpp", + "arm64/vixl/MozSimulator-vixl.cpp", + "arm64/vixl/Simulator-vixl.cpp", ] -elif CONFIG['JS_CODEGEN_MIPS32'] or CONFIG['JS_CODEGEN_MIPS64']: - lir_inputs += ['mips-shared/LIR-mips-shared.h'] +elif CONFIG["JS_CODEGEN_MIPS32"] or CONFIG["JS_CODEGEN_MIPS64"]: + lir_inputs += ["mips-shared/LIR-mips-shared.h"] UNIFIED_SOURCES += [ - 'mips-shared/Architecture-mips-shared.cpp', - 'mips-shared/Assembler-mips-shared.cpp', - 'mips-shared/Bailouts-mips-shared.cpp', - 'mips-shared/CodeGenerator-mips-shared.cpp', - 'mips-shared/Lowering-mips-shared.cpp', - 'mips-shared/MacroAssembler-mips-shared.cpp', - 'mips-shared/MoveEmitter-mips-shared.cpp', + "mips-shared/Architecture-mips-shared.cpp", + "mips-shared/Assembler-mips-shared.cpp", + "mips-shared/Bailouts-mips-shared.cpp", + "mips-shared/CodeGenerator-mips-shared.cpp", + "mips-shared/Lowering-mips-shared.cpp", + "mips-shared/MacroAssembler-mips-shared.cpp", + "mips-shared/MoveEmitter-mips-shared.cpp", ] - if CONFIG['JS_CODEGEN_MIPS32']: - lir_inputs += ['mips32/LIR-mips32.h'] + if CONFIG["JS_CODEGEN_MIPS32"]: + lir_inputs += ["mips32/LIR-mips32.h"] UNIFIED_SOURCES += [ - 'mips32/Architecture-mips32.cpp', - 'mips32/Assembler-mips32.cpp', - 'mips32/Bailouts-mips32.cpp', - 'mips32/CodeGenerator-mips32.cpp', - 'mips32/Lowering-mips32.cpp', - 'mips32/MacroAssembler-mips32.cpp', - 'mips32/MoveEmitter-mips32.cpp', - 'mips32/Trampoline-mips32.cpp', + "mips32/Architecture-mips32.cpp", + "mips32/Assembler-mips32.cpp", + "mips32/Bailouts-mips32.cpp", + "mips32/CodeGenerator-mips32.cpp", + "mips32/Lowering-mips32.cpp", + "mips32/MacroAssembler-mips32.cpp", + "mips32/MoveEmitter-mips32.cpp", + "mips32/Trampoline-mips32.cpp", ] - if CONFIG['JS_SIMULATOR_MIPS32']: - UNIFIED_SOURCES += [ - 'mips32/Simulator-mips32.cpp' - ] - elif CONFIG['JS_CODEGEN_MIPS64']: - lir_inputs += ['mips64/LIR-mips64.h'] + if CONFIG["JS_SIMULATOR_MIPS32"]: + UNIFIED_SOURCES += ["mips32/Simulator-mips32.cpp"] + elif CONFIG["JS_CODEGEN_MIPS64"]: + lir_inputs += ["mips64/LIR-mips64.h"] UNIFIED_SOURCES += [ - 'mips64/Architecture-mips64.cpp', - 'mips64/Assembler-mips64.cpp', - 'mips64/Bailouts-mips64.cpp', - 'mips64/CodeGenerator-mips64.cpp', - 'mips64/Lowering-mips64.cpp', - 'mips64/MacroAssembler-mips64.cpp', - 'mips64/MoveEmitter-mips64.cpp', - 'mips64/Trampoline-mips64.cpp', + "mips64/Architecture-mips64.cpp", + "mips64/Assembler-mips64.cpp", + "mips64/Bailouts-mips64.cpp", + "mips64/CodeGenerator-mips64.cpp", + "mips64/Lowering-mips64.cpp", + "mips64/MacroAssembler-mips64.cpp", + "mips64/MoveEmitter-mips64.cpp", + "mips64/Trampoline-mips64.cpp", ] - if CONFIG['JS_SIMULATOR_MIPS64']: - UNIFIED_SOURCES += [ - 'mips64/Simulator-mips64.cpp' - ] + if CONFIG["JS_SIMULATOR_MIPS64"]: + UNIFIED_SOURCES += ["mips64/Simulator-mips64.cpp"] # Generate jit/MOpcodesGenerated.h from jit/MIR.h -GeneratedFile('MOpcodesGenerated.h', - script='GenerateOpcodeFiles.py', - entry_point='generate_mir_header', - inputs=['MIR.h']) +GeneratedFile( + "MOpcodesGenerated.h", + script="GenerateOpcodeFiles.py", + entry_point="generate_mir_header", + inputs=["MIR.h"], +) # Generate jit/LOpcodesGenerated.h from jit/LIR.h, jit/shared/LIR-shared.h, and # platform-specific LIR files. 
-GeneratedFile('LOpcodesGenerated.h', - script='GenerateOpcodeFiles.py', - entry_point='generate_lir_header', - inputs=lir_inputs) +GeneratedFile( + "LOpcodesGenerated.h", + script="GenerateOpcodeFiles.py", + entry_point="generate_lir_header", + inputs=lir_inputs, +) # Generate jit/CacheIROpsGenerated.h from jit/CacheIROps.yaml -GeneratedFile('CacheIROpsGenerated.h', - script='GenerateCacheIRFiles.py', - entry_point='generate_cacheirops_header', - inputs=['CacheIROps.yaml']) +GeneratedFile( + "CacheIROpsGenerated.h", + script="GenerateCacheIRFiles.py", + entry_point="generate_cacheirops_header", + inputs=["CacheIROps.yaml"], +) diff --git a/js/src/jsapi-tests/moz.build b/js/src/jsapi-tests/moz.build index fc88fb5acad6a5..1379082cfeeb90 100644 --- a/js/src/jsapi-tests/moz.build +++ b/js/src/jsapi-tests/moz.build @@ -4,174 +4,174 @@ # License, v. 2.0. If a copy of the MPL was not distributed with this # file, You can obtain one at http://mozilla.org/MPL/2.0/. -GeckoProgram('jsapi-tests', linkage=None) +GeckoProgram("jsapi-tests", linkage=None) -include('../js-cxxflags.mozbuild') -include('../js-standalone.mozbuild') +include("../js-cxxflags.mozbuild") +include("../js-standalone.mozbuild") UNIFIED_SOURCES += [ - 'selfTest.cpp', - 'testAddPropertyPropcache.cpp', - 'testArgumentsObject.cpp', - 'testArrayBuffer.cpp', - 'testArrayBufferView.cpp', - 'testArrayBufferWithUserOwnedContents.cpp', - 'testAtomicOperations.cpp', - 'testAtomizeUtf8NonAsciiLatin1CodePoint.cpp', - 'testBigInt.cpp', - 'testBoundFunction.cpp', - 'testBug604087.cpp', - 'testCallArgs.cpp', - 'testCallNonGenericMethodOnProxy.cpp', - 'testChromeBuffer.cpp', - 'testCompileNonSyntactic.cpp', - 'testCompileUtf8.cpp', - 'testDateToLocaleString.cpp', - 'testDebugger.cpp', - 'testDeduplication.cpp', - 'testDeepFreeze.cpp', - 'testDefineGetterSetterNonEnumerable.cpp', - 'testDefineProperty.cpp', - 'testDefinePropertyIgnoredAttributes.cpp', - 'testDeflateStringToUTF8Buffer.cpp', - 'testDifferentNewTargetInvokeConstructor.cpp', - 'testEmptyWindowIsOmitted.cpp', - 'testErrorCopying.cpp', - 'testErrorLineOfContext.cpp', - 'testException.cpp', - 'testExecuteInJSMEnvironment.cpp', - 'testExternalStrings.cpp', - 'testFindSCCs.cpp', - 'testForceLexicalInitialization.cpp', - 'testForOfIterator.cpp', - 'testForwardSetProperty.cpp', - 'testFreshGlobalEvalRedefinition.cpp', - 'testFunctionBinding.cpp', - 'testFunctionProperties.cpp', - 'testGCAllocator.cpp', - 'testGCCellPtr.cpp', - 'testGCChunkPool.cpp', - 'testGCExactRooting.cpp', - 'testGCFinalizeCallback.cpp', - 'testGCGrayMarking.cpp', - 'testGCHeapBarriers.cpp', - 'testGCHooks.cpp', - 'testGCMarking.cpp', - 'testGCOutOfMemory.cpp', - 'testGCStoreBufferRemoval.cpp', - 'testGCUniqueId.cpp', - 'testGCWeakCache.cpp', - 'testGetPropertyDescriptor.cpp', - 'testHashTable.cpp', - 'testIndexToString.cpp', - 'testInformalValueTypeName.cpp', - 'testIntern.cpp', - 'testIntlAvailableLocales.cpp', - 'testIntString.cpp', - 'testIsInsideNursery.cpp', - 'testIteratorObject.cpp', - 'testJSEvaluateScript.cpp', - 'testLookup.cpp', - 'testLooselyEqual.cpp', - 'testMappedArrayBuffer.cpp', - 'testMemoryAssociation.cpp', - 'testMutedErrors.cpp', - 'testNewObject.cpp', - 'testNewTargetInvokeConstructor.cpp', - 'testNullRoot.cpp', - 'testNumberToString.cpp', - 'testObjectEmulatingUndefined.cpp', - 'testOOM.cpp', - 'testParseJSON.cpp', - 'testParserAtom.cpp', - 'testPersistentRooted.cpp', - 'testPreserveJitCode.cpp', - 'testPrintf.cpp', - 'testPrivateGCThingValue.cpp', - 'testProfileStrings.cpp', - 'testPromise.cpp', - 
'testPropCache.cpp', - 'testReadableStream.cpp', - 'testRegExp.cpp', - 'testResolveRecursion.cpp', - 'tests.cpp', - 'testSABAccounting.cpp', - 'testSameValue.cpp', - 'testSavedStacks.cpp', - 'testScriptInfo.cpp', - 'testScriptObject.cpp', - 'testScriptSourceCompression.cpp', - 'testSetProperty.cpp', - 'testSetPropertyIgnoringNamedGetter.cpp', - 'testSharedImmutableStringsCache.cpp', - 'testSourcePolicy.cpp', - 'testSparseBitmap.cpp', - 'testStringBuffer.cpp', - 'testStringIsArrayIndex.cpp', - 'testStructuredClone.cpp', - 'testSymbol.cpp', - 'testThreadingConditionVariable.cpp', - 'testThreadingExclusiveData.cpp', - 'testThreadingMutex.cpp', - 'testThreadingThread.cpp', - 'testToSignedOrUnsignedInteger.cpp', - 'testTypedArrays.cpp', - 'testUbiNode.cpp', - 'testUncaughtSymbol.cpp', - 'testUTF8.cpp', - 'testWasmLEB128.cpp', - 'testWeakMap.cpp', - 'testWindowNonConfigurable.cpp', - 'testXDR.cpp', + "selfTest.cpp", + "testAddPropertyPropcache.cpp", + "testArgumentsObject.cpp", + "testArrayBuffer.cpp", + "testArrayBufferView.cpp", + "testArrayBufferWithUserOwnedContents.cpp", + "testAtomicOperations.cpp", + "testAtomizeUtf8NonAsciiLatin1CodePoint.cpp", + "testBigInt.cpp", + "testBoundFunction.cpp", + "testBug604087.cpp", + "testCallArgs.cpp", + "testCallNonGenericMethodOnProxy.cpp", + "testChromeBuffer.cpp", + "testCompileNonSyntactic.cpp", + "testCompileUtf8.cpp", + "testDateToLocaleString.cpp", + "testDebugger.cpp", + "testDeduplication.cpp", + "testDeepFreeze.cpp", + "testDefineGetterSetterNonEnumerable.cpp", + "testDefineProperty.cpp", + "testDefinePropertyIgnoredAttributes.cpp", + "testDeflateStringToUTF8Buffer.cpp", + "testDifferentNewTargetInvokeConstructor.cpp", + "testEmptyWindowIsOmitted.cpp", + "testErrorCopying.cpp", + "testErrorLineOfContext.cpp", + "testException.cpp", + "testExecuteInJSMEnvironment.cpp", + "testExternalStrings.cpp", + "testFindSCCs.cpp", + "testForceLexicalInitialization.cpp", + "testForOfIterator.cpp", + "testForwardSetProperty.cpp", + "testFreshGlobalEvalRedefinition.cpp", + "testFunctionBinding.cpp", + "testFunctionProperties.cpp", + "testGCAllocator.cpp", + "testGCCellPtr.cpp", + "testGCChunkPool.cpp", + "testGCExactRooting.cpp", + "testGCFinalizeCallback.cpp", + "testGCGrayMarking.cpp", + "testGCHeapBarriers.cpp", + "testGCHooks.cpp", + "testGCMarking.cpp", + "testGCOutOfMemory.cpp", + "testGCStoreBufferRemoval.cpp", + "testGCUniqueId.cpp", + "testGCWeakCache.cpp", + "testGetPropertyDescriptor.cpp", + "testHashTable.cpp", + "testIndexToString.cpp", + "testInformalValueTypeName.cpp", + "testIntern.cpp", + "testIntlAvailableLocales.cpp", + "testIntString.cpp", + "testIsInsideNursery.cpp", + "testIteratorObject.cpp", + "testJSEvaluateScript.cpp", + "testLookup.cpp", + "testLooselyEqual.cpp", + "testMappedArrayBuffer.cpp", + "testMemoryAssociation.cpp", + "testMutedErrors.cpp", + "testNewObject.cpp", + "testNewTargetInvokeConstructor.cpp", + "testNullRoot.cpp", + "testNumberToString.cpp", + "testObjectEmulatingUndefined.cpp", + "testOOM.cpp", + "testParseJSON.cpp", + "testParserAtom.cpp", + "testPersistentRooted.cpp", + "testPreserveJitCode.cpp", + "testPrintf.cpp", + "testPrivateGCThingValue.cpp", + "testProfileStrings.cpp", + "testPromise.cpp", + "testPropCache.cpp", + "testReadableStream.cpp", + "testRegExp.cpp", + "testResolveRecursion.cpp", + "tests.cpp", + "testSABAccounting.cpp", + "testSameValue.cpp", + "testSavedStacks.cpp", + "testScriptInfo.cpp", + "testScriptObject.cpp", + "testScriptSourceCompression.cpp", + "testSetProperty.cpp", + 
"testSetPropertyIgnoringNamedGetter.cpp", + "testSharedImmutableStringsCache.cpp", + "testSourcePolicy.cpp", + "testSparseBitmap.cpp", + "testStringBuffer.cpp", + "testStringIsArrayIndex.cpp", + "testStructuredClone.cpp", + "testSymbol.cpp", + "testThreadingConditionVariable.cpp", + "testThreadingExclusiveData.cpp", + "testThreadingMutex.cpp", + "testThreadingThread.cpp", + "testToSignedOrUnsignedInteger.cpp", + "testTypedArrays.cpp", + "testUbiNode.cpp", + "testUncaughtSymbol.cpp", + "testUTF8.cpp", + "testWasmLEB128.cpp", + "testWeakMap.cpp", + "testWindowNonConfigurable.cpp", + "testXDR.cpp", ] SOURCES += [ # There are clashing definitions of js::jit::AssemblerBuffer. - 'testAssemblerBuffer.cpp', + "testAssemblerBuffer.cpp", ] SOURCES += [ # We don't want this in the C++ files with the test cases. - 'hidePointer.cpp', + "hidePointer.cpp", ] -if not CONFIG['JS_CODEGEN_NONE']: +if not CONFIG["JS_CODEGEN_NONE"]: UNIFIED_SOURCES += [ - 'testJitDCEinGVN.cpp', - 'testJitFoldsTo.cpp', - 'testJitGVN.cpp', - 'testJitMacroAssembler.cpp', - 'testJitMoveEmitterCycles-mips32.cpp', - 'testJitMoveEmitterCycles.cpp', - 'testJitRangeAnalysis.cpp', - 'testJitRegisterSet.cpp', - 'testJitRValueAlloc.cpp', + "testJitDCEinGVN.cpp", + "testJitFoldsTo.cpp", + "testJitGVN.cpp", + "testJitMacroAssembler.cpp", + "testJitMoveEmitterCycles-mips32.cpp", + "testJitMoveEmitterCycles.cpp", + "testJitRangeAnalysis.cpp", + "testJitRegisterSet.cpp", + "testJitRValueAlloc.cpp", ] -if CONFIG['NIGHTLY_BUILD']: +if CONFIG["NIGHTLY_BUILD"]: # The Error interceptor only exists on Nightly. UNIFIED_SOURCES += [ - 'testErrorInterceptor.cpp', - 'testErrorInterceptorGC.cpp', + "testErrorInterceptor.cpp", + "testErrorInterceptorGC.cpp", ] -if CONFIG['OS_ARCH'] not in ('WINNT', 'Darwin') and CONFIG['OS_TARGET'] != 'Android': +if CONFIG["OS_ARCH"] not in ("WINNT", "Darwin") and CONFIG["OS_TARGET"] != "Android": # open_memstream() not available on Windows, macOS, or Android UNIFIED_SOURCES += [ - 'testPrintError.cpp', + "testPrintError.cpp", ] -DEFINES['EXPORT_JS_API'] = True +DEFINES["EXPORT_JS_API"] = True LOCAL_INCLUDES += [ - '!..', - '..', + "!..", + "..", ] USE_LIBS += [ - 'static:js', + "static:js", ] -DEFINES['topsrcdir'] = '%s/js/src' % TOPSRCDIR -OBJDIR_PP_FILES.js.src['jsapi-tests'] += ['jsapi-tests-gdb.py.in'] +DEFINES["topsrcdir"] = "%s/js/src" % TOPSRCDIR +OBJDIR_PP_FILES.js.src["jsapi-tests"] += ["jsapi-tests-gdb.py.in"] diff --git a/js/src/make-source-package.py b/js/src/make-source-package.py index 66d01f88645798..edef6001daab8e 100755 --- a/js/src/make-source-package.py +++ b/js/src/make-source-package.py @@ -13,12 +13,11 @@ import sys from pathlib import Path -logging.basicConfig(format='%(levelname)s: %(message)s', level=logging.INFO) +logging.basicConfig(format="%(levelname)s: %(message)s", level=logging.INFO) def find_command(names): - """Search for command in `names`, and returns the first one that exists. - """ + """Search for command in `names`, and returns the first one that exists.""" for name in names: path = shutil.which(name) @@ -33,83 +32,88 @@ def assert_command(env_var, name): The command name comes from either environment variable or find_command. """ if not name: - logging.error('{} command not found'.format(env_var)) + logging.error("{} command not found".format(env_var)) sys.exit(1) def parse_version(topsrc_dir): - """Parse milestone.txt and return the entire milestone and major version. 
- """ - milestone_file = topsrc_dir / 'config' / 'milestone.txt' + """Parse milestone.txt and return the entire milestone and major version.""" + milestone_file = topsrc_dir / "config" / "milestone.txt" if not milestone_file.is_file(): - return ('', '', '') + return ("", "", "") - with milestone_file.open('r') as f: + with milestone_file.open("r") as f: for line in f: line = line.strip() if not line: continue - if line.startswith('#'): + if line.startswith("#"): continue - v = line.split('.') - return tuple((v + ['', ''])[:3]) + v = line.split(".") + return tuple((v + ["", ""])[:3]) - return ('', '', '') + return ("", "", "") -tmp_dir = Path('/tmp') +tmp_dir = Path("/tmp") -tar = os.environ.get('TAR', find_command(['tar'])) -assert_command('TAR', tar) +tar = os.environ.get("TAR", find_command(["tar"])) +assert_command("TAR", tar) -rsync = os.environ.get('RSYNC', find_command(['rsync'])) -assert_command('RSYNC', rsync) +rsync = os.environ.get("RSYNC", find_command(["rsync"])) +assert_command("RSYNC", rsync) -autoconf = os.environ.get('AUTOCONF', find_command([ - 'autoconf-2.13', - 'autoconf2.13', - 'autoconf213', -])) -assert_command('AUTOCONF', autoconf) +autoconf = os.environ.get( + "AUTOCONF", + find_command( + [ + "autoconf-2.13", + "autoconf2.13", + "autoconf213", + ] + ), +) +assert_command("AUTOCONF", autoconf) -src_dir = Path(os.environ.get('SRC_DIR', Path(__file__).parent.absolute())) -mozjs_name = os.environ.get('MOZJS_NAME', 'mozjs') -staging_dir = Path(os.environ.get('STAGING', tmp_dir / 'mozjs-src-pkg')) -dist_dir = Path(os.environ.get('DIST', tmp_dir)) +src_dir = Path(os.environ.get("SRC_DIR", Path(__file__).parent.absolute())) +mozjs_name = os.environ.get("MOZJS_NAME", "mozjs") +staging_dir = Path(os.environ.get("STAGING", tmp_dir / "mozjs-src-pkg")) +dist_dir = Path(os.environ.get("DIST", tmp_dir)) topsrc_dir = src_dir.parent.parent.absolute() -parsed_major_version, parsed_minor_version, parsed_patch_version = parse_version(topsrc_dir) +parsed_major_version, parsed_minor_version, parsed_patch_version = parse_version( + topsrc_dir +) -major_version = os.environ.get('MOZJS_MAJOR_VERSION', parsed_major_version) -minor_version = os.environ.get('MOZJS_MINOR_VERSION', parsed_minor_version) -patch_version = os.environ.get('MOZJS_PATCH_VERSION', parsed_patch_version) -alpha = os.environ.get('MOZJS_ALPHA', '') +major_version = os.environ.get("MOZJS_MAJOR_VERSION", parsed_major_version) +minor_version = os.environ.get("MOZJS_MINOR_VERSION", parsed_minor_version) +patch_version = os.environ.get("MOZJS_PATCH_VERSION", parsed_patch_version) +alpha = os.environ.get("MOZJS_ALPHA", "") -version = '{}-{}.{}.{}'.format(mozjs_name, - major_version, - minor_version, - patch_version or alpha or '0') +version = "{}-{}.{}.{}".format( + mozjs_name, major_version, minor_version, patch_version or alpha or "0" +) target_dir = staging_dir / version -package_name = '{}.tar.xz'.format(version) +package_name = "{}.tar.xz".format(version) package_file = dist_dir / package_name -tar_opts = ['-Jcf'] +tar_opts = ["-Jcf"] # Given there might be some external program that reads the following output, # use raw `print`, instead of logging. 
-print('Environment:') -print(' TAR = {}'.format(tar)) -print(' RSYNC = {}'.format(rsync)) -print(' AUTOCONF = {}'.format(autoconf)) -print(' STAGING = {}'.format(staging_dir)) -print(' DIST = {}'.format(dist_dir)) -print(' SRC_DIR = {}'.format(src_dir)) -print(' MOZJS_NAME = {}'.format(mozjs_name)) -print(' MOZJS_MAJOR_VERSION = {}'.format(major_version)) -print(' MOZJS_MINOR_VERSION = {}'.format(minor_version)) -print(' MOZJS_PATCH_VERSION = {}'.format(patch_version)) -print(' MOZJS_ALPHA = {}'.format(alpha)) -print('') +print("Environment:") +print(" TAR = {}".format(tar)) +print(" RSYNC = {}".format(rsync)) +print(" AUTOCONF = {}".format(autoconf)) +print(" STAGING = {}".format(staging_dir)) +print(" DIST = {}".format(dist_dir)) +print(" SRC_DIR = {}".format(src_dir)) +print(" MOZJS_NAME = {}".format(mozjs_name)) +print(" MOZJS_MAJOR_VERSION = {}".format(major_version)) +print(" MOZJS_MINOR_VERSION = {}".format(minor_version)) +print(" MOZJS_PATCH_VERSION = {}".format(patch_version)) +print(" MOZJS_ALPHA = {}".format(alpha)) +print("") rsync_filter_list = """ # Top-level config and build files @@ -222,27 +226,26 @@ def parse_version(topsrc_dir): MDN hosts the latest SpiderMonkey {major_version} release notes: https://developer.mozilla.org/en-US/docs/SpiderMonkey/{major_version} -""".format(major_version=major_version) +""".format( + major_version=major_version +) def is_mozjs_cargo_member(line): - """Checks if the line in workspace.members is mozjs-related - """ + """Checks if the line in workspace.members is mozjs-related""" return '"js/' in line def is_mozjs_crates_io_local_patch(line): - """Checks if the line in patch.crates-io is mozjs-related - """ + """Checks if the line in patch.crates-io is mozjs-related""" return 'path = "js' in line def clean(): - """Remove temporary directory and package file. - """ - logging.info('Cleaning {} and {} ...'.format(package_file, target_dir)) + """Remove temporary directory and package file.""" + logging.info("Cleaning {} and {} ...".format(package_file, target_dir)) if package_file.exists(): package_file.unlink() if target_dir.exists(): @@ -250,32 +253,35 @@ def clean(): def assert_clean(): - """Assert that target directory does not contain generated files. - """ - makefile_file = target_dir / 'js' / 'src' / 'Makefile' + """Assert that target directory does not contain generated files.""" + makefile_file = target_dir / "js" / "src" / "Makefile" if makefile_file.exists(): - logging.error('found js/src/Makefile. Please clean before packaging.') + logging.error("found js/src/Makefile. Please clean before packaging.") sys.exit(1) def create_target_dir(): if target_dir.exists(): - logging.warning('dist tree {} already exists!'.format(target_dir)) + logging.warning("dist tree {} already exists!".format(target_dir)) else: target_dir.mkdir(parents=True) def sync_files(): # Output of the command should directly go to stdout/stderr. - p = subprocess.Popen([str(rsync), - '--delete-excluded', - '--prune-empty-dirs', - '--quiet', - '--recursive', - '{}/'.format(topsrc_dir), - '{}/'.format(target_dir), - '--filter=. -'], - stdin=subprocess.PIPE) + p = subprocess.Popen( + [ + str(rsync), + "--delete-excluded", + "--prune-empty-dirs", + "--quiet", + "--recursive", + "{}/".format(topsrc_dir), + "{}/".format(target_dir), + "--filter=. 
-", + ], + stdin=subprocess.PIPE, + ) p.communicate(rsync_filter_list.encode()) @@ -284,10 +290,11 @@ def sync_files(): def copy_cargo_toml(): - cargo_toml_file = topsrc_dir / 'Cargo.toml' - target_cargo_toml_file = target_dir / 'Cargo.toml' + cargo_toml_file = topsrc_dir / "Cargo.toml" + target_cargo_toml_file = target_dir / "Cargo.toml" + + with cargo_toml_file.open("r") as f: - with cargo_toml_file.open('r') as f: class State(enum.Enum): BEFORE_MEMBER = 1 INSIDE_MEMBER = 2 @@ -295,93 +302,93 @@ class State(enum.Enum): INSIDE_PATCH = 4 AFTER_PATCH = 5 - content = '' + content = "" state = State.BEFORE_MEMBER for line in f: if state == State.BEFORE_MEMBER: - if line.strip() == 'members = [': + if line.strip() == "members = [": state = State.INSIDE_MEMBER elif state == State.INSIDE_MEMBER: - if line.strip() == ']': + if line.strip() == "]": state = State.AFTER_MEMBER elif not is_mozjs_cargo_member(line): continue elif state == State.AFTER_MEMBER: - if line.strip() == '[patch.crates-io]': + if line.strip() == "[patch.crates-io]": state = State.INSIDE_PATCH elif state == State.INSIDE_PATCH: - if line.startswith('['): + if line.startswith("["): state = State.AFTER_PATCH - if 'path = ' in line: + if "path = " in line: if not is_mozjs_crates_io_local_patch(line): continue content += line - with target_cargo_toml_file.open('w') as f: + with target_cargo_toml_file.open("w") as f: f.write(content) def generate_configure(): - """Generate configure files to avoid build dependency on autoconf-2.13 - """ + """Generate configure files to avoid build dependency on autoconf-2.13""" - src_configure_in_file = topsrc_dir / 'js' / 'src' / 'configure.in' - src_old_configure_in_file = topsrc_dir / 'js' / 'src' / 'old-configure.in' - dest_configure_file = target_dir / 'js' / 'src' / 'configure' - dest_old_configure_file = target_dir / 'js' / 'src' / 'old-configure' + src_configure_in_file = topsrc_dir / "js" / "src" / "configure.in" + src_old_configure_in_file = topsrc_dir / "js" / "src" / "old-configure.in" + dest_configure_file = target_dir / "js" / "src" / "configure" + dest_old_configure_file = target_dir / "js" / "src" / "old-configure" - shutil.copy2(str(src_configure_in_file), str(dest_configure_file), - follow_symlinks=False) + shutil.copy2( + str(src_configure_in_file), str(dest_configure_file), follow_symlinks=False + ) st = dest_configure_file.stat() - dest_configure_file.chmod( - st.st_mode | stat.S_IXUSR | stat.S_IXGRP | stat.S_IXOTH) + dest_configure_file.chmod(st.st_mode | stat.S_IXUSR | stat.S_IXGRP | stat.S_IXOTH) - js_src_dir = topsrc_dir / 'js' / 'src' + js_src_dir = topsrc_dir / "js" / "src" - with dest_old_configure_file.open('w') as f: - subprocess.run([str(autoconf), - '--localdir={}'.format(js_src_dir), - str(src_old_configure_in_file)], - stdout=f, - check=True) + with dest_old_configure_file.open("w") as f: + subprocess.run( + [ + str(autoconf), + "--localdir={}".format(js_src_dir), + str(src_old_configure_in_file), + ], + stdout=f, + check=True, + ) def copy_install(): - """Copy or create INSTALL. - """ + """Copy or create INSTALL.""" - staging_install_file = staging_dir / 'INSTALL' - target_install_file = target_dir / 'INSTALL' + staging_install_file = staging_dir / "INSTALL" + target_install_file = target_dir / "INSTALL" if staging_install_file.exists(): shutil.copy2(str(staging_install_file), str(target_install_file)) else: - with target_install_file.open('w') as f: + with target_install_file.open("w") as f: f.write(INSTALL_CONTENT) def copy_readme(): - """Copy or create README. 
- """ + """Copy or create README.""" - staging_readme_file = staging_dir / 'README' - target_readme_file = target_dir / 'README' + staging_readme_file = staging_dir / "README" + target_readme_file = target_dir / "README" if staging_readme_file.exists(): shutil.copy2(str(staging_readme_file), str(target_readme_file)) else: - with target_readme_file.open('w') as f: + with target_readme_file.open("w") as f: f.write(README_CONTENT) def copy_patches(): - """Copy patches dir, if it exists. - """ + """Copy patches dir, if it exists.""" - staging_patches_dir = staging_dir / 'patches' - top_patches_dir = topsrc_dir / 'patches' - target_patches_dir = target_dir / 'patches' + staging_patches_dir = staging_dir / "patches" + top_patches_dir = topsrc_dir / "patches" + target_patches_dir = target_dir / "patches" if staging_patches_dir.is_dir(): shutil.copytree(str(staging_patches_dir), str(target_patches_dir)) @@ -390,18 +397,16 @@ def copy_patches(): def remove_python_cache(): - """Remove *.pyc and *.pyo files if any. - """ - for f in target_dir.glob('**/*.pyc'): + """Remove *.pyc and *.pyo files if any.""" + for f in target_dir.glob("**/*.pyc"): f.unlink() - for f in target_dir.glob('**/*.pyo'): + for f in target_dir.glob("**/*.pyo"): f.unlink() def stage(): - """Stage source tarball content. - """ - logging.info('Staging source tarball in {}...'.format(target_dir)) + """Stage source tarball content.""" + logging.info("Staging source tarball in {}...".format(target_dir)) create_target_dir() sync_files() @@ -414,17 +419,14 @@ def stage(): def create_tar(): - """Roll the tarball. - """ + """Roll the tarball.""" - logging.info('Packaging source tarball at {}...'.format(package_file)) + logging.info("Packaging source tarball at {}...".format(package_file)) - subprocess.run([str(tar)] + tar_opts + [ - str(package_file), - '-C', - str(staging_dir), - version - ], check=True) + subprocess.run( + [str(tar)] + tar_opts + [str(package_file), "-C", str(staging_dir), version], + check=True, + ) def build(): @@ -434,14 +436,12 @@ def build(): parser = argparse.ArgumentParser(description="Make SpiderMonkey source package") -subparsers = parser.add_subparsers(dest='COMMAND') -subparser_update = subparsers.add_parser('clean', - help='') -subparser_update = subparsers.add_parser('build', - help='') +subparsers = parser.add_subparsers(dest="COMMAND") +subparser_update = subparsers.add_parser("clean", help="") +subparser_update = subparsers.add_parser("build", help="") args = parser.parse_args() -if args.COMMAND == 'clean': +if args.COMMAND == "clean": clean() -elif not args.COMMAND or args.COMMAND == 'build': +elif not args.COMMAND or args.COMMAND == "build": build() diff --git a/js/src/moz.build b/js/src/moz.build index 1a6927025c5e40..a3d41826e1db7c 100755 --- a/js/src/moz.build +++ b/js/src/moz.build @@ -4,230 +4,230 @@ # License, v. 2.0. If a copy of the MPL was not distributed with this # file, You can obtain one at http://mozilla.org/MPL/2.0/. 
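moz.build files are evaluated as Python, so Black reformats them as well, and nearly every hunk in this file is one of two transformations: string literals normalized to double quotes, and statements re-wrapped against Black's default 88-column limit. Below is a minimal sketch of both, assuming the black package is importable; format_str is Black's programmatic formatting entry point, and the sample source is modeled on assignments appearing later in this file.

    # Illustrative sketch, not part of the patch.
    import black

    src = (
        "component_wasm = ('Core', 'Javascript: Web Assembly')\n"
        "DIRS += ['build',\n"
        "         'debugger', 'frontend', 'gc', 'irregexp', 'jit', 'wasm']\n"
    )

    # Default mode: 88-column lines, double-quote normalization.
    print(black.format_str(src, mode=black.FileMode()), end="")
    # Expected output (same program, new presentation):
    #   component_wasm = ("Core", "Javascript: Web Assembly")
    #   DIRS += ["build", "debugger", "frontend", "gc", "irregexp", "jit", "wasm"]
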
-include('js-config.mozbuild') -include('js-cxxflags.mozbuild') +include("js-config.mozbuild") +include("js-cxxflags.mozbuild") # Directory metadata -component_engine = ('Core', 'JavaScript Engine') -component_gc = ('Core', 'JavaScript: GC') -component_intl = ('Core', 'JavaScript: Internationalization API') -component_jit = ('Core', 'JavaScript Engine: JIT') -component_stl = ('Core', 'JavaScript: Standard Library') -component_wasm = ('Core', 'Javascript: Web Assembly') +component_engine = ("Core", "JavaScript Engine") +component_gc = ("Core", "JavaScript: GC") +component_intl = ("Core", "JavaScript: Internationalization API") +component_jit = ("Core", "JavaScript Engine: JIT") +component_stl = ("Core", "JavaScript: Standard Library") +component_wasm = ("Core", "Javascript: Web Assembly") FILES_PER_UNIFIED_FILE = 6 # Prevent redefining FILES_PER_UNIFIED_FILE in each sub-directory of js. This # will not escape this directory. -export('FILES_PER_UNIFIED_FILE') +export("FILES_PER_UNIFIED_FILE") -with Files('*'): +with Files("*"): BUG_COMPONENT = component_engine -with Files('wasm/**'): +with Files("wasm/**"): BUG_COMPONENT = component_wasm -with Files('builtin/**'): +with Files("builtin/**"): BUG_COMPONENT = component_stl -with Files('ctypes/**'): - BUG_COMPONENT = ('Core', 'js-ctypes') -with Files('gc/**'): +with Files("ctypes/**"): + BUG_COMPONENT = ("Core", "js-ctypes") +with Files("gc/**"): BUG_COMPONENT = component_gc -with Files('jit/**'): +with Files("jit/**"): BUG_COMPONENT = component_jit # File-specific metadata -for gcfile in ['devtools/rootAnalysis', 'devtools/gc-ubench', 'devtools/gctrace']: +for gcfile in ["devtools/rootAnalysis", "devtools/gc-ubench", "devtools/gctrace"]: with Files(gcfile): BUG_COMPONENT = component_gc -for stlfile in ['jsdate.*', 'jsnum.*']: +for stlfile in ["jsdate.*", "jsnum.*"]: with Files(stlfile): BUG_COMPONENT = component_stl -with Files('builtin/intl/*'): +with Files("builtin/intl/*"): BUG_COMPONENT = component_intl -if CONFIG['ENABLE_WASM_CRANELIFT']: - CONFIGURE_SUBST_FILES += ['rust/extra-bindgen-flags'] +if CONFIG["ENABLE_WASM_CRANELIFT"]: + CONFIGURE_SUBST_FILES += ["rust/extra-bindgen-flags"] -if not CONFIG['JS_DISABLE_SHELL']: +if not CONFIG["JS_DISABLE_SHELL"]: DIRS += [ - 'rust', - 'shell', + "rust", + "shell", ] TEST_DIRS += [ - 'gdb', - 'jsapi-tests', - 'tests', + "gdb", + "jsapi-tests", + "tests", ] - if CONFIG['FUZZING_INTERFACES']: + if CONFIG["FUZZING_INTERFACES"]: TEST_DIRS += [ - 'fuzz-tests', + "fuzz-tests", ] -if CONFIG['FUZZING_INTERFACES'] and CONFIG['LIBFUZZER']: +if CONFIG["FUZZING_INTERFACES"] and CONFIG["LIBFUZZER"]: # In addition to regular coverage provided by trace-pc-guard, # LibFuzzer can use trace-cmp to instrument various compare instructions. # Only use this feature on source files that do a lot of constant # comparisons that would otherwise be hard to guess by LibFuzzer, # as it comes with a larger overhead (requires -use_value_profile=1). - include('/tools/fuzzing/libfuzzer-flags.mozbuild') + include("/tools/fuzzing/libfuzzer-flags.mozbuild") # Any files that are targeted by LibFuzzer should be added here so they can # be built with the necessary instrumentation flags, rather than just building # the whole JS engine with instrumentation, to reduce the amount of noise. 
SOURCES += [ - 'vm/StructuredClone.cpp', + "vm/StructuredClone.cpp", ] - SOURCES['vm/StructuredClone.cpp'].flags += libfuzzer_flags + SOURCES["vm/StructuredClone.cpp"].flags += libfuzzer_flags else: UNIFIED_SOURCES += [ - 'vm/StructuredClone.cpp', + "vm/StructuredClone.cpp", ] CONFIGURE_SUBST_FILES += [ - 'devtools/rootAnalysis/Makefile', + "devtools/rootAnalysis/Makefile", ] CONFIGURE_DEFINE_FILES += [ - 'js-confdefs.h', + "js-confdefs.h", ] -if not CONFIG['JS_STANDALONE']: +if not CONFIG["JS_STANDALONE"]: CONFIGURE_SUBST_FILES += [ - '../../config/autoconf-js.mk', - '../../config/emptyvars-js.mk', + "../../config/autoconf-js.mk", + "../../config/emptyvars-js.mk", ] CONFIGURE_DEFINE_FILES += [ - 'js-config.h', + "js-config.h", ] -if CONFIG['HAVE_DTRACE']: - GeneratedFile('javascript-trace.h') - EXPORTS += ['!javascript-trace.h'] +if CONFIG["HAVE_DTRACE"]: + GeneratedFile("javascript-trace.h") + EXPORTS += ["!javascript-trace.h"] # Changes to internal header files, used externally, massively slow down # browser builds. Don't add new files here unless you know what you're # doing! EXPORTS += [ - '!js-config.h', - 'jsapi.h', - 'jsfriendapi.h', - 'jspubtd.h', - 'jstypes.h', + "!js-config.h", + "jsapi.h", + "jsfriendapi.h", + "jspubtd.h", + "jstypes.h", ] EXPORTS.js += [ - '../public/AllocationLogging.h', - '../public/AllocationRecording.h', - '../public/AllocPolicy.h', - '../public/Array.h', - '../public/ArrayBuffer.h', - '../public/ArrayBufferMaybeShared.h', - '../public/BigInt.h', - '../public/BuildId.h', - '../public/CallArgs.h', - '../public/CallNonGenericMethod.h', - '../public/CharacterEncoding.h', - '../public/Class.h', - '../public/ComparisonOperators.h', - '../public/CompilationAndEvaluation.h', - '../public/CompileOptions.h', - '../public/ContextOptions.h', - '../public/Conversions.h', - '../public/Date.h', - '../public/Debug.h', - '../public/Equality.h', - '../public/ErrorReport.h', - '../public/Exception.h', - '../public/ForOfIterator.h', - '../public/GCAnnotations.h', - '../public/GCAPI.h', - '../public/GCHashTable.h', - '../public/GCPolicyAPI.h', - '../public/GCTypeMacros.h', - '../public/GCVariant.h', - '../public/GCVector.h', - '../public/HashTable.h', - '../public/HeapAPI.h', - '../public/Id.h', - '../public/Initialization.h', - '../public/JSON.h', - '../public/LocaleSensitive.h', - '../public/MemoryFunctions.h', - '../public/MemoryMetrics.h', - '../public/Modules.h', - '../public/Object.h', - '../public/OffThreadScriptCompilation.h', - '../public/Principals.h', - '../public/Printf.h', - '../public/ProfilingCategory.h', - '../public/ProfilingFrameIterator.h', - '../public/ProfilingStack.h', - '../public/Promise.h', - '../public/PropertyDescriptor.h', - '../public/PropertySpec.h', - '../public/ProtoKey.h', - '../public/Proxy.h', - '../public/Realm.h', - '../public/RealmOptions.h', - '../public/RefCounted.h', - '../public/RegExp.h', - '../public/RegExpFlags.h', - '../public/RequiredDefines.h', - '../public/Result.h', - '../public/RootingAPI.h', - '../public/SavedFrameAPI.h', - '../public/ScalarType.h', - '../public/SharedArrayBuffer.h', - '../public/SliceBudget.h', - '../public/SourceText.h', - '../public/StableStringChars.h', - '../public/Stream.h', - '../public/String.h', - '../public/StructuredClone.h', - '../public/SweepingAPI.h', - '../public/Symbol.h', - '../public/TraceKind.h', - '../public/TraceLoggerAPI.h', - '../public/TracingAPI.h', - '../public/Transcoding.h', - '../public/TypeDecls.h', - '../public/UbiNode.h', - '../public/UbiNodeBreadthFirst.h', - 
'../public/UbiNodeCensus.h', - '../public/UbiNodeDominatorTree.h', - '../public/UbiNodePostOrder.h', - '../public/UbiNodeShortestPaths.h', - '../public/UbiNodeUtils.h', - '../public/UniquePtr.h', - '../public/Utility.h', - '../public/Value.h', - '../public/ValueArray.h', - '../public/Vector.h', - '../public/Warnings.h', - '../public/WasmModule.h', - '../public/WeakMapPtr.h', - '../public/Wrapper.h', + "../public/AllocationLogging.h", + "../public/AllocationRecording.h", + "../public/AllocPolicy.h", + "../public/Array.h", + "../public/ArrayBuffer.h", + "../public/ArrayBufferMaybeShared.h", + "../public/BigInt.h", + "../public/BuildId.h", + "../public/CallArgs.h", + "../public/CallNonGenericMethod.h", + "../public/CharacterEncoding.h", + "../public/Class.h", + "../public/ComparisonOperators.h", + "../public/CompilationAndEvaluation.h", + "../public/CompileOptions.h", + "../public/ContextOptions.h", + "../public/Conversions.h", + "../public/Date.h", + "../public/Debug.h", + "../public/Equality.h", + "../public/ErrorReport.h", + "../public/Exception.h", + "../public/ForOfIterator.h", + "../public/GCAnnotations.h", + "../public/GCAPI.h", + "../public/GCHashTable.h", + "../public/GCPolicyAPI.h", + "../public/GCTypeMacros.h", + "../public/GCVariant.h", + "../public/GCVector.h", + "../public/HashTable.h", + "../public/HeapAPI.h", + "../public/Id.h", + "../public/Initialization.h", + "../public/JSON.h", + "../public/LocaleSensitive.h", + "../public/MemoryFunctions.h", + "../public/MemoryMetrics.h", + "../public/Modules.h", + "../public/Object.h", + "../public/OffThreadScriptCompilation.h", + "../public/Principals.h", + "../public/Printf.h", + "../public/ProfilingCategory.h", + "../public/ProfilingFrameIterator.h", + "../public/ProfilingStack.h", + "../public/Promise.h", + "../public/PropertyDescriptor.h", + "../public/PropertySpec.h", + "../public/ProtoKey.h", + "../public/Proxy.h", + "../public/Realm.h", + "../public/RealmOptions.h", + "../public/RefCounted.h", + "../public/RegExp.h", + "../public/RegExpFlags.h", + "../public/RequiredDefines.h", + "../public/Result.h", + "../public/RootingAPI.h", + "../public/SavedFrameAPI.h", + "../public/ScalarType.h", + "../public/SharedArrayBuffer.h", + "../public/SliceBudget.h", + "../public/SourceText.h", + "../public/StableStringChars.h", + "../public/Stream.h", + "../public/String.h", + "../public/StructuredClone.h", + "../public/SweepingAPI.h", + "../public/Symbol.h", + "../public/TraceKind.h", + "../public/TraceLoggerAPI.h", + "../public/TracingAPI.h", + "../public/Transcoding.h", + "../public/TypeDecls.h", + "../public/UbiNode.h", + "../public/UbiNodeBreadthFirst.h", + "../public/UbiNodeCensus.h", + "../public/UbiNodeDominatorTree.h", + "../public/UbiNodePostOrder.h", + "../public/UbiNodeShortestPaths.h", + "../public/UbiNodeUtils.h", + "../public/UniquePtr.h", + "../public/Utility.h", + "../public/Value.h", + "../public/ValueArray.h", + "../public/Vector.h", + "../public/Warnings.h", + "../public/WasmModule.h", + "../public/WeakMapPtr.h", + "../public/Wrapper.h", ] # This lives in mozglue/baseprofiler but we re-publish ourselves to support # standalone SpiderMonkey configurations. EXPORTS.js += [ - '../../mozglue/baseprofiler/public/ProfilingCategoryList.h', + "../../mozglue/baseprofiler/public/ProfilingCategoryList.h", ] # Public APIs that are experimental: the precise contour of the APIs may still # change, but they're at least plausible first passes at designing something. # We expose them as-is, buyer beware. 
EXPORTS.js.experimental += [ - '../public/experimental/CodeCoverage.h', - '../public/experimental/Intl.h', - '../public/experimental/JitInfo.h', - '../public/experimental/SourceHook.h', - '../public/experimental/TypedData.h', + "../public/experimental/CodeCoverage.h", + "../public/experimental/Intl.h", + "../public/experimental/JitInfo.h", + "../public/experimental/SourceHook.h", + "../public/experimental/TypedData.h", ] # Friend APIs are APIs that either basically SpiderMonkey-internal, or their @@ -236,15 +236,15 @@ EXPORTS.js.experimental += [ # a clean design. Use this only if you absolutely must, and feel free to # propose clean APIs to replace what's here! EXPORTS.js.friend += [ - '../public/friend/DOMProxy.h', - '../public/friend/DumpFunctions.h', - '../public/friend/ErrorMessages.h', - '../public/friend/ErrorNumbers.msg', - '../public/friend/JSMEnvironment.h', - '../public/friend/StackLimits.h', - '../public/friend/UsageStatistics.h', - '../public/friend/WindowProxy.h', - '../public/friend/XrayJitInfo.h', + "../public/friend/DOMProxy.h", + "../public/friend/DumpFunctions.h", + "../public/friend/ErrorMessages.h", + "../public/friend/ErrorNumbers.msg", + "../public/friend/JSMEnvironment.h", + "../public/friend/StackLimits.h", + "../public/friend/UsageStatistics.h", + "../public/friend/WindowProxy.h", + "../public/friend/XrayJitInfo.h", ] # "Shadow" API defines the internal layout of public JSAPI types like |JSObject| @@ -262,177 +262,177 @@ EXPORTS.js.friend += [ # to back actual implementations *with* these shadow definitions to eliminate # this requirement.) EXPORTS.js.shadow += [ - '../public/shadow/Function.h', - '../public/shadow/Object.h', - '../public/shadow/ObjectGroup.h', - '../public/shadow/Realm.h', - '../public/shadow/Shape.h', - '../public/shadow/String.h', - '../public/shadow/Symbol.h', - '../public/shadow/Zone.h', + "../public/shadow/Function.h", + "../public/shadow/Object.h", + "../public/shadow/ObjectGroup.h", + "../public/shadow/Realm.h", + "../public/shadow/Shape.h", + "../public/shadow/String.h", + "../public/shadow/Symbol.h", + "../public/shadow/Zone.h", ] UNIFIED_SOURCES += [ - 'builtin/Array.cpp', - 'builtin/AtomicsObject.cpp', - 'builtin/BigInt.cpp', - 'builtin/Boolean.cpp', - 'builtin/DataViewObject.cpp', - 'builtin/Eval.cpp', - 'builtin/FinalizationRegistryObject.cpp', - 'builtin/JSON.cpp', - 'builtin/MapObject.cpp', - 'builtin/ModuleObject.cpp', - 'builtin/Object.cpp', - 'builtin/Profilers.cpp', - 'builtin/Promise.cpp', - 'builtin/Reflect.cpp', - 'builtin/ReflectParse.cpp', - 'builtin/Stream.cpp', - 'builtin/streams/MiscellaneousOperations.cpp', - 'builtin/streams/PipeToState.cpp', - 'builtin/streams/PullIntoDescriptor.cpp', - 'builtin/streams/QueueingStrategies.cpp', - 'builtin/streams/QueueWithSizes.cpp', - 'builtin/streams/ReadableStream.cpp', - 'builtin/streams/ReadableStreamBYOBReader.cpp', - 'builtin/streams/ReadableStreamDefaultController.cpp', - 'builtin/streams/ReadableStreamDefaultControllerOperations.cpp', - 'builtin/streams/ReadableStreamDefaultReader.cpp', - 'builtin/streams/ReadableStreamInternals.cpp', - 'builtin/streams/ReadableStreamOperations.cpp', - 'builtin/streams/ReadableStreamReader.cpp', - 'builtin/streams/StreamAPI.cpp', - 'builtin/streams/TeeState.cpp', - 'builtin/streams/WritableStream.cpp', - 'builtin/streams/WritableStreamDefaultController.cpp', - 'builtin/streams/WritableStreamDefaultControllerOperations.cpp', - 'builtin/streams/WritableStreamDefaultWriter.cpp', - 'builtin/streams/WritableStreamOperations.cpp', - 
'builtin/streams/WritableStreamWriterOperations.cpp', - 'builtin/String.cpp', - 'builtin/Symbol.cpp', - 'builtin/TestingFunctions.cpp', - 'builtin/WeakMapObject.cpp', - 'builtin/WeakRefObject.cpp', - 'builtin/WeakSetObject.cpp', - 'ds/Bitmap.cpp', - 'ds/LifoAlloc.cpp', - 'ds/MemoryProtectionExceptionHandler.cpp', - 'jsapi.cpp', - 'jsdate.cpp', - 'jsexn.cpp', - 'jsfriendapi.cpp', - 'jsnum.cpp', - 'proxy/BaseProxyHandler.cpp', - 'proxy/CrossCompartmentWrapper.cpp', - 'proxy/DeadObjectProxy.cpp', - 'proxy/DOMProxy.cpp', - 'proxy/OpaqueCrossCompartmentWrapper.cpp', - 'proxy/Proxy.cpp', - 'proxy/ScriptedProxyHandler.cpp', - 'proxy/SecurityWrapper.cpp', - 'proxy/Wrapper.cpp', - 'threading/Mutex.cpp', - 'threading/ProtectedData.cpp', - 'threading/Thread.cpp', - 'util/AllocationLogging.cpp', - 'util/AllocPolicy.cpp', - 'util/CompleteFile.cpp', - 'util/DumpFunctions.cpp', - 'util/NativeStack.cpp', - 'util/Printf.cpp', - 'util/StringBuffer.cpp', - 'util/StructuredSpewer.cpp', - 'util/Text.cpp', - 'util/Unicode.cpp', - 'vm/Activation.cpp', - 'vm/ArgumentsObject.cpp', - 'vm/ArrayBufferObject.cpp', - 'vm/ArrayBufferObjectMaybeShared.cpp', - 'vm/ArrayBufferViewObject.cpp', - 'vm/AsyncFunction.cpp', - 'vm/AsyncIteration.cpp', - 'vm/BigIntType.cpp', - 'vm/BuildId.cpp', - 'vm/BuiltinObjectKind.cpp', - 'vm/BytecodeLocation.cpp', - 'vm/BytecodeUtil.cpp', - 'vm/Caches.cpp', - 'vm/CallNonGenericMethod.cpp', - 'vm/CharacterEncoding.cpp', - 'vm/CodeCoverage.cpp', - 'vm/Compartment.cpp', - 'vm/CompilationAndEvaluation.cpp', - 'vm/Compression.cpp', - 'vm/DateTime.cpp', - 'vm/EnvironmentObject.cpp', - 'vm/EqualityOperations.cpp', - 'vm/ErrorMessages.cpp', - 'vm/ErrorObject.cpp', - 'vm/ErrorReporting.cpp', - 'vm/Exception.cpp', - 'vm/ForOfIterator.cpp', - 'vm/FrameIter.cpp', - 'vm/FunctionFlags.cpp', - 'vm/GeckoProfiler.cpp', - 'vm/GeneratorObject.cpp', - 'vm/GlobalObject.cpp', - 'vm/HelperThreads.cpp', - 'vm/Id.cpp', - 'vm/Initialization.cpp', - 'vm/Instrumentation.cpp', - 'vm/Iteration.cpp', - 'vm/JitActivation.cpp', - 'vm/JSAtom.cpp', - 'vm/JSContext.cpp', - 'vm/JSFunction.cpp', - 'vm/JSObject.cpp', - 'vm/JSONParser.cpp', - 'vm/JSONPrinter.cpp', - 'vm/JSScript.cpp', - 'vm/List.cpp', - 'vm/MemoryMetrics.cpp', - 'vm/Modules.cpp', - 'vm/NativeObject.cpp', - 'vm/ObjectGroup.cpp', - 'vm/OffThreadPromiseRuntimeState.cpp', - 'vm/OffThreadScriptCompilation.cpp', - 'vm/PIC.cpp', - 'vm/PlainObject.cpp', - 'vm/Printer.cpp', - 'vm/Probes.cpp', - 'vm/PromiseLookup.cpp', - 'vm/ProxyObject.cpp', - 'vm/Realm.cpp', - 'vm/ReceiverGuard.cpp', - 'vm/RegExpObject.cpp', - 'vm/RegExpStatics.cpp', - 'vm/Runtime.cpp', - 'vm/SavedStacks.cpp', - 'vm/Scope.cpp', - 'vm/SelfHosting.cpp', - 'vm/Shape.cpp', - 'vm/SharedArrayObject.cpp', - 'vm/SharedImmutableStringsCache.cpp', - 'vm/SourceHook.cpp', - 'vm/Stack.cpp', - 'vm/StringType.cpp', - 'vm/SymbolType.cpp', - 'vm/TaggedProto.cpp', - 'vm/ThrowMsgKind.cpp', - 'vm/Time.cpp', - 'vm/ToSource.cpp', - 'vm/TypedArrayObject.cpp', - 'vm/TypeInference.cpp', - 'vm/UbiNode.cpp', - 'vm/UbiNodeCensus.cpp', - 'vm/UbiNodeShortestPaths.cpp', - 'vm/UsageStatistics.cpp', - 'vm/Value.cpp', - 'vm/Warnings.cpp', - 'vm/WindowProxy.cpp', - 'vm/Xdr.cpp', + "builtin/Array.cpp", + "builtin/AtomicsObject.cpp", + "builtin/BigInt.cpp", + "builtin/Boolean.cpp", + "builtin/DataViewObject.cpp", + "builtin/Eval.cpp", + "builtin/FinalizationRegistryObject.cpp", + "builtin/JSON.cpp", + "builtin/MapObject.cpp", + "builtin/ModuleObject.cpp", + "builtin/Object.cpp", + "builtin/Profilers.cpp", + "builtin/Promise.cpp", + 
"builtin/Reflect.cpp", + "builtin/ReflectParse.cpp", + "builtin/Stream.cpp", + "builtin/streams/MiscellaneousOperations.cpp", + "builtin/streams/PipeToState.cpp", + "builtin/streams/PullIntoDescriptor.cpp", + "builtin/streams/QueueingStrategies.cpp", + "builtin/streams/QueueWithSizes.cpp", + "builtin/streams/ReadableStream.cpp", + "builtin/streams/ReadableStreamBYOBReader.cpp", + "builtin/streams/ReadableStreamDefaultController.cpp", + "builtin/streams/ReadableStreamDefaultControllerOperations.cpp", + "builtin/streams/ReadableStreamDefaultReader.cpp", + "builtin/streams/ReadableStreamInternals.cpp", + "builtin/streams/ReadableStreamOperations.cpp", + "builtin/streams/ReadableStreamReader.cpp", + "builtin/streams/StreamAPI.cpp", + "builtin/streams/TeeState.cpp", + "builtin/streams/WritableStream.cpp", + "builtin/streams/WritableStreamDefaultController.cpp", + "builtin/streams/WritableStreamDefaultControllerOperations.cpp", + "builtin/streams/WritableStreamDefaultWriter.cpp", + "builtin/streams/WritableStreamOperations.cpp", + "builtin/streams/WritableStreamWriterOperations.cpp", + "builtin/String.cpp", + "builtin/Symbol.cpp", + "builtin/TestingFunctions.cpp", + "builtin/WeakMapObject.cpp", + "builtin/WeakRefObject.cpp", + "builtin/WeakSetObject.cpp", + "ds/Bitmap.cpp", + "ds/LifoAlloc.cpp", + "ds/MemoryProtectionExceptionHandler.cpp", + "jsapi.cpp", + "jsdate.cpp", + "jsexn.cpp", + "jsfriendapi.cpp", + "jsnum.cpp", + "proxy/BaseProxyHandler.cpp", + "proxy/CrossCompartmentWrapper.cpp", + "proxy/DeadObjectProxy.cpp", + "proxy/DOMProxy.cpp", + "proxy/OpaqueCrossCompartmentWrapper.cpp", + "proxy/Proxy.cpp", + "proxy/ScriptedProxyHandler.cpp", + "proxy/SecurityWrapper.cpp", + "proxy/Wrapper.cpp", + "threading/Mutex.cpp", + "threading/ProtectedData.cpp", + "threading/Thread.cpp", + "util/AllocationLogging.cpp", + "util/AllocPolicy.cpp", + "util/CompleteFile.cpp", + "util/DumpFunctions.cpp", + "util/NativeStack.cpp", + "util/Printf.cpp", + "util/StringBuffer.cpp", + "util/StructuredSpewer.cpp", + "util/Text.cpp", + "util/Unicode.cpp", + "vm/Activation.cpp", + "vm/ArgumentsObject.cpp", + "vm/ArrayBufferObject.cpp", + "vm/ArrayBufferObjectMaybeShared.cpp", + "vm/ArrayBufferViewObject.cpp", + "vm/AsyncFunction.cpp", + "vm/AsyncIteration.cpp", + "vm/BigIntType.cpp", + "vm/BuildId.cpp", + "vm/BuiltinObjectKind.cpp", + "vm/BytecodeLocation.cpp", + "vm/BytecodeUtil.cpp", + "vm/Caches.cpp", + "vm/CallNonGenericMethod.cpp", + "vm/CharacterEncoding.cpp", + "vm/CodeCoverage.cpp", + "vm/Compartment.cpp", + "vm/CompilationAndEvaluation.cpp", + "vm/Compression.cpp", + "vm/DateTime.cpp", + "vm/EnvironmentObject.cpp", + "vm/EqualityOperations.cpp", + "vm/ErrorMessages.cpp", + "vm/ErrorObject.cpp", + "vm/ErrorReporting.cpp", + "vm/Exception.cpp", + "vm/ForOfIterator.cpp", + "vm/FrameIter.cpp", + "vm/FunctionFlags.cpp", + "vm/GeckoProfiler.cpp", + "vm/GeneratorObject.cpp", + "vm/GlobalObject.cpp", + "vm/HelperThreads.cpp", + "vm/Id.cpp", + "vm/Initialization.cpp", + "vm/Instrumentation.cpp", + "vm/Iteration.cpp", + "vm/JitActivation.cpp", + "vm/JSAtom.cpp", + "vm/JSContext.cpp", + "vm/JSFunction.cpp", + "vm/JSObject.cpp", + "vm/JSONParser.cpp", + "vm/JSONPrinter.cpp", + "vm/JSScript.cpp", + "vm/List.cpp", + "vm/MemoryMetrics.cpp", + "vm/Modules.cpp", + "vm/NativeObject.cpp", + "vm/ObjectGroup.cpp", + "vm/OffThreadPromiseRuntimeState.cpp", + "vm/OffThreadScriptCompilation.cpp", + "vm/PIC.cpp", + "vm/PlainObject.cpp", + "vm/Printer.cpp", + "vm/Probes.cpp", + "vm/PromiseLookup.cpp", + "vm/ProxyObject.cpp", + 
"vm/Realm.cpp", + "vm/ReceiverGuard.cpp", + "vm/RegExpObject.cpp", + "vm/RegExpStatics.cpp", + "vm/Runtime.cpp", + "vm/SavedStacks.cpp", + "vm/Scope.cpp", + "vm/SelfHosting.cpp", + "vm/Shape.cpp", + "vm/SharedArrayObject.cpp", + "vm/SharedImmutableStringsCache.cpp", + "vm/SourceHook.cpp", + "vm/Stack.cpp", + "vm/StringType.cpp", + "vm/SymbolType.cpp", + "vm/TaggedProto.cpp", + "vm/ThrowMsgKind.cpp", + "vm/Time.cpp", + "vm/ToSource.cpp", + "vm/TypedArrayObject.cpp", + "vm/TypeInference.cpp", + "vm/UbiNode.cpp", + "vm/UbiNodeCensus.cpp", + "vm/UbiNodeShortestPaths.cpp", + "vm/UsageStatistics.cpp", + "vm/Value.cpp", + "vm/Warnings.cpp", + "vm/WindowProxy.cpp", + "vm/Xdr.cpp", ] # builtin/RegExp.cpp cannot be built in unified mode because it causes huge @@ -446,149 +446,147 @@ UNIFIED_SOURCES += [ # vm/Interpreter.cpp is gigantic and destroys incremental build times for any # files unlucky enough to be unified with it. SOURCES += [ - 'builtin/RegExp.cpp', - 'jsmath.cpp', - 'util/DoubleToString.cpp', - 'util/Utility.cpp', - 'vm/Interpreter.cpp', - 'vm/ProfilingStack.cpp', + "builtin/RegExp.cpp", + "jsmath.cpp", + "util/DoubleToString.cpp", + "util/Utility.cpp", + "vm/Interpreter.cpp", + "vm/ProfilingStack.cpp", ] -if CONFIG['JS_HAS_INTL_API']: +if CONFIG["JS_HAS_INTL_API"]: UNIFIED_SOURCES += [ - 'builtin/intl/Collator.cpp', - 'builtin/intl/CommonFunctions.cpp', - 'builtin/intl/DateTimeFormat.cpp', - 'builtin/intl/DisplayNames.cpp', - 'builtin/intl/IntlObject.cpp', - 'builtin/intl/LanguageTag.cpp', - 'builtin/intl/LanguageTagGenerated.cpp', - 'builtin/intl/ListFormat.cpp', - 'builtin/intl/Locale.cpp', - 'builtin/intl/NumberFormat.cpp', - 'builtin/intl/PluralRules.cpp', - 'builtin/intl/RelativeTimeFormat.cpp', - 'builtin/intl/SharedIntlData.cpp', + "builtin/intl/Collator.cpp", + "builtin/intl/CommonFunctions.cpp", + "builtin/intl/DateTimeFormat.cpp", + "builtin/intl/DisplayNames.cpp", + "builtin/intl/IntlObject.cpp", + "builtin/intl/LanguageTag.cpp", + "builtin/intl/LanguageTagGenerated.cpp", + "builtin/intl/ListFormat.cpp", + "builtin/intl/Locale.cpp", + "builtin/intl/NumberFormat.cpp", + "builtin/intl/PluralRules.cpp", + "builtin/intl/RelativeTimeFormat.cpp", + "builtin/intl/SharedIntlData.cpp", ] -if CONFIG['MOZ_INSTRUMENTS']: +if CONFIG["MOZ_INSTRUMENTS"]: SOURCES += [ - 'devtools/Instruments.cpp', + "devtools/Instruments.cpp", ] -if CONFIG['ENABLE_TRACE_LOGGING']: +if CONFIG["ENABLE_TRACE_LOGGING"]: SOURCES += [ - 'vm/TraceLogging.cpp', - 'vm/TraceLoggingGraph.cpp', - 'vm/TraceLoggingTypes.cpp', + "vm/TraceLogging.cpp", + "vm/TraceLoggingGraph.cpp", + "vm/TraceLoggingTypes.cpp", ] -if CONFIG['OS_ARCH'] == 'WINNT': +if CONFIG["OS_ARCH"] == "WINNT": UNIFIED_SOURCES += [ - 'threading/windows/CpuCount.cpp', - 'threading/windows/WindowsThread.cpp', + "threading/windows/CpuCount.cpp", + "threading/windows/WindowsThread.cpp", ] else: UNIFIED_SOURCES += [ - 'threading/posix/CpuCount.cpp', - 'threading/posix/PosixThread.cpp', + "threading/posix/CpuCount.cpp", + "threading/posix/PosixThread.cpp", ] -if CONFIG['JS_HAS_CTYPES']: +if CONFIG["JS_HAS_CTYPES"]: SOURCES += [ - 'ctypes/CTypes.cpp', - 'ctypes/Library.cpp', + "ctypes/CTypes.cpp", + "ctypes/Library.cpp", ] - if not CONFIG['MOZ_SYSTEM_FFI']: + if not CONFIG["MOZ_SYSTEM_FFI"]: LOCAL_INCLUDES += [ - '!ctypes/libffi/include', - 'ctypes/libffi/src/%s' % CONFIG['FFI_TARGET_DIR'], + "!ctypes/libffi/include", + "ctypes/libffi/src/%s" % CONFIG["FFI_TARGET_DIR"], ] -if CONFIG['MOZ_VTUNE']: +if CONFIG["MOZ_VTUNE"]: SOURCES += [ - 
'vtune/ittnotify_static.c', - 'vtune/jitprofiling.c', - 'vtune/VTuneWrapper.cpp', + "vtune/ittnotify_static.c", + "vtune/jitprofiling.c", + "vtune/VTuneWrapper.cpp", ] - SOURCES['vtune/ittnotify_static.c'].flags += [ - '-Wno-varargs', - '-Wno-unknown-pragmas' + SOURCES["vtune/ittnotify_static.c"].flags += [ + "-Wno-varargs", + "-Wno-unknown-pragmas", ] - if CONFIG['CC_TYPE'] == 'gcc': - SOURCES['vtune/ittnotify_static.c'].flags += [ - '-Wno-stringop-overflow' - ] + if CONFIG["CC_TYPE"] == "gcc": + SOURCES["vtune/ittnotify_static.c"].flags += ["-Wno-stringop-overflow"] -DIRS += [ - 'build', - 'debugger', - 'frontend', - 'gc', - 'irregexp', - 'jit', - 'wasm'] +DIRS += ["build", "debugger", "frontend", "gc", "irregexp", "jit", "wasm"] -if CONFIG['JS_JITSPEW']: - DIRS += ['zydis'] +if CONFIG["JS_JITSPEW"]: + DIRS += ["zydis"] -FINAL_LIBRARY = 'js' +FINAL_LIBRARY = "js" selfhosted_inputs = [ - '../public/friend/ErrorNumbers.msg', - 'builtin/TypedArrayConstants.h', - 'builtin/SelfHostingDefines.h', - 'builtin/Utilities.js', - 'builtin/Array.js', - 'builtin/AsyncFunction.js', - 'builtin/AsyncIteration.js', - 'builtin/BigInt.js', - 'builtin/Classes.js', - 'builtin/Date.js', - 'builtin/Error.js', - 'builtin/Function.js', - 'builtin/Generator.js', - 'builtin/Iterator.js', - 'builtin/Map.js', - 'builtin/Module.js', - 'builtin/Number.js', - 'builtin/Object.js', - 'builtin/Promise.js', - 'builtin/Reflect.js', - 'builtin/RegExp.js', - 'builtin/RegExpGlobalReplaceOpt.h.js', - 'builtin/RegExpLocalReplaceOpt.h.js', - 'builtin/String.js', - 'builtin/Set.js', - 'builtin/Sorting.js', - 'builtin/TypedArray.js', - 'builtin/WeakMap.js', - 'builtin/WeakSet.js' -] + ([ - 'builtin/intl/NumberingSystemsGenerated.h', - 'builtin/intl/Collator.js', - 'builtin/intl/CommonFunctions.js', - 'builtin/intl/CurrencyDataGenerated.js', - 'builtin/intl/DateTimeFormat.js', - 'builtin/intl/DisplayNames.js', - 'builtin/intl/IntlObject.js', - 'builtin/intl/ListFormat.js', - 'builtin/intl/NumberFormat.js', - 'builtin/intl/PluralRules.js', - 'builtin/intl/RelativeTimeFormat.js', - 'builtin/intl/SanctionedSimpleUnitIdentifiersGenerated.js', -] if CONFIG['JS_HAS_INTL_API'] else []) + "../public/friend/ErrorNumbers.msg", + "builtin/TypedArrayConstants.h", + "builtin/SelfHostingDefines.h", + "builtin/Utilities.js", + "builtin/Array.js", + "builtin/AsyncFunction.js", + "builtin/AsyncIteration.js", + "builtin/BigInt.js", + "builtin/Classes.js", + "builtin/Date.js", + "builtin/Error.js", + "builtin/Function.js", + "builtin/Generator.js", + "builtin/Iterator.js", + "builtin/Map.js", + "builtin/Module.js", + "builtin/Number.js", + "builtin/Object.js", + "builtin/Promise.js", + "builtin/Reflect.js", + "builtin/RegExp.js", + "builtin/RegExpGlobalReplaceOpt.h.js", + "builtin/RegExpLocalReplaceOpt.h.js", + "builtin/String.js", + "builtin/Set.js", + "builtin/Sorting.js", + "builtin/TypedArray.js", + "builtin/WeakMap.js", + "builtin/WeakSet.js", +] + ( + [ + "builtin/intl/NumberingSystemsGenerated.h", + "builtin/intl/Collator.js", + "builtin/intl/CommonFunctions.js", + "builtin/intl/CurrencyDataGenerated.js", + "builtin/intl/DateTimeFormat.js", + "builtin/intl/DisplayNames.js", + "builtin/intl/IntlObject.js", + "builtin/intl/ListFormat.js", + "builtin/intl/NumberFormat.js", + "builtin/intl/PluralRules.js", + "builtin/intl/RelativeTimeFormat.js", + "builtin/intl/SanctionedSimpleUnitIdentifiersGenerated.js", + ] + if CONFIG["JS_HAS_INTL_API"] + else [] +) # Prepare self-hosted JS code for embedding -GeneratedFile('selfhosted.out.h', 'selfhosted.js', - 
script='builtin/embedjs.py', - entry_point='generate_selfhosted', - inputs=selfhosted_inputs) - -if CONFIG['JS_HAS_CTYPES']: - if CONFIG['MOZ_SYSTEM_FFI']: - CXXFLAGS += CONFIG['MOZ_FFI_CFLAGS'] +GeneratedFile( + "selfhosted.out.h", + "selfhosted.js", + script="builtin/embedjs.py", + entry_point="generate_selfhosted", + inputs=selfhosted_inputs, +) + +if CONFIG["JS_HAS_CTYPES"]: + if CONFIG["MOZ_SYSTEM_FFI"]: + CXXFLAGS += CONFIG["MOZ_FFI_CFLAGS"] # Suppress warnings in third-party code. -if CONFIG['CC_TYPE'] in ('clang', 'gcc'): - SOURCES['util/DoubleToString.cpp'].flags += ['-Wno-implicit-fallthrough'] +if CONFIG["CC_TYPE"] in ("clang", "gcc"): + SOURCES["util/DoubleToString.cpp"].flags += ["-Wno-implicit-fallthrough"] diff --git a/js/src/rust/moz.build b/js/src/rust/moz.build index dddee89b2c3440..44fd84a7de787e 100644 --- a/js/src/rust/moz.build +++ b/js/src/rust/moz.build @@ -6,35 +6,35 @@ features = [] -if CONFIG['ENABLE_WASM_CRANELIFT']: - if CONFIG['JS_CODEGEN_X64'] or CONFIG['JS_CODEGEN_X86']: - features += ['cranelift_x86'] - elif CONFIG['JS_CODEGEN_ARM']: - features += ['cranelift_arm32'] - elif CONFIG['JS_CODEGEN_ARM64']: - features += ['cranelift_arm64'] +if CONFIG["ENABLE_WASM_CRANELIFT"]: + if CONFIG["JS_CODEGEN_X64"] or CONFIG["JS_CODEGEN_X86"]: + features += ["cranelift_x86"] + elif CONFIG["JS_CODEGEN_ARM"]: + features += ["cranelift_arm32"] + elif CONFIG["JS_CODEGEN_ARM64"]: + features += ["cranelift_arm64"] else: - features += ['cranelift_none'] + features += ["cranelift_none"] -if CONFIG['MOZ_RUST_SIMD']: - features += ['simd-accel'] +if CONFIG["MOZ_RUST_SIMD"]: + features += ["simd-accel"] -if CONFIG['JS_ENABLE_SMOOSH']: - features += ['smoosh'] +if CONFIG["JS_ENABLE_SMOOSH"]: + features += ["smoosh"] -RustLibrary('jsrust', features) +RustLibrary("jsrust", features) -if CONFIG['JS_SHARED_LIBRARY']: - FINAL_LIBRARY = 'js' +if CONFIG["JS_SHARED_LIBRARY"]: + FINAL_LIBRARY = "js" -if CONFIG['OS_ARCH'] == 'Darwin': +if CONFIG["OS_ARCH"] == "Darwin": # The Rust standard library references libresolv on macOS, so we need to # link it as a workaround. See also bug 1367932. - OS_LIBS += ['-lresolv'] -elif CONFIG['OS_ARCH'] == 'WINNT': + OS_LIBS += ["-lresolv"] +elif CONFIG["OS_ARCH"] == "WINNT": # Extra libraries used by Rust bindings libs in debug builds. OS_LIBS += [ - 'shell32', - 'userenv', - 'ws2_32', + "shell32", + "userenv", + "ws2_32", ] diff --git a/js/src/shell/js-gdb.py b/js/src/shell/js-gdb.py index 6ddd0b10a9ef82..cdc68522681a33 100644 --- a/js/src/shell/js-gdb.py +++ b/js/src/shell/js-gdb.py @@ -9,8 +9,8 @@ from os.path import abspath, dirname -devel_objdir = abspath(os.path.join(dirname(__file__), '..', '..')) -m = re.search(r'[\w ]+: (.*)', gdb.execute("show dir", False, True)) +devel_objdir = abspath(os.path.join(dirname(__file__), "..", "..")) +m = re.search(r"[\w ]+: (.*)", gdb.execute("show dir", False, True)) if m and devel_objdir not in m.group(1).split(":"): gdb.execute("set dir {}:{}".format(devel_objdir, m.group(1))) diff --git a/js/src/shell/moz.build b/js/src/shell/moz.build index 46d6710193ae01..01098928218633 100644 --- a/js/src/shell/moz.build +++ b/js/src/shell/moz.build @@ -4,45 +4,45 @@ # License, v. 2.0. If a copy of the MPL was not distributed with this # file, You can obtain one at http://mozilla.org/MPL/2.0/. 
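The different treatment of lists in these moz.build hunks follows Black's trailing-comma rule: a bracketed collection that fits within the line length and has no trailing comma is collapsed onto one line (the DIRS assignment in js/src/moz.build above, USE_LIBS += ["editline"] below), while a collection that is too long, or that already ends in a trailing comma, is kept one element per line and given a trailing comma (the UNIFIED_SOURCES block below). A minimal sketch, assuming the black package is importable:

    # Illustrative sketch, not part of the patch.
    import black

    mode = black.FileMode()

    no_comma = 'USE_LIBS += [\n    "editline"\n]\n'
    magic_comma = 'USE_LIBS += [\n    "editline",\n]\n'

    # Fits in 88 columns and has no trailing comma: collapsed onto one line.
    print(black.format_str(no_comma, mode=mode), end="")
    # A pre-existing ("magic") trailing comma keeps the exploded, one-per-line form.
    print(black.format_str(magic_comma, mode=mode), end="")
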
-if CONFIG['JS_SHELL_NAME']: - GeckoProgram(CONFIG['JS_SHELL_NAME'], linkage=None) - if CONFIG['JS_BUNDLED_EDITLINE']: - DIRS += ['../editline'] - USE_LIBS += ['editline'] - USE_LIBS += ['static:js'] +if CONFIG["JS_SHELL_NAME"]: + GeckoProgram(CONFIG["JS_SHELL_NAME"], linkage=None) + if CONFIG["JS_BUNDLED_EDITLINE"]: + DIRS += ["../editline"] + USE_LIBS += ["editline"] + USE_LIBS += ["static:js"] -include('../js-config.mozbuild') -include('../js-cxxflags.mozbuild') -include('../js-standalone.mozbuild') +include("../js-config.mozbuild") +include("../js-cxxflags.mozbuild") +include("../js-standalone.mozbuild") UNIFIED_SOURCES += [ - 'js.cpp', - 'jsoptparse.cpp', - 'jsshell.cpp', - 'ModuleLoader.cpp', - 'OSObject.cpp', - 'WasmTesting.cpp' + "js.cpp", + "jsoptparse.cpp", + "jsshell.cpp", + "ModuleLoader.cpp", + "OSObject.cpp", + "WasmTesting.cpp", ] -if CONFIG['FUZZING_INTERFACES']: - UNIFIED_SOURCES += ['jsrtfuzzing/jsrtfuzzing.cpp'] +if CONFIG["FUZZING_INTERFACES"]: + UNIFIED_SOURCES += ["jsrtfuzzing/jsrtfuzzing.cpp"] USE_LIBS += [ - 'static:fuzzer', + "static:fuzzer", ] -DEFINES['EXPORT_JS_API'] = True +DEFINES["EXPORT_JS_API"] = True LOCAL_INCLUDES += [ - '!..', - '..', + "!..", + "..", ] -OS_LIBS += CONFIG['EDITLINE_LIBS'] +OS_LIBS += CONFIG["EDITLINE_LIBS"] # Place a GDB Python auto-load file next to the shell executable, both in # the build directory and in the dist/bin directory. -FINAL_TARGET_FILES += ['js-gdb.py'] -OBJDIR_FILES.js.src.shell += ['!/dist/bin/js-gdb.py'] +FINAL_TARGET_FILES += ["js-gdb.py"] +OBJDIR_FILES.js.src.shell += ["!/dist/bin/js-gdb.py"] # People expect the js shell to wind up in the top-level JS dir. -OBJDIR_FILES.js.src += ['!/dist/bin/js%s' % CONFIG['BIN_SUFFIX']] +OBJDIR_FILES.js.src += ["!/dist/bin/js%s" % CONFIG["BIN_SUFFIX"]] diff --git a/js/src/tests/compare_bench.py b/js/src/tests/compare_bench.py index cfe9508cd4b392..8b9558af145021 100644 --- a/js/src/tests/compare_bench.py +++ b/js/src/tests/compare_bench.py @@ -19,9 +19,9 @@ def compare(current, baseline): try: baseline_result = baseline[key] except KeyError: - print(key, 'missing from baseline') + print(key, "missing from baseline") continue - val_getter = itemgetter('average_ms', 'stddev_ms') + val_getter = itemgetter("average_ms", "stddev_ms") base_avg, base_stddev = val_getter(baseline_result) current_avg, current_stddev = val_getter(current_result) t_best = current_avg - current_stddev @@ -31,20 +31,22 @@ def compare(current, baseline): if t_worst < base_t_best: # Worst takes less time (better) than baseline's best. speedup = -((t_worst - base_t_best) / base_t_best) * 100 - result = 'faster: {:6.2f}ms < baseline {:6.2f}ms ({:+6.2f}%)'.format( - t_worst, base_t_best, speedup) + result = "faster: {:6.2f}ms < baseline {:6.2f}ms ({:+6.2f}%)".format( + t_worst, base_t_best, speedup + ) percent_speedups.append(speedup) elif t_best > base_t_worst: # Best takes more time (worse) than baseline's worst. slowdown = -((t_best - base_t_worst) / base_t_worst) * 100 - result = 'SLOWER: {:6.2f}ms > baseline {:6.2f}ms ({:+6.2f}%) '.format( - t_best, base_t_worst, slowdown) + result = "SLOWER: {:6.2f}ms > baseline {:6.2f}ms ({:+6.2f}%) ".format( + t_best, base_t_worst, slowdown + ) percent_speedups.append(slowdown) else: - result = 'Meh.' - print('{:30s}: {}'.format(key, result)) + result = "Meh." 
+ print("{:30s}: {}".format(key, result)) if percent_speedups: - print('Average speedup: {:.2f}%'.format(avg(percent_speedups))) + print("Average speedup: {:.2f}%".format(avg(percent_speedups))) def compare_immediate(current_map, baseline_path): @@ -63,15 +65,15 @@ def main(candidate_path, baseline_path): baseline_file.close() -if __name__ == '__main__': +if __name__ == "__main__": parser = optparse.OptionParser(usage=__doc__.strip()) options, args = parser.parse_args() try: candidate_path = args.pop(0) except IndexError: - parser.error('A JSON filepath to compare against baseline is required') + parser.error("A JSON filepath to compare against baseline is required") try: baseline_path = args.pop(0) except IndexError: - parser.error('A JSON filepath for baseline is required') + parser.error("A JSON filepath for baseline is required") main(candidate_path, baseline_path) diff --git a/js/src/tests/jstests.py b/js/src/tests/jstests.py index 582a490bac0731..38312c688a01ce 100755 --- a/js/src/tests/jstests.py +++ b/js/src/tests/jstests.py @@ -27,12 +27,17 @@ from itertools import chain from subprocess import list2cmdline, call -from lib.tests import RefTestCase, get_jitflags, get_cpu_count, \ - get_environment_overlay, change_env +from lib.tests import ( + RefTestCase, + get_jitflags, + get_cpu_count, + get_environment_overlay, + change_env, +) from lib.results import ResultsSink, TestOutput from lib.progressbar import ProgressBar -if sys.platform.startswith('linux') or sys.platform.startswith('darwin'): +if sys.platform.startswith("linux") or sys.platform.startswith("darwin"): from lib.tasks_unix import run_all_tests else: from lib.tasks_win import run_all_tests @@ -54,7 +59,8 @@ class PathOptions(object): def __init__(self, location, requested_paths, excluded_paths): self.requested_paths = requested_paths self.excluded_files, self.excluded_dirs = PathOptions._split_files_and_dirs( - location, excluded_paths) + location, excluded_paths + ) @staticmethod def _split_files_and_dirs(location, paths): @@ -62,7 +68,7 @@ def _split_files_and_dirs(location, paths): files, dirs = set(), set() for path in paths: fullpath = os.path.join(location, path) - if path.endswith('/'): + if path.endswith("/"): dirs.add(path[:-1]) elif os.path.isdir(fullpath): dirs.add(path) @@ -73,7 +79,9 @@ def _split_files_and_dirs(location, paths): def should_run(self, filename): # If any tests are requested by name, skip tests that do not match. - if self.requested_paths and not any(req in filename for req in self.requested_paths): + if self.requested_paths and not any( + req in filename for req in self.requested_paths + ): return False # Skip excluded tests. @@ -81,7 +89,7 @@ def should_run(self, filename): return False for dir in self.excluded_dirs: - if filename.startswith(dir + '/'): + if filename.startswith(dir + "/"): return False return True @@ -97,156 +105,300 @@ def parse_args(): excluded_paths :set: Test paths specifically excluded by the CLI. 
""" from argparse import ArgumentParser + op = ArgumentParser( - description='Run jstests JS shell tests', - epilog='Shell output format: [ pass | fail | timeout | skip ] progress | time') - op.add_argument('--xul-info', dest='xul_info_src', - help='config data for xulRuntime' - ' (avoids search for config/autoconf.mk)') + description="Run jstests JS shell tests", + epilog="Shell output format: [ pass | fail | timeout | skip ] progress | time", + ) + op.add_argument( + "--xul-info", + dest="xul_info_src", + help="config data for xulRuntime" " (avoids search for config/autoconf.mk)", + ) harness_og = op.add_argument_group("Harness Controls", "Control how tests are run.") - harness_og.add_argument('-j', '--worker-count', type=int, - default=max(1, get_cpu_count()), - help='Number of tests to run in parallel' - ' (default %(default)s)') - harness_og.add_argument('-t', '--timeout', type=float, default=150.0, - help='Set maximum time a test is allows to run' - ' (in seconds).') - harness_og.add_argument('--show-slow', action='store_true', - help='Show tests taking longer than a minimum time' - ' (in seconds).') - harness_og.add_argument('--slow-test-threshold', type=float, default=5.0, - help='Time in seconds a test can take until it is' - 'considered slow (default %(default)s).') - harness_og.add_argument('-a', '--args', dest='shell_args', default='', - help='Extra args to pass to the JS shell.') - harness_og.add_argument('--feature-args', dest='feature_args', default='', - help='Extra args to pass to the JS shell even when feature-testing.') - harness_og.add_argument('--jitflags', dest='jitflags', default='none', - type=str, - help='IonMonkey option combinations. One of all,' - ' debug, ion, and none (default %(default)s).') - harness_og.add_argument('--tbpl', action='store_true', - help='Runs each test in all configurations tbpl' - ' tests.') - harness_og.add_argument('--tbpl-debug', action='store_true', - help='Runs each test in some faster configurations' - ' tbpl tests.') - harness_og.add_argument('-g', '--debug', action='store_true', - help='Run a test in debugger.') - harness_og.add_argument('--debugger', default='gdb -q --args', - help='Debugger command.') - harness_og.add_argument('-J', '--jorendb', action='store_true', - help='Run under JS debugger.') - harness_og.add_argument('--passthrough', action='store_true', - help='Run tests with stdin/stdout attached to' - ' caller.') - harness_og.add_argument('--test-reflect-stringify', dest="test_reflect_stringify", - help="instead of running tests, use them to test the " - "Reflect.stringify code in specified file") - harness_og.add_argument('--valgrind', action='store_true', - help='Run tests in valgrind.') - harness_og.add_argument('--valgrind-args', default='', - help='Extra args to pass to valgrind.') - harness_og.add_argument('--rr', action='store_true', - help='Run tests under RR record-and-replay debugger.') - harness_og.add_argument('-C', '--check-output', action='store_true', - help='Run tests to check output for different jit-flags') - harness_og.add_argument('--remote', action='store_true', - help='Run tests on a remote device') - harness_og.add_argument('--deviceIP', action='store', - type=str, dest='device_ip', - help='IP address of remote device to test') - harness_og.add_argument('--devicePort', action='store', - type=int, dest='device_port', default=20701, - help='port of remote device to test') - harness_og.add_argument('--deviceSerial', action='store', - type=str, dest='device_serial', default=None, - help='ADB device 
serial number of remote device to test') - harness_og.add_argument('--remoteTestRoot', dest='remote_test_root', action='store', - type=str, default='/data/local/tmp/test_root', - help='The remote directory to use as test root' - ' (e.g. %(default)s)') - harness_og.add_argument('--localLib', dest='local_lib', action='store', - type=str, - help='The location of libraries to push -- preferably' - ' stripped') + harness_og.add_argument( + "-j", + "--worker-count", + type=int, + default=max(1, get_cpu_count()), + help="Number of tests to run in parallel" " (default %(default)s)", + ) + harness_og.add_argument( + "-t", + "--timeout", + type=float, + default=150.0, + help="Set maximum time a test is allows to run" " (in seconds).", + ) + harness_og.add_argument( + "--show-slow", + action="store_true", + help="Show tests taking longer than a minimum time" " (in seconds).", + ) + harness_og.add_argument( + "--slow-test-threshold", + type=float, + default=5.0, + help="Time in seconds a test can take until it is" + "considered slow (default %(default)s).", + ) + harness_og.add_argument( + "-a", + "--args", + dest="shell_args", + default="", + help="Extra args to pass to the JS shell.", + ) + harness_og.add_argument( + "--feature-args", + dest="feature_args", + default="", + help="Extra args to pass to the JS shell even when feature-testing.", + ) + harness_og.add_argument( + "--jitflags", + dest="jitflags", + default="none", + type=str, + help="IonMonkey option combinations. One of all," + " debug, ion, and none (default %(default)s).", + ) + harness_og.add_argument( + "--tbpl", + action="store_true", + help="Runs each test in all configurations tbpl" " tests.", + ) + harness_og.add_argument( + "--tbpl-debug", + action="store_true", + help="Runs each test in some faster configurations" " tbpl tests.", + ) + harness_og.add_argument( + "-g", "--debug", action="store_true", help="Run a test in debugger." + ) + harness_og.add_argument( + "--debugger", default="gdb -q --args", help="Debugger command." + ) + harness_og.add_argument( + "-J", "--jorendb", action="store_true", help="Run under JS debugger." + ) + harness_og.add_argument( + "--passthrough", + action="store_true", + help="Run tests with stdin/stdout attached to" " caller.", + ) + harness_og.add_argument( + "--test-reflect-stringify", + dest="test_reflect_stringify", + help="instead of running tests, use them to test the " + "Reflect.stringify code in specified file", + ) + harness_og.add_argument( + "--valgrind", action="store_true", help="Run tests in valgrind." + ) + harness_og.add_argument( + "--valgrind-args", default="", help="Extra args to pass to valgrind." 
+ ) + harness_og.add_argument( + "--rr", + action="store_true", + help="Run tests under RR record-and-replay debugger.", + ) + harness_og.add_argument( + "-C", + "--check-output", + action="store_true", + help="Run tests to check output for different jit-flags", + ) + harness_og.add_argument( + "--remote", action="store_true", help="Run tests on a remote device" + ) + harness_og.add_argument( + "--deviceIP", + action="store", + type=str, + dest="device_ip", + help="IP address of remote device to test", + ) + harness_og.add_argument( + "--devicePort", + action="store", + type=int, + dest="device_port", + default=20701, + help="port of remote device to test", + ) + harness_og.add_argument( + "--deviceSerial", + action="store", + type=str, + dest="device_serial", + default=None, + help="ADB device serial number of remote device to test", + ) + harness_og.add_argument( + "--remoteTestRoot", + dest="remote_test_root", + action="store", + type=str, + default="/data/local/tmp/test_root", + help="The remote directory to use as test root" " (e.g. %(default)s)", + ) + harness_og.add_argument( + "--localLib", + dest="local_lib", + action="store", + type=str, + help="The location of libraries to push -- preferably" " stripped", + ) input_og = op.add_argument_group("Inputs", "Change what tests are run.") - input_og.add_argument('-f', '--file', dest='test_file', action='append', - help='Get tests from the given file.') - input_og.add_argument('-x', '--exclude-file', action='append', - help='Exclude tests from the given file.') - input_og.add_argument('--wpt', dest='wpt', - choices=['enabled', 'disabled', 'if-running-everything'], - default='if-running-everything', - help="Enable or disable shell web-platform-tests " - "(default: enable if no test paths are specified).") - input_og.add_argument('--include', action='append', dest='requested_paths', default=[], - help='Include the given test file or directory.') - input_og.add_argument('--exclude', action='append', dest='excluded_paths', default=[], - help='Exclude the given test file or directory.') - input_og.add_argument('-d', '--exclude-random', dest='random', - action='store_false', - help='Exclude tests marked as "random."') - input_og.add_argument('--run-skipped', action='store_true', - help='Run tests marked as "skip."') - input_og.add_argument('--run-only-skipped', action='store_true', - help='Run only tests marked as "skip."') - input_og.add_argument('--run-slow-tests', action='store_true', - help='Do not skip tests marked as "slow."') - input_og.add_argument('--no-extensions', action='store_true', - help='Run only tests conforming to the ECMAScript 5' - ' standard.') - input_og.add_argument('--repeat', type=int, default=1, - help='Repeat tests the given number of times.') + input_og.add_argument( + "-f", + "--file", + dest="test_file", + action="append", + help="Get tests from the given file.", + ) + input_og.add_argument( + "-x", + "--exclude-file", + action="append", + help="Exclude tests from the given file.", + ) + input_og.add_argument( + "--wpt", + dest="wpt", + choices=["enabled", "disabled", "if-running-everything"], + default="if-running-everything", + help="Enable or disable shell web-platform-tests " + "(default: enable if no test paths are specified).", + ) + input_og.add_argument( + "--include", + action="append", + dest="requested_paths", + default=[], + help="Include the given test file or directory.", + ) + input_og.add_argument( + "--exclude", + action="append", + dest="excluded_paths", + default=[], + help="Exclude the given 
test file or directory.", + ) + input_og.add_argument( + "-d", + "--exclude-random", + dest="random", + action="store_false", + help='Exclude tests marked as "random."', + ) + input_og.add_argument( + "--run-skipped", action="store_true", help='Run tests marked as "skip."' + ) + input_og.add_argument( + "--run-only-skipped", + action="store_true", + help='Run only tests marked as "skip."', + ) + input_og.add_argument( + "--run-slow-tests", + action="store_true", + help='Do not skip tests marked as "slow."', + ) + input_og.add_argument( + "--no-extensions", + action="store_true", + help="Run only tests conforming to the ECMAScript 5" " standard.", + ) + input_og.add_argument( + "--repeat", type=int, default=1, help="Repeat tests the given number of times." + ) output_og = op.add_argument_group("Output", "Modify the harness and tests output.") - output_og.add_argument('-s', '--show-cmd', action='store_true', - help='Show exact commandline used to run each test.') - output_og.add_argument('-o', '--show-output', action='store_true', - help="Print each test's output to the file given by" - " --output-file.") - output_og.add_argument('-F', '--failed-only', action='store_true', - help="If a --show-* option is given, only print" - " output for failed tests.") - output_og.add_argument('--no-show-failed', action='store_true', - help="Don't print output for failed tests" - " (no-op with --show-output).") - output_og.add_argument('-O', '--output-file', - help='Write all output to the given file' - ' (default: stdout).') - output_og.add_argument('--failure-file', - help='Write all not-passed tests to the given file.') - output_og.add_argument('--no-progress', dest='hide_progress', - action='store_true', - help='Do not show the progress bar.') - output_og.add_argument('--tinderbox', dest='format', action='store_const', - const='automation', - help='Use automation-parseable output format.') - output_og.add_argument('--format', dest='format', default='none', - choices=['automation', 'none'], - help='Output format. Either automation or none' - ' (default %(default)s).') - output_og.add_argument('--log-wptreport', dest='wptreport', action='store', - help='Path to write a Web Platform Tests report (wptreport)') - output_og.add_argument('--this-chunk', type=int, default=1, - help='The test chunk to run.') - output_og.add_argument('--total-chunks', type=int, default=1, - help='The total number of test chunks.') - - special_og = op.add_argument_group("Special", "Special modes that do not run tests.") - special_og.add_argument('--make-manifests', metavar='BASE_TEST_PATH', - help='Generate reftest manifest files.') - - op.add_argument('--js-shell', metavar='JS_SHELL', - help='JS shell to run tests with') + output_og.add_argument( + "-s", + "--show-cmd", + action="store_true", + help="Show exact commandline used to run each test.", + ) + output_og.add_argument( + "-o", + "--show-output", + action="store_true", + help="Print each test's output to the file given by" " --output-file.", + ) + output_og.add_argument( + "-F", + "--failed-only", + action="store_true", + help="If a --show-* option is given, only print" " output for failed tests.", + ) + output_og.add_argument( + "--no-show-failed", + action="store_true", + help="Don't print output for failed tests" " (no-op with --show-output).", + ) + output_og.add_argument( + "-O", + "--output-file", + help="Write all output to the given file" " (default: stdout).", + ) + output_og.add_argument( + "--failure-file", help="Write all not-passed tests to the given file." 
+ ) + output_og.add_argument( + "--no-progress", + dest="hide_progress", + action="store_true", + help="Do not show the progress bar.", + ) + output_og.add_argument( + "--tinderbox", + dest="format", + action="store_const", + const="automation", + help="Use automation-parseable output format.", + ) + output_og.add_argument( + "--format", + dest="format", + default="none", + choices=["automation", "none"], + help="Output format. Either automation or none" " (default %(default)s).", + ) + output_og.add_argument( + "--log-wptreport", + dest="wptreport", + action="store", + help="Path to write a Web Platform Tests report (wptreport)", + ) + output_og.add_argument( + "--this-chunk", type=int, default=1, help="The test chunk to run." + ) + output_og.add_argument( + "--total-chunks", type=int, default=1, help="The total number of test chunks." + ) + + special_og = op.add_argument_group( + "Special", "Special modes that do not run tests." + ) + special_og.add_argument( + "--make-manifests", + metavar="BASE_TEST_PATH", + help="Generate reftest manifest files.", + ) + + op.add_argument("--js-shell", metavar="JS_SHELL", help="JS shell to run tests with") options, args = op.parse_known_args() # Need a shell unless in a special mode. if not options.make_manifests: if not args: - op.error('missing JS_SHELL argument') + op.error("missing JS_SHELL argument") options.js_shell = os.path.abspath(args.pop(0)) requested_paths = set(args) @@ -257,39 +409,47 @@ def parse_args(): # Fill the debugger field, as needed. if options.debug: - if options.debugger == 'lldb': - debugger_prefix = ['lldb', '--'] + if options.debugger == "lldb": + debugger_prefix = ["lldb", "--"] else: debugger_prefix = options.debugger.split() else: debugger_prefix = [] if options.valgrind: - debugger_prefix = ['valgrind'] + options.valgrind_args.split() - if os.uname()[0] == 'Darwin': - debugger_prefix.append('--dsymutil=yes') + debugger_prefix = ["valgrind"] + options.valgrind_args.split() + if os.uname()[0] == "Darwin": + debugger_prefix.append("--dsymutil=yes") options.show_output = True if options.rr: - debugger_prefix = ['rr', 'record'] + debugger_prefix = ["rr", "record"] js_cmd_args = shlex.split(options.shell_args) + shlex.split(options.feature_args) if options.jorendb: options.passthrough = True options.hide_progress = True options.worker_count = 1 - debugger_path = realpath(os.path.join( - abspath(dirname(abspath(__file__))), - '..', '..', 'examples', 'jorendb.js')) - js_cmd_args.extend(['-d', '-f', debugger_path, '--']) - prefix = RefTestCase.build_js_cmd_prefix(options.js_shell, js_cmd_args, - debugger_prefix) + debugger_path = realpath( + os.path.join( + abspath(dirname(abspath(__file__))), + "..", + "..", + "examples", + "jorendb.js", + ) + ) + js_cmd_args.extend(["-d", "-f", debugger_path, "--"]) + prefix = RefTestCase.build_js_cmd_prefix( + options.js_shell, js_cmd_args, debugger_prefix + ) # If files with lists of tests to run were specified, add them to the # requested tests set. if options.test_file: for test_file in options.test_file: requested_paths |= set( - [line.strip() for line in open(test_file).readlines()]) + [line.strip() for line in open(test_file).readlines()] + ) excluded_paths = set(options.excluded_paths) @@ -297,9 +457,9 @@ def parse_args(): # excluded tests set. 
if options.exclude_file: for filename in options.exclude_file: - with open(filename, 'r') as fp: + with open(filename, "r") as fp: for line in fp: - if line.startswith('#'): + if line.startswith("#"): continue line = line.strip() if not line: @@ -312,14 +472,16 @@ def parse_args(): if not options.show_cmd: options.show_output = True try: - options.output_fp = open(options.output_file, 'w') + options.output_fp = open(options.output_file, "w") except IOError as ex: raise SystemExit("Failed to open output file: " + str(ex)) # Hide the progress bar if it will get in the way of other output. - options.hide_progress = (options.format == 'automation' or - not ProgressBar.conservative_isatty() or - options.hide_progress) + options.hide_progress = ( + options.format == "automation" + or not ProgressBar.conservative_isatty() + or options.hide_progress + ) return (options, prefix, requested_paths, excluded_paths) @@ -378,16 +540,21 @@ def load_wpt_tests(xul_tester, requested_paths, excluded_paths, update_manifest= logger = wptlogging.setup({}, {}) - test_manifests = manifestupdate.run(repo_root, manifest_root, logger, - update=update_manifest) + test_manifests = manifestupdate.run( + repo_root, manifest_root, logger, update=update_manifest + ) kwargs = vars(wptcommandline.create_parser().parse_args([])) - kwargs.update({ - "config": os.path.join(manifest_root, "_tests", "web-platform", "wptrunner.local.ini"), - "gecko_e10s": False, - "verify": False, - "wasm": xul_tester.test("wasmIsSupported()"), - }) + kwargs.update( + { + "config": os.path.join( + manifest_root, "_tests", "web-platform", "wptrunner.local.ini" + ), + "gecko_e10s": False, + "verify": False, + "wasm": xul_tester.test("wasmIsSupported()"), + } + ) wptcommandline.set_from_config(kwargs) def filter_jsshell_tests(it): @@ -397,23 +564,29 @@ def filter_jsshell_tests(it): yield item_type, path, tests run_info_extras = products.load_product(kwargs["config"], "firefox")[-1](**kwargs) - run_info = wpttest.get_run_info(kwargs["run_info"], - "firefox", - debug=xul_tester.test("isDebugBuild"), - extras=run_info_extras) + run_info = wpttest.get_run_info( + kwargs["run_info"], + "firefox", + debug=xul_tester.test("isDebugBuild"), + extras=run_info_extras, + ) release_or_beta = xul_tester.test("getBuildConfiguration().release_or_beta") run_info["release_or_beta"] = release_or_beta run_info["nightly_build"] = not release_or_beta - early_beta_or_earlier = xul_tester.test("getBuildConfiguration().early_beta_or_earlier") + early_beta_or_earlier = xul_tester.test( + "getBuildConfiguration().early_beta_or_earlier" + ) run_info["early_beta_or_earlier"] = early_beta_or_earlier - path_filter = testloader.TestFilter(test_manifests, - include=requested_paths, - exclude=excluded_paths) - loader = testloader.TestLoader(test_manifests, - ["testharness"], - run_info, - manifest_filters=[path_filter, filter_jsshell_tests]) + path_filter = testloader.TestFilter( + test_manifests, include=requested_paths, exclude=excluded_paths + ) + loader = testloader.TestLoader( + test_manifests, + ["testharness"], + run_info, + manifest_filters=[path_filter, filter_jsshell_tests], + ) extra_helper_paths = [ os.path.join(here, "web-platform-test-shims.js"), @@ -444,7 +617,7 @@ def resolve(test_path, script): wpt, test_path, extra_helper_paths=extra_helper_paths_for_test[:], - wpt=test + wpt=test, ) if option: test_case.options.append(option) @@ -466,8 +639,8 @@ def load_tests(options, requested_paths, excluded_paths): if options.xul_info_src is None: xul_info = 
manifest.XULInfo.create(options.js_shell) else: - xul_abi, xul_os, xul_debug = options.xul_info_src.split(r':') - xul_debug = xul_debug.lower() == 'true' + xul_abi, xul_os, xul_debug = options.xul_info_src.split(r":") + xul_debug = xul_debug.lower() == "true" xul_info = manifest.XULInfo(xul_abi, xul_os, xul_debug) feature_args = shlex.split(options.feature_args) xul_tester = manifest.XULInfoTester(xul_info, options, feature_args) @@ -478,18 +651,18 @@ def load_tests(options, requested_paths, excluded_paths): test_gen = manifest.load_reftests(test_dir, path_options, xul_tester) # WPT tests are already run in the browser in their own harness. - wpt_enabled = (options.wpt == 'enabled' or - (options.wpt == 'if-running-everything' and - len(requested_paths) == 0 and - not options.make_manifests)) + wpt_enabled = options.wpt == "enabled" or ( + options.wpt == "if-running-everything" + and len(requested_paths) == 0 + and not options.make_manifests + ) if wpt_enabled: - wpt_tests = load_wpt_tests(xul_tester, - requested_paths, - excluded_paths) + wpt_tests = load_wpt_tests(xul_tester, requested_paths, excluded_paths) test_count += len(wpt_tests) test_gen = chain(test_gen, wpt_tests) if options.test_reflect_stringify is not None: + def trs_gen(tests): for test in tests: test.test_reflect_stringify = options.test_reflect_stringify @@ -499,6 +672,7 @@ def trs_gen(tests): test.random = False test.slow = False yield test + test_gen = trs_gen(test_gen) if options.make_manifests: @@ -508,13 +682,14 @@ def trs_gen(tests): # Create a new test list. Apply each TBPL configuration to every test. flags_list = None if options.tbpl: - flags_list = get_jitflags('all') + flags_list = get_jitflags("all") elif options.tbpl_debug: - flags_list = get_jitflags('debug') + flags_list = get_jitflags("debug") else: flags_list = get_jitflags(options.jitflags, none=None) if flags_list: + def flag_gen(tests): for test in tests: for jitflags in flags_list: @@ -522,18 +697,18 @@ def flag_gen(tests): tmp_test.jitflags = copy(test.jitflags) tmp_test.jitflags.extend(jitflags) yield tmp_test + test_count = test_count * len(flags_list) test_gen = flag_gen(test_gen) if options.test_file: paths = set() for test_file in options.test_file: - paths |= set( - [line.strip() for line in open(test_file).readlines()]) + paths |= set([line.strip() for line in open(test_file).readlines()]) test_gen = (_ for _ in test_gen if _.path in paths) if options.no_extensions: - pattern = os.sep + 'extensions' + os.sep + pattern = os.sep + "extensions" + os.sep test_gen = (_ for _ in test_gen if pattern not in _.path) if not options.random: @@ -555,30 +730,35 @@ def flag_gen(tests): def main(): options, prefix, requested_paths, excluded_paths = parse_args() - if options.js_shell is not None and not (isfile(options.js_shell) and - os.access(options.js_shell, os.X_OK)): - if (platform.system() != 'Windows' or - isfile(options.js_shell) or not - isfile(options.js_shell + ".exe") or not - os.access(options.js_shell + ".exe", os.X_OK)): - print('Could not find executable shell: ' + options.js_shell) + if options.js_shell is not None and not ( + isfile(options.js_shell) and os.access(options.js_shell, os.X_OK) + ): + if ( + platform.system() != "Windows" + or isfile(options.js_shell) + or not isfile(options.js_shell + ".exe") + or not os.access(options.js_shell + ".exe", os.X_OK) + ): + print("Could not find executable shell: " + options.js_shell) return 1 test_count, test_gen = load_tests(options, requested_paths, excluded_paths) test_environment = 
get_environment_overlay(options.js_shell) if test_count == 0: - print('no tests selected') + print("no tests selected") return 1 test_dir = dirname(abspath(__file__)) if options.debug: if test_count > 1: - print('Multiple tests match command line arguments,' - ' debugger can only run one') + print( + "Multiple tests match command line arguments," + " debugger can only run one" + ) for tc in test_gen: - print(' {}'.format(tc.path)) + print(" {}".format(tc.path)) return 2 cmd = next(test_gen).get_command(prefix) @@ -601,11 +781,12 @@ def main(): test_gen = list(test_gen)[start:end] if options.remote: - results = ResultsSink('jstests', options, test_count) + results = ResultsSink("jstests", options, test_count) try: from lib.remote import init_remote_dir, init_device + device = init_device(options) - jtd_tests = posixpath.join(options.remote_test_root, 'tests', 'tests') + jtd_tests = posixpath.join(options.remote_test_root, "tests", "tests") init_remote_dir(device, jtd_tests) device.push(test_dir, jtd_tests, timeout=600) device.chmod(jtd_tests, recursive=True) @@ -620,7 +801,7 @@ def main(): return 0 if results.all_passed() else 1 with changedir(test_dir), change_env(test_environment): - results = ResultsSink('jstests', options, test_count) + results = ResultsSink("jstests", options, test_count) try: for out in run_all_tests(test_gen, prefix, results.pb, options): results.push(out) @@ -638,13 +819,10 @@ def run_test_remote(test, device, prefix, options): cmd = test.get_command(prefix) test_root_parent = os.path.dirname(test.root) - jtd_tests = posixpath.join(options.remote_test_root, 'tests') + jtd_tests = posixpath.join(options.remote_test_root, "tests") cmd = [_.replace(test_root_parent, jtd_tests) for _ in cmd] - env = { - 'TZ': 'PST8PDT', - 'LD_LIBRARY_PATH': os.path.dirname(prefix[0]) - } + env = {"TZ": "PST8PDT", "LD_LIBRARY_PATH": os.path.dirname(prefix[0])} adb_cmd = ADBDevice._escape_command_line(cmd) start = datetime.now() @@ -652,16 +830,16 @@ def run_test_remote(test, device, prefix, options): # Allow ADBError or ADBTimeoutError to terminate the test run, # but handle ADBProcessError in order to support the use of # non-zero exit codes in the JavaScript shell tests. - out = device.shell_output(adb_cmd, env=env, - cwd=options.remote_test_root, - timeout=int(options.timeout)) + out = device.shell_output( + adb_cmd, env=env, cwd=options.remote_test_root, timeout=int(options.timeout) + ) returncode = 0 except ADBProcessError as e: # Treat ignorable intermittent adb communication errors as # skipped tests. 
out = str(e.adb_process.stdout) returncode = e.adb_process.exitcode - re_ignore = re.compile(r'error: (closed|device .* not found)') + re_ignore = re.compile(r"error: (closed|device .* not found)") if returncode == 1 and re_ignore.search(out): print("Skipping {} due to ignorable adb error {}".format(test.path, out)) test.skip_if_cond = "true" @@ -674,5 +852,5 @@ def run_test_remote(test, device, prefix, options): return TestOutput(test, cmd, out, out, returncode, elapsed, False) -if __name__ == '__main__': +if __name__ == "__main__": sys.exit(main()) diff --git a/js/src/tests/lib/jittests.py b/js/src/tests/lib/jittests.py index 2d498471ea5e23..3fadc32aab5bfb 100755 --- a/js/src/tests/lib/jittests.py +++ b/js/src/tests/lib/jittests.py @@ -15,7 +15,7 @@ from collections import namedtuple from datetime import datetime -if sys.platform.startswith('linux') or sys.platform.startswith('darwin'): +if sys.platform.startswith("linux") or sys.platform.startswith("darwin"): from .tasks_unix import run_all_tests else: from .tasks_win import run_all_tests @@ -28,10 +28,10 @@ TESTS_LIB_DIR = os.path.dirname(os.path.abspath(__file__)) JS_DIR = os.path.dirname(os.path.dirname(TESTS_LIB_DIR)) TOP_SRC_DIR = os.path.dirname(os.path.dirname(JS_DIR)) -TEST_DIR = os.path.join(JS_DIR, 'jit-test', 'tests') -LIB_DIR = os.path.join(JS_DIR, 'jit-test', 'lib') + os.path.sep -MODULE_DIR = os.path.join(JS_DIR, 'jit-test', 'modules') + os.path.sep -JS_TESTS_DIR = posixpath.join(JS_DIR, 'tests') +TEST_DIR = os.path.join(JS_DIR, "jit-test", "tests") +LIB_DIR = os.path.join(JS_DIR, "jit-test", "lib") + os.path.sep +MODULE_DIR = os.path.join(JS_DIR, "jit-test", "modules") + os.path.sep +JS_TESTS_DIR = posixpath.join(JS_DIR, "tests") # Backported from Python 3.1 posixpath.py @@ -51,7 +51,7 @@ def _relpath(path, start=None): # Work out how much of the filepath is shared by start and path. i = len(os.path.commonprefix([start_list, path_list])) - rel_list = [os.pardir] * (len(start_list)-i) + path_list[i:] + rel_list = [os.pardir] * (len(start_list) - i) + path_list[i:] if not rel_list: return os.curdir return os.path.join(*rel_list) @@ -59,13 +59,13 @@ def _relpath(path, start=None): # Mapping of Python chars to their javascript string representation. QUOTE_MAP = { - '\\': '\\\\', - '\b': '\\b', - '\f': '\\f', - '\n': '\\n', - '\r': '\\r', - '\t': '\\t', - '\v': '\\v' + "\\": "\\\\", + "\b": "\\b", + "\f": "\\f", + "\n": "\\n", + "\r": "\\r", + "\t": "\\t", + "\v": "\\v", } # Quote the string S, javascript style. 
@@ -75,7 +75,7 @@ def js_quote(quote, s): result = quote for c in s: if c == quote: - result += '\\' + quote + result += "\\" + quote elif c in QUOTE_MAP: result += QUOTE_MAP[c] else: @@ -97,16 +97,20 @@ def extend_condition(condition, value): class JitTest: VALGRIND_CMD = [] - paths = (d for d in os.environ['PATH'].split(os.pathsep)) - valgrinds = (os.path.join(d, 'valgrind') for d in paths) + paths = (d for d in os.environ["PATH"].split(os.pathsep)) + valgrinds = (os.path.join(d, "valgrind") for d in paths) if any(os.path.exists(p) for p in valgrinds): VALGRIND_CMD = [ - 'valgrind', '-q', '--smc-check=all-non-file', - '--error-exitcode=1', '--gen-suppressions=all', - '--show-possibly-lost=no', '--leak-check=full', + "valgrind", + "-q", + "--smc-check=all-non-file", + "--error-exitcode=1", + "--gen-suppressions=all", + "--show-possibly-lost=no", + "--leak-check=full", ] - if os.uname()[0] == 'Darwin': - VALGRIND_CMD.append('--dsymutil=yes') + if os.uname()[0] == "Darwin": + VALGRIND_CMD.append("--dsymutil=yes") del paths del valgrinds @@ -143,7 +147,7 @@ def __init__(self, path): # List of other configurations to test with all existing variants. self.test_join = [] # Errors to expect and consider passing - self.expect_error = '' + self.expect_error = "" # Exit status to expect from shell self.expect_status = 0 # Exit status or error output. @@ -154,7 +158,7 @@ def __init__(self, path): # Skip-if condition. We don't have a xulrunner, but we can ask the shell # directly. - self.skip_if_cond = '' + self.skip_if_cond = "" self.skip_variant_if_cond = {} # Expected by the test runner. Always true for jit-tests. @@ -188,7 +192,9 @@ def copy_and_extend_jitflags(self, variant): t.jitflags.extend(variant) for flags in variant: if flags in self.skip_variant_if_cond: - t.skip_if_cond = extend_condition(t.skip_if_cond, self.skip_variant_if_cond[flags]) + t.skip_if_cond = extend_condition( + t.skip_if_cond, self.skip_variant_if_cond[flags] + ) return t def copy_variants(self, variants): @@ -204,7 +210,7 @@ def copy_variants(self, variants): # For each list of jit flags, make a copy of the test. return [self.copy_and_extend_jitflags(v) for v in variants] - COOKIE = b'|jit-test|' + COOKIE = b"|jit-test|" # We would use 500019 (5k19), but quit() only accepts values up to 127, due to fuzzers SKIPPED_EXIT_STATUS = 59 @@ -212,11 +218,11 @@ def copy_variants(self, variants): @classmethod def find_directives(cls, file_name): - meta = '' + meta = "" line = open(file_name, "rb").readline() i = line.find(cls.COOKIE) if i != -1: - meta = ';' + line[i + len(cls.COOKIE):].decode(errors='strict').strip('\n') + meta = ";" + line[i + len(cls.COOKIE) :].decode(errors="strict").strip("\n") return meta @classmethod @@ -229,7 +235,7 @@ def from_file(cls, path, options): # the directive file is the same as in the test file. Only # the first line is considered, just as for the test file. 
- dir_meta = '' + dir_meta = "" dir_name = os.path.dirname(path) if dir_name in cls.Directives: dir_meta = cls.Directives[dir_name] @@ -242,86 +248,103 @@ def from_file(cls, path, options): filename, file_extension = os.path.splitext(path) meta = cls.find_directives(path) - if meta != '' or dir_meta != '': + if meta != "" or dir_meta != "": meta = meta + dir_meta - parts = meta.split(';') + parts = meta.split(";") for part in parts: part = part.strip() if not part: continue - name, _, value = part.partition(':') + name, _, value = part.partition(":") if value: value = value.strip() - if name == 'error': + if name == "error": test.expect_error = value - elif name == 'exitstatus': + elif name == "exitstatus": try: status = int(value, 0) if status == test.SKIPPED_EXIT_STATUS: - print("warning: jit-tests uses {} as a sentinel" - " return value {}", test.SKIPPED_EXIT_STATUS, path) + print( + "warning: jit-tests uses {} as a sentinel" + " return value {}", + test.SKIPPED_EXIT_STATUS, + path, + ) else: test.expect_status = status except ValueError: - print("warning: couldn't parse exit status" - " {}".format(value)) - elif name == 'thread-count': + print( + "warning: couldn't parse exit status" + " {}".format(value) + ) + elif name == "thread-count": try: - test.jitflags.append('--thread-count={}'.format( - int(value, 0))) + test.jitflags.append( + "--thread-count={}".format(int(value, 0)) + ) except ValueError: - print("warning: couldn't parse thread-count" - " {}".format(value)) - elif name == 'include': + print( + "warning: couldn't parse thread-count" + " {}".format(value) + ) + elif name == "include": test.other_lib_includes.append(value) - elif name == 'local-include': + elif name == "local-include": test.other_script_includes.append(value) - elif name == 'skip-if': + elif name == "skip-if": test.skip_if_cond = extend_condition(test.skip_if_cond, value) - elif name == 'skip-variant-if': + elif name == "skip-variant-if": try: - [variant, condition] = value.split(',') + [variant, condition] = value.split(",") test.skip_variant_if_cond[variant] = extend_condition( - test.skip_if_cond, - condition) + test.skip_if_cond, condition + ) except ValueError: print("warning: couldn't parse skip-variant-if") else: - print('{}: warning: unrecognized |jit-test| attribute' - ' {}'.format(path, part)) + print( + "{}: warning: unrecognized |jit-test| attribute" + " {}".format(path, part) + ) else: - if name == 'slow': + if name == "slow": test.slow = True - elif name == 'allow-oom': + elif name == "allow-oom": test.allow_oom = True - elif name == 'allow-unhandlable-oom': + elif name == "allow-unhandlable-oom": test.allow_unhandlable_oom = True - elif name == 'allow-overrecursed': + elif name == "allow-overrecursed": test.allow_overrecursed = True - elif name == 'valgrind': + elif name == "valgrind": test.valgrind = options.valgrind - elif name == 'tz-pacific': + elif name == "tz-pacific": test.tz_pacific = True - elif name.startswith('test-also='): - test.test_also.append(re.split(r'\s+', name[len('test-also='):])) - elif name.startswith('test-join='): - test.test_join.append(re.split(r'\s+', name[len('test-join='):])) - elif name == 'module': + elif name.startswith("test-also="): + test.test_also.append( + re.split(r"\s+", name[len("test-also=") :]) + ) + elif name.startswith("test-join="): + test.test_join.append( + re.split(r"\s+", name[len("test-join=") :]) + ) + elif name == "module": test.is_module = True - elif name == 'crash': + elif name == "crash": test.expect_crash = True - elif 
name.startswith('--'): + elif name.startswith("--"): # // |jit-test| --ion-gvn=off; --no-sse4 test.jitflags.append(name) else: - print('{}: warning: unrecognized |jit-test| attribute' - ' {}'.format(path, part)) + print( + "{}: warning: unrecognized |jit-test| attribute" + " {}".format(path, part) + ) if options.valgrind_all: test.valgrind = True if options.test_reflect_stringify is not None: - test.expect_error = '' + test.expect_error = "" test.expect_status = 0 return test @@ -332,8 +355,8 @@ def command(self, prefix, libdir, moduledir, remote_prefix=None): path = self.path.replace(TEST_DIR, remote_prefix) scriptdir_var = os.path.dirname(path) - if not scriptdir_var.endswith('/'): - scriptdir_var += '/' + if not scriptdir_var.endswith("/"): + scriptdir_var += "/" # Platforms where subprocess immediately invokes exec do not care # whether we use double or single quotes. On windows and when using @@ -346,35 +369,40 @@ def command(self, prefix, libdir, moduledir, remote_prefix=None): # Don't merge the expressions: We want separate -e arguments to avoid # semicolons in the command line, bug 1351607. - exprs = ["const platform={}".format(js_quote(quotechar, sys.platform)), - "const libdir={}".format(js_quote(quotechar, libdir)), - "const scriptdir={}".format(js_quote(quotechar, scriptdir_var))] + exprs = [ + "const platform={}".format(js_quote(quotechar, sys.platform)), + "const libdir={}".format(js_quote(quotechar, libdir)), + "const scriptdir={}".format(js_quote(quotechar, scriptdir_var)), + ] # We may have specified '-a' or '-d' twice: once via --jitflags, once # via the "|jit-test|" line. Remove dups because they are toggles. cmd = prefix + [] cmd += list(set(self.jitflags)) for expr in exprs: - cmd += ['-e', expr] + cmd += ["-e", expr] for inc in self.other_lib_includes: - cmd += ['-f', libdir + inc] + cmd += ["-f", libdir + inc] for inc in self.other_script_includes: - cmd += ['-f', scriptdir_var + inc] + cmd += ["-f", scriptdir_var + inc] if self.skip_if_cond: - cmd += ['-e', 'if ({}) quit({})'.format(self.skip_if_cond, self.SKIPPED_EXIT_STATUS)] - cmd += ['--module-load-path', moduledir] + cmd += [ + "-e", + "if ({}) quit({})".format(self.skip_if_cond, self.SKIPPED_EXIT_STATUS), + ] + cmd += ["--module-load-path", moduledir] if self.is_module: - cmd += ['--module', path] + cmd += ["--module", path] elif self.test_reflect_stringify is None: - cmd += ['-f', path] + cmd += ["-f", path] else: - cmd += ['--', self.test_reflect_stringify, "--check", path] + cmd += ["--", self.test_reflect_stringify, "--check", path] if self.valgrind: cmd = self.VALGRIND_CMD + cmd if self.allow_unhandlable_oom or self.expect_crash: - cmd += ['--suppress-minidump'] + cmd += ["--suppress-minidump"] return cmd @@ -391,17 +419,16 @@ def find_tests(substring=None): for dirpath, dirnames, filenames in os.walk(TEST_DIR): dirnames.sort() filenames.sort() - if dirpath == '.': + if dirpath == ".": continue for filename in filenames: - if not filename.endswith('.js'): + if not filename.endswith(".js"): continue - if filename in ('shell.js', 'browser.js'): + if filename in ("shell.js", "browser.js"): continue test = os.path.join(dirpath, filename) - if substring is None \ - or substring in os.path.relpath(test, TEST_DIR): + if substring is None or substring in os.path.relpath(test, TEST_DIR): ans.append(test) return ans @@ -411,19 +438,19 @@ def run_test_remote(test, device, prefix, options): if options.test_reflect_stringify: raise ValueError("can't run Reflect.stringify tests remotely") - cmd = test.command(prefix, - 
posixpath.join(options.remote_test_root, 'lib/'), - posixpath.join(options.remote_test_root, 'modules/'), - posixpath.join(options.remote_test_root, 'tests')) + cmd = test.command( + prefix, + posixpath.join(options.remote_test_root, "lib/"), + posixpath.join(options.remote_test_root, "modules/"), + posixpath.join(options.remote_test_root, "tests"), + ) if options.show_cmd: print(escape_cmdline(cmd)) - env = { - 'LD_LIBRARY_PATH': os.path.dirname(prefix[0]) - } + env = {"LD_LIBRARY_PATH": os.path.dirname(prefix[0])} if test.tz_pacific: - env['TZ'] = 'PST8PDT' + env["TZ"] = "PST8PDT" # replace with shlex.join when move to Python 3.8+ cmd = ADBDevice._escape_command_line(cmd) @@ -432,16 +459,16 @@ def run_test_remote(test, device, prefix, options): # Allow ADBError or ADBTimeoutError to terminate the test run, # but handle ADBProcessError in order to support the use of # non-zero exit codes in the JavaScript shell tests. - out = device.shell_output(cmd, env=env, - cwd=options.remote_test_root, - timeout=int(options.timeout)) + out = device.shell_output( + cmd, env=env, cwd=options.remote_test_root, timeout=int(options.timeout) + ) returncode = 0 except ADBProcessError as e: # Treat ignorable intermittent adb communication errors as # skipped tests. out = str(e.adb_process.stdout) returncode = e.adb_process.exitcode - re_ignore = re.compile(r'error: (closed|device .* not found)') + re_ignore = re.compile(r"error: (closed|device .* not found)") if returncode == 1 and re_ignore.search(out): print("Skipping {} due to ignorable adb error {}".format(test.path, out)) test.skip_if_cond = "true" @@ -461,15 +488,17 @@ def check_output(out, err, rc, timed_out, test, options): return True if timed_out: - if os.path.normpath(test.relpath_tests).replace(os.sep, '/') \ - in options.ignore_timeouts: + if ( + os.path.normpath(test.relpath_tests).replace(os.sep, "/") + in options.ignore_timeouts + ): return True # The shell sometimes hangs on shutdown on Windows 7 and Windows # Server 2008. See bug 970063 comment 7 for a description of the # problem. Until bug 956899 is fixed, ignore timeouts on these # platforms (versions 6.0 and 6.1). - if sys.platform == 'win32': + if sys.platform == "win32": ver = sys.getwindowsversion() if ver.major == 6 and ver.minor <= 1: return True @@ -479,7 +508,7 @@ def check_output(out, err, rc, timed_out, test, options): # The shell exits with code 3 on uncaught exceptions. # Sometimes 0 is returned on Windows for unknown reasons. # See bug 899697. - if sys.platform in ['win32', 'cygwin']: + if sys.platform in ["win32", "cygwin"]: if rc != 3 and rc != 0: return False else: @@ -488,22 +517,22 @@ def check_output(out, err, rc, timed_out, test, options): return test.expect_error in err - for line in out.split('\n'): - if line.startswith('Trace stats check failed'): + for line in out.split("\n"): + if line.startswith("Trace stats check failed"): return False - for line in err.split('\n'): - if 'Assertion failed:' in line: + for line in err.split("\n"): + if "Assertion failed:" in line: return False if test.expect_crash: # Python 3 on Windows interprets process exit codes as unsigned # integers, where Python 2 used to allow signed integers. Account for # each possibility here. 
- if sys.platform == 'win32' and rc in (3 - 2 ** 31, 3 + 2 ** 31): + if sys.platform == "win32" and rc in (3 - 2 ** 31, 3 + 2 ** 31): return True - if sys.platform != 'win32' and rc == -11: + if sys.platform != "win32" and rc == -11: return True # When building with ASan enabled, ASan will convert the -11 returned @@ -521,24 +550,31 @@ def check_output(out, err, rc, timed_out, test, options): # Tests which expect a timeout check for exit code 6. # Sometimes 0 is returned on Windows for unknown reasons. # See bug 899697. - if sys.platform in ['win32', 'cygwin'] and rc == 0: + if sys.platform in ["win32", "cygwin"] and rc == 0: return True # Allow a non-zero exit code if we want to allow OOM, but only if we # actually got OOM. - if test.allow_oom and 'out of memory' in err \ - and 'Assertion failure' not in err and 'MOZ_CRASH' not in err: + if ( + test.allow_oom + and "out of memory" in err + and "Assertion failure" not in err + and "MOZ_CRASH" not in err + ): return True # Allow a non-zero exit code if we want to allow unhandlable OOM, but # only if we actually got unhandlable OOM. - if test.allow_unhandlable_oom and 'MOZ_CRASH([unhandlable oom]' in err: + if test.allow_unhandlable_oom and "MOZ_CRASH([unhandlable oom]" in err: return True # Allow a non-zero exit code if we want to all too-much-recursion and # the test actually over-recursed. - if test.allow_overrecursed and 'too much recursion' in err \ - and 'Assertion failure' not in err: + if ( + test.allow_overrecursed + and "too much recursion" in err + and "Assertion failure" not in err + ): return True # Allow a zero exit code if we are running under a sanitizer that @@ -568,18 +604,21 @@ def print_automation_format(ok, res, slog): result = "TEST-PASS" if ok else "TEST-UNEXPECTED-FAIL" message = "Success" if ok else res.describe_failure() jitflags = " ".join(res.test.jitflags) - print("{} | {} | {} (code {}, args \"{}\") [{:.1f} s]".format( - result, res.test.relpath_top, message, res.rc, jitflags, res.dt)) + print( + '{} | {} | {} (code {}, args "{}") [{:.1f} s]'.format( + result, res.test.relpath_top, message, res.rc, jitflags, res.dt + ) + ) details = { - 'message': message, - 'extra': { - 'jitflags': jitflags, - } + "message": message, + "extra": { + "jitflags": jitflags, + }, } if res.extra: - details['extra'].update(res.extra) - slog.test(res.test.relpath_tests, 'PASS' if ok else 'FAIL', res.dt, **details) + details["extra"].update(res.extra) + slog.test(res.test.relpath_tests, "PASS" if ok else "FAIL", res.dt, **details) # For failed tests, print as much information as we have, to aid debugging. if ok: @@ -596,63 +635,72 @@ def print_test_summary(num_tests, failures, complete, doing, options): if failures: if options.write_failures: try: - out = open(options.write_failures, 'w') + out = open(options.write_failures, "w") # Don't write duplicate entries when we are doing multiple # failures per job. 
written = set() for res in failures: if res.test.path not in written: - out.write(os.path.relpath(res.test.path, TEST_DIR) - + '\n') + out.write(os.path.relpath(res.test.path, TEST_DIR) + "\n") if options.write_failure_output: out.write(res.out) out.write(res.err) - out.write('Exit code: ' + str(res.rc) + "\n") + out.write("Exit code: " + str(res.rc) + "\n") written.add(res.test.path) out.close() except IOError: - sys.stderr.write("Exception thrown trying to write failure" - " file '{}'\n".format(options.write_failures)) + sys.stderr.write( + "Exception thrown trying to write failure" + " file '{}'\n".format(options.write_failures) + ) traceback.print_exc() - sys.stderr.write('---\n') + sys.stderr.write("---\n") def show_test(res): if options.show_failed: - print(' ' + escape_cmdline(res.cmd)) + print(" " + escape_cmdline(res.cmd)) else: - print(' ' + ' '.join(res.test.jitflags + [res.test.relpath_tests])) + print(" " + " ".join(res.test.jitflags + [res.test.relpath_tests])) - print('FAILURES:') + print("FAILURES:") for res in failures: if not res.timed_out: show_test(res) - print('TIMEOUTS:') + print("TIMEOUTS:") for res in failures: if res.timed_out: show_test(res) else: - print('PASSED ALL' - + ('' if complete - else ' (partial run -- interrupted by user {})'.format(doing))) - - if options.format == 'automation': + print( + "PASSED ALL" + + ( + "" + if complete + else " (partial run -- interrupted by user {})".format(doing) + ) + ) + + if options.format == "automation": num_failures = len(failures) if failures else 0 - print('Result summary:') - print('Passed: {:d}'.format(num_tests - num_failures)) - print('Failed: {:d}'.format(num_failures)) + print("Result summary:") + print("Passed: {:d}".format(num_tests - num_failures)) + print("Failed: {:d}".format(num_failures)) return not failures def create_progressbar(num_tests, options): - if not options.hide_progress and not options.show_cmd \ - and ProgressBar.conservative_isatty(): + if ( + not options.hide_progress + and not options.show_cmd + and ProgressBar.conservative_isatty() + ): fmt = [ - {'value': 'PASS', 'color': 'green'}, - {'value': 'FAIL', 'color': 'red'}, - {'value': 'TIMEOUT', 'color': 'blue'}, - {'value': 'SKIP', 'color': 'brightgray'}, + {"value": "PASS", "color": "green"}, + {"value": "FAIL", "color": "red"}, + {"value": "TIMEOUT", "color": "blue"}, + {"value": "SKIP", "color": "brightgray"}, ] return ProgressBar(num_tests, fmt) return NullProgressBar() @@ -663,7 +711,7 @@ def process_test_results(results, num_tests, pb, options, slog): timeouts = 0 complete = False output_dict = {} - doing = 'before starting' + doing = "before starting" if num_tests == 0: pb.finish(True) @@ -672,8 +720,9 @@ def process_test_results(results, num_tests, pb, options, slog): try: for i, res in enumerate(results): - ok = check_output(res.out, res.err, res.rc, res.timed_out, - res.test, options) + ok = check_output( + res.out, res.err, res.rc, res.timed_out, res.test, options + ) if ok: show_output = options.show_output and not options.failed_only @@ -684,7 +733,7 @@ def process_test_results(results, num_tests, pb, options, slog): pb.beginline() sys.stdout.write(res.out) sys.stdout.write(res.err) - sys.stdout.write('Exit code: {}\n'.format(res.rc)) + sys.stdout.write("Exit code: {}\n".format(res.rc)) if res.test.valgrind and not show_output: pb.beginline() @@ -693,11 +742,13 @@ def process_test_results(results, num_tests, pb, options, slog): if options.check_output: if res.test.path in output_dict.keys(): if output_dict[res.test.path] != 
res.out: - pb.message("FAIL - OUTPUT DIFFERS {}".format(res.test.relpath_tests)) + pb.message( + "FAIL - OUTPUT DIFFERS {}".format(res.test.relpath_tests) + ) else: output_dict[res.test.path] = res.out - doing = 'after {}'.format(res.test.relpath_tests) + doing = "after {}".format(res.test.relpath_tests) if not ok: failures.append(res) if res.timed_out: @@ -706,20 +757,25 @@ def process_test_results(results, num_tests, pb, options, slog): else: pb.message("FAIL - {}".format(res.test.relpath_tests)) - if options.format == 'automation': + if options.format == "automation": print_automation_format(ok, res, slog) n = i + 1 - pb.update(n, { - 'PASS': n - len(failures), - 'FAIL': len(failures), - 'TIMEOUT': timeouts, - 'SKIP': 0 - }) + pb.update( + n, + { + "PASS": n - len(failures), + "FAIL": len(failures), + "TIMEOUT": timeouts, + "SKIP": 0, + }, + ) complete = True except KeyboardInterrupt: - print("TEST-UNEXPECTED-FAIL | jit_test.py" + - " : Test execution interrupted by user") + print( + "TEST-UNEXPECTED-FAIL | jit_test.py" + + " : Test execution interrupted by user" + ) pb.finish(True) return print_test_summary(num_tests, failures, complete, doing, options) @@ -727,7 +783,7 @@ def process_test_results(results, num_tests, pb, options, slog): def run_tests(tests, num_tests, prefix, options, remote=False): slog = None - if options.format == 'automation': + if options.format == "automation": slog = TestLogger("jittests") slog.suite_start() @@ -747,11 +803,27 @@ def run_tests_local(tests, num_tests, prefix, options, slog): # taken from the jstests options processing code, which are frequently # subtly different from the options jit-tests expects. As such, we wrap # them here, as needed. - AdaptorOptions = namedtuple("AdaptorOptions", [ - "worker_count", "passthrough", "timeout", "output_fp", - "hide_progress", "run_skipped", "show_cmd"]) - shim_options = AdaptorOptions(options.max_jobs, False, options.timeout, - sys.stdout, False, True, options.show_cmd) + AdaptorOptions = namedtuple( + "AdaptorOptions", + [ + "worker_count", + "passthrough", + "timeout", + "output_fp", + "hide_progress", + "run_skipped", + "show_cmd", + ], + ) + shim_options = AdaptorOptions( + options.max_jobs, + False, + options.timeout, + sys.stdout, + False, + True, + options.show_cmd, + ) # The test runner wants the prefix as a static on the Test class. JitTest.js_cmd_prefix = prefix @@ -777,25 +849,24 @@ def get_remote_results(tests, device, prefix, options): def run_tests_remote(tests, num_tests, prefix, options, slog): # Setup device with everything needed to run our tests. from mozdevice import ADBError, ADBTimeoutError + try: device = init_device(options) - prefix[0] = posixpath.join(options.remote_test_root, 'bin', 'js') + prefix[0] = posixpath.join(options.remote_test_root, "bin", "js") # Update the test root to point to our test directory. 
- jit_tests_dir = posixpath.join(options.remote_test_root, 'tests') - options.remote_test_root = posixpath.join(jit_tests_dir, 'tests') + jit_tests_dir = posixpath.join(options.remote_test_root, "tests") + options.remote_test_root = posixpath.join(jit_tests_dir, "tests") jtd_tests = posixpath.join(options.remote_test_root) init_remote_dir(device, jit_tests_dir) device.push(JS_TESTS_DIR, jtd_tests, timeout=600) device.chmod(jtd_tests, recursive=True) - device.push(os.path.dirname(TEST_DIR), options.remote_test_root, - timeout=600) + device.push(os.path.dirname(TEST_DIR), options.remote_test_root, timeout=600) device.chmod(options.remote_test_root, recursive=True) except (ADBError, ADBTimeoutError): - print("TEST-UNEXPECTED-FAIL | jit_test.py" + - " : Device initialization failed") + print("TEST-UNEXPECTED-FAIL | jit_test.py" + " : Device initialization failed") raise # Run all tests. @@ -804,8 +875,7 @@ def run_tests_remote(tests, num_tests, prefix, options, slog): gen = get_remote_results(tests, device, prefix, options) ok = process_test_results(gen, num_tests, pb, options, slog) except (ADBError, ADBTimeoutError): - print("TEST-UNEXPECTED-FAIL | jit_test.py" + - " : Device error during test") + print("TEST-UNEXPECTED-FAIL | jit_test.py" + " : Device error during test") raise return ok @@ -817,6 +887,7 @@ def platform_might_be_android(): # device. False positives and negatives are possible, # however. import android # NOQA: F401 + return True except ImportError: return False @@ -826,5 +897,5 @@ def stdio_might_be_broken(): return platform_might_be_android() -if __name__ == '__main__': - print('Use ../jit-test/jit_test.py to run these tests.') +if __name__ == "__main__": + print("Use ../jit-test/jit_test.py to run these tests.") diff --git a/js/src/tests/lib/manifest.py b/js/src/tests/lib/manifest.py index d5652cf4b48d60..79c42a22b01005 100644 --- a/js/src/tests/lib/manifest.py +++ b/js/src/tests/lib/manifest.py @@ -38,15 +38,18 @@ def as_js(self): """Return JS that when executed sets up variables so that JS expression predicates on XUL build info evaluate properly.""" - return ('var xulRuntime = {{ OS: "{}", XPCOMABI: "{}", shell: true }};' - 'var release_or_beta = getBuildConfiguration().release_or_beta;' - 'var isDebugBuild={}; var Android={}; ' - 'var browserIsRemote={}'.format( - self.os, - self.abi, - str(self.isdebug).lower(), - str(self.os == "Android").lower(), - str(self.browserIsRemote).lower())) + return ( + 'var xulRuntime = {{ OS: "{}", XPCOMABI: "{}", shell: true }};' + "var release_or_beta = getBuildConfiguration().release_or_beta;" + "var isDebugBuild={}; var Android={}; " + "var browserIsRemote={}".format( + self.os, + self.abi, + str(self.isdebug).lower(), + str(self.os == "Android").lower(), + str(self.browserIsRemote).lower(), + ) + ) @classmethod def create(cls, jsdir): @@ -60,30 +63,32 @@ def create(cls, jsdir): path = None for dir in dirs: - _path = posixpath.join(dir, 'config', 'autoconf.mk') + _path = posixpath.join(dir, "config", "autoconf.mk") if os.path.isfile(_path): path = _path break if path is None: - print("Can't find config/autoconf.mk on a directory containing" - " the JS shell (searched from {})".format(jsdir)) + print( + "Can't find config/autoconf.mk on a directory containing" + " the JS shell (searched from {})".format(jsdir) + ) sys.exit(1) # Read the values. 
- val_re = re.compile(r'(TARGET_XPCOM_ABI|OS_TARGET|MOZ_DEBUG)\s*=\s*(.*)') - kw = {'isdebug': False} - for line in io.open(path, encoding='utf-8'): + val_re = re.compile(r"(TARGET_XPCOM_ABI|OS_TARGET|MOZ_DEBUG)\s*=\s*(.*)") + kw = {"isdebug": False} + for line in io.open(path, encoding="utf-8"): m = val_re.match(line) if m: key, val = m.groups() val = val.rstrip() - if key == 'TARGET_XPCOM_ABI': - kw['abi'] = val - if key == 'OS_TARGET': - kw['os'] = val - if key == 'MOZ_DEBUG': - kw['isdebug'] = (val == '1') + if key == "TARGET_XPCOM_ABI": + kw["abi"] = val + if key == "OS_TARGET": + kw["os"] = val + if key == "MOZ_DEBUG": + kw["isdebug"] = val == "1" return cls(**kw) @@ -100,7 +105,7 @@ def __init__(self, xulinfo, options, js_args): if not self.options.remote: return self.device = init_device(options) - self.js_bin = posixpath.join(options.remote_test_root, 'bin', 'js') + self.js_bin = posixpath.join(options.remote_test_root, "bin", "js") def test(self, cond, options=[]): if self.options.remote: @@ -115,41 +120,47 @@ def _test_remote(self, cond, options=[]): return ans env = { - 'LD_LIBRARY_PATH': posixpath.join(self.options.remote_test_root, 'bin'), + "LD_LIBRARY_PATH": posixpath.join(self.options.remote_test_root, "bin"), } - cmd = [ - self.js_bin - ] + self.js_args + options + [ - # run in safe configuration, since it is hard to debug - # crashes when running code here. In particular, msan will - # error out if the jit is active. - '--no-baseline', - '--no-blinterp', - '-e', self.js_prologue, - '-e', 'print(!!({}))'.format(cond) - ] + cmd = ( + [self.js_bin] + + self.js_args + + options + + [ + # run in safe configuration, since it is hard to debug + # crashes when running code here. In particular, msan will + # error out if the jit is active. + "--no-baseline", + "--no-blinterp", + "-e", + self.js_prologue, + "-e", + "print(!!({}))".format(cond), + ] + ) cmd = ADBDevice._escape_command_line(cmd) try: # Allow ADBError or ADBTimeoutError to terminate the test run, # but handle ADBProcessError in order to support the use of # non-zero exit codes in the JavaScript shell tests. - out = self.device.shell_output(cmd, env=env, - cwd=self.options.remote_test_root, - timeout=None) - err = '' + out = self.device.shell_output( + cmd, env=env, cwd=self.options.remote_test_root, timeout=None + ) + err = "" except ADBProcessError as e: - out = '' + out = "" err = str(e.adb_process.stdout) - if out == 'true': + if out == "true": ans = True - elif out == 'false': + elif out == "false": ans = False else: - raise Exception("Failed to test XUL condition {!r};" - " output was {!r}, stderr was {!r}".format( - cond, out, err)) + raise Exception( + "Failed to test XUL condition {!r};" + " output was {!r}, stderr was {!r}".format(cond, out, err) + ) self.cache[cond] = ans return ans @@ -157,27 +168,35 @@ def _test_local(self, cond, options=[]): """Test a XUL predicate condition against this local info.""" ans = self.cache.get(cond, None) if ans is None: - cmd = [ - self.js_bin - ] + self.js_args + options + [ - # run in safe configuration, since it is hard to debug - # crashes when running code here. In particular, msan will - # error out if the jit is active. - '--no-baseline', - '--no-blinterp', - '-e', self.js_prologue, - '-e', 'print(!!({}))'.format(cond) - ] - p = Popen(cmd, stdin=PIPE, stdout=PIPE, stderr=PIPE, universal_newlines=True) + cmd = ( + [self.js_bin] + + self.js_args + + options + + [ + # run in safe configuration, since it is hard to debug + # crashes when running code here. 
In particular, msan will + # error out if the jit is active. + "--no-baseline", + "--no-blinterp", + "-e", + self.js_prologue, + "-e", + "print(!!({}))".format(cond), + ] + ) + p = Popen( + cmd, stdin=PIPE, stdout=PIPE, stderr=PIPE, universal_newlines=True + ) out, err = p.communicate() - if out in ('true\n', 'true\r\n'): + if out in ("true\n", "true\r\n"): ans = True - elif out in ('false\n', 'false\r\n'): + elif out in ("false\n", "false\r\n"): ans = False else: - raise Exception("Failed to test XUL condition {!r};" - " output was {!r}, stderr was {!r}".format( - cond, out, err)) + raise Exception( + "Failed to test XUL condition {!r};" + " output was {!r}, stderr was {!r}".format(cond, out, err) + ) self.cache[cond] = ans return ans @@ -193,72 +212,71 @@ def _parse_one(testcase, terms, xul_tester): pos = 0 parts = terms.split() while pos < len(parts): - if parts[pos] == 'fails': + if parts[pos] == "fails": testcase.expect = False pos += 1 - elif parts[pos] == 'skip': + elif parts[pos] == "skip": testcase.expect = testcase.enable = False pos += 1 - elif parts[pos] == 'random': + elif parts[pos] == "random": testcase.random = True pos += 1 - elif parts[pos].startswith('shell-option('): + elif parts[pos].startswith("shell-option("): # This directive adds an extra option to pass to the shell. - option = parts[pos][len('shell-option('):-1] + option = parts[pos][len("shell-option(") : -1] testcase.options.append(option) pos += 1 - elif parts[pos].startswith('fails-if'): - cond = parts[pos][len('fails-if('):-1] + elif parts[pos].startswith("fails-if"): + cond = parts[pos][len("fails-if(") : -1] if xul_tester.test(cond, testcase.options): testcase.expect = False pos += 1 - elif parts[pos].startswith('asserts-if'): + elif parts[pos].startswith("asserts-if"): # This directive means we may flunk some number of # NS_ASSERTIONs in the browser. For the shell, ignore it. pos += 1 - elif parts[pos].startswith('skip-if'): - cond = parts[pos][len('skip-if('):-1] + elif parts[pos].startswith("skip-if"): + cond = parts[pos][len("skip-if(") : -1] if xul_tester.test(cond, testcase.options): testcase.expect = testcase.enable = False pos += 1 - elif parts[pos].startswith('ignore-flag'): - flag = parts[pos][len('ignore-flag('):-1] + elif parts[pos].startswith("ignore-flag"): + flag = parts[pos][len("ignore-flag(") : -1] testcase.ignoredflags.append(flag) pos += 1 - elif parts[pos].startswith('random-if'): - cond = parts[pos][len('random-if('):-1] + elif parts[pos].startswith("random-if"): + cond = parts[pos][len("random-if(") : -1] if xul_tester.test(cond, testcase.options): testcase.random = True pos += 1 - elif parts[pos] == 'slow': + elif parts[pos] == "slow": testcase.slow = True pos += 1 - elif parts[pos].startswith('slow-if'): - cond = parts[pos][len('slow-if('):-1] + elif parts[pos].startswith("slow-if"): + cond = parts[pos][len("slow-if(") : -1] if xul_tester.test(cond, testcase.options): testcase.slow = True pos += 1 - elif parts[pos] == 'silentfail': + elif parts[pos] == "silentfail": # silentfails use tons of memory, and Darwin doesn't support ulimit. if xul_tester.test("xulRuntime.OS == 'Darwin'", testcase.options): testcase.expect = testcase.enable = False pos += 1 - elif parts[pos].startswith('error:'): + elif parts[pos].startswith("error:"): # This directive allows to specify an error type. 
- (_, _, errortype) = parts[pos].partition(':') + (_, _, errortype) = parts[pos].partition(":") testcase.error = errortype pos += 1 - elif parts[pos] == 'module': + elif parts[pos] == "module": # This directive marks the test as module code. testcase.is_module = True pos += 1 - elif parts[pos] == 'async': + elif parts[pos] == "async": # This directive marks the test as async. testcase.is_async = True pos += 1 else: - print('warning: invalid manifest line element "{}"'.format( - parts[pos])) + print('warning: invalid manifest line element "{}"'.format(parts[pos])) pos += 1 @@ -267,12 +285,19 @@ def _build_manifest_script_entry(script_name, test): properties = [] if test.terms: # Remove jsreftest internal terms. - terms = " ".join([term for term in test.terms.split() - if not (term == "module" or - term == "async" or - term.startswith("error:") or - term.startswith("ignore-flag(") or - term.startswith("shell-option("))]) + terms = " ".join( + [ + term + for term in test.terms.split() + if not ( + term == "module" + or term == "async" + or term.startswith("error:") + or term.startswith("ignore-flag(") + or term.startswith("shell-option(") + ) + ] + ) if terms: line.append(terms) if test.error: @@ -289,7 +314,7 @@ def _build_manifest_script_entry(script_name, test): if test.comment: line.append("#") line.append(test.comment) - return ' '.join(line) + return " ".join(line) def _map_prefixes_left(test_gen): @@ -317,7 +342,7 @@ def _emit_manifest_at(location, relative, test_gen, depth): """ manifests = _map_prefixes_left(test_gen) - filename = os.path.join(location, 'jstests.list') + filename = os.path.join(location, "jstests.list") manifest = [] numTestFiles = 0 for k, test_list in manifests.items(): @@ -337,30 +362,31 @@ def _emit_manifest_at(location, relative, test_gen, depth): # If we have tests, we have to set the url-prefix so reftest can find them. if numTestFiles > 0: - manifest = ["url-prefix {}jsreftest.html?test={}/".format( - '../' * depth, relative)] + manifest + manifest = [ + "url-prefix {}jsreftest.html?test={}/".format("../" * depth, relative) + ] + manifest - fp = io.open(filename, 'w', encoding='utf-8', newline='\n') + fp = io.open(filename, "w", encoding="utf-8", newline="\n") try: - fp.write('\n'.join(manifest) + '\n') + fp.write("\n".join(manifest) + "\n") finally: fp.close() def make_manifests(location, test_gen): - _emit_manifest_at(location, '', test_gen, 0) + _emit_manifest_at(location, "", test_gen, 0) def _find_all_js_files(location): for root, dirs, files in os.walk(location): - root = root[len(location) + 1:] + root = root[len(location) + 1 :] for fn in files: - if fn.endswith('.js'): + if fn.endswith(".js"): yield root, fn # The pattern for test header lines. -TEST_HEADER_PATTERN = r''' +TEST_HEADER_PATTERN = r""" # Ignore any space before the tag. \s* @@ -388,23 +414,33 @@ def _find_all_js_files(location): # The actual comment. (?P.*) )? -''' +""" -TEST_HEADER_PATTERN_INLINE = re.compile(r''' +TEST_HEADER_PATTERN_INLINE = re.compile( + r""" # Start a single line comment // -''' + TEST_HEADER_PATTERN + r''' +""" + + TEST_HEADER_PATTERN + + r""" # Match the end of line. $ -''', re.VERBOSE) -TEST_HEADER_PATTERN_MULTI = re.compile(r''' +""", + re.VERBOSE, +) +TEST_HEADER_PATTERN_MULTI = re.compile( + r""" # Start a multi line comment /\* -''' + TEST_HEADER_PATTERN + r''' +""" + + TEST_HEADER_PATTERN + + r""" # Match the end of comment. 
\*/ -''', re.VERBOSE) +""", + re.VERBOSE, +) def _append_terms_and_comment(testcase, terms, comment): @@ -425,7 +461,7 @@ def _parse_test_header(fullpath, testcase, xul_tester): it has to be done on every test """ if six.PY3: - fp = open(fullpath, encoding='utf-8') + fp = open(fullpath, encoding="utf-8") else: fp = open(fullpath) try: @@ -438,7 +474,7 @@ def _parse_test_header(fullpath, testcase, xul_tester): return # Extract the token. - buf, _, _ = buf.partition('\n') + buf, _, _ = buf.partition("\n") matches = TEST_HEADER_PATTERN_INLINE.match(buf) if not matches: @@ -446,9 +482,11 @@ def _parse_test_header(fullpath, testcase, xul_tester): if not matches: return - testcase.tag = matches.group('tag') - _append_terms_and_comment(testcase, matches.group('options'), matches.group('comment')) - _parse_one(testcase, matches.group('options'), xul_tester) + testcase.tag = matches.group("tag") + _append_terms_and_comment( + testcase, matches.group("options"), matches.group("comment") + ) + _parse_one(testcase, matches.group("options"), xul_tester) def _parse_external_manifest(filename, relpath): @@ -464,11 +502,13 @@ def _parse_external_manifest(filename, relpath): entries = [] - with io.open(filename, 'r', encoding='utf-8') as fp: - manifest_re = re.compile(r'^\s*(?P.*)\s+(?Pinclude|script)\s+(?P\S+)$') - include_re = re.compile(r'^\s*include\s+(?P\S+)$') + with io.open(filename, "r", encoding="utf-8") as fp: + manifest_re = re.compile( + r"^\s*(?P.*)\s+(?Pinclude|script)\s+(?P\S+)$" + ) + include_re = re.compile(r"^\s*include\s+(?P\S+)$") for line in fp: - line, _, comment = line.partition('#') + line, _, comment = line.partition("#") line = line.strip() if not line: continue @@ -476,28 +516,37 @@ def _parse_external_manifest(filename, relpath): if not matches: matches = include_re.match(line) if not matches: - print('warning: unrecognized line in jstests.list:' - ' {0}'.format(line)) + print( + "warning: unrecognized line in jstests.list:" + " {0}".format(line) + ) continue - include_file = matches.group('path') + include_file = matches.group("path") include_filename = os.path.join(os.path.dirname(filename), include_file) include_relpath = os.path.join(relpath, os.path.dirname(include_file)) - include_entries = _parse_external_manifest(include_filename, include_relpath) + include_entries = _parse_external_manifest( + include_filename, include_relpath + ) entries.extend(include_entries) continue - path = os.path.normpath(os.path.join(relpath, matches.group('path'))) - if matches.group('type') == 'include': + path = os.path.normpath(os.path.join(relpath, matches.group("path"))) + if matches.group("type") == "include": # The manifest spec wants a reference to another manifest here, # but we need just the directory. We do need the trailing # separator so we don't accidentally match other paths of which # this one is a prefix. - assert(path.endswith('jstests.list')) - path = path[:-len('jstests.list')] + assert path.endswith("jstests.list") + path = path[: -len("jstests.list")] - entries.append({'path': path, 'terms': matches.group('terms'), - 'comment': comment.strip()}) + entries.append( + { + "path": path, + "terms": matches.group("terms"), + "comment": comment.strip(), + } + ) # if one directory name is a prefix of another, we want the shorter one # first @@ -524,8 +573,16 @@ def _apply_external_manifests(filename, testcase, entries, xul_tester): def _is_test_file(path_from_root, basename, filename, path_options): # Any file whose basename matches something in this set is ignored. 
- EXCLUDED = set(('browser.js', 'shell.js', 'template.js', - 'user.js', 'js-test-driver-begin.js', 'js-test-driver-end.js')) + EXCLUDED = set( + ( + "browser.js", + "shell.js", + "template.js", + "user.js", + "js-test-driver-begin.js", + "js-test-driver-end.js", + ) + ) # Skip js files in the root test directory. if not path_from_root: @@ -559,8 +616,8 @@ def load_reftests(location, path_options, xul_tester): - an external manifest entry for a containing directory, - most commonly: the header of the test case itself. """ - manifestFile = os.path.join(location, 'jstests.list') - externalManifestEntries = _parse_external_manifest(manifestFile, '') + manifestFile = os.path.join(location, "jstests.list") + externalManifestEntries = _parse_external_manifest(manifestFile, "") for root, basename in _find_all_js_files(location): # Get the full path and relative location of the file. @@ -572,7 +629,8 @@ def load_reftests(location, path_options, xul_tester): fullpath = os.path.join(location, filename) testcase = RefTestCase(location, filename) - _apply_external_manifests(filename, testcase, externalManifestEntries, - xul_tester) + _apply_external_manifests( + filename, testcase, externalManifestEntries, xul_tester + ) _parse_test_header(fullpath, testcase, xul_tester) yield testcase diff --git a/js/src/tests/lib/progressbar.py b/js/src/tests/lib/progressbar.py index 4ee11d524cf1bc..1de78f596c2a0b 100644 --- a/js/src/tests/lib/progressbar.py +++ b/js/src/tests/lib/progressbar.py @@ -5,25 +5,31 @@ from datetime import datetime, timedelta -if sys.platform.startswith('win'): +if sys.platform.startswith("win"): from .terminal_win import Terminal else: from .terminal_unix import Terminal class NullProgressBar(object): - def update(self, current, data): pass + def update(self, current, data): + pass - def poke(self): pass + def poke(self): + pass - def finish(self, complete=True): pass + def finish(self, complete=True): + pass - def beginline(self): pass + def beginline(self): + pass - def message(self, msg): sys.stdout.write(msg + '\n') + def message(self, msg): + sys.stdout.write(msg + "\n") @staticmethod - def update_granularity(): return timedelta.max + def update_granularity(): + return timedelta.max class ProgressBar(object): @@ -62,30 +68,31 @@ def update(self, current, data): self.atLineStart = False # Build counters string. - sys.stdout.write('\r[') + sys.stdout.write("\r[") for layout in self.counters_fmt: - Terminal.set_color(layout['color']) - sys.stdout.write(('{:' + str(self.limit_digits) + 'd}').format( - data[layout['value']])) + Terminal.set_color(layout["color"]) + sys.stdout.write( + ("{:" + str(self.limit_digits) + "d}").format(data[layout["value"]]) + ) Terminal.reset_color() if layout != self.counters_fmt[-1]: - sys.stdout.write('|') + sys.stdout.write("|") else: - sys.stdout.write('] ') + sys.stdout.write("] ") # Build the bar. pct = int(100.0 * current / self.limit) - sys.stdout.write('{:3d}% '.format(pct)) + sys.stdout.write("{:3d}% ".format(pct)) barlen = int(1.0 * self.barlen * current / self.limit) - 1 - bar = '=' * barlen + '>' + ' ' * (self.barlen - barlen - 1) - sys.stdout.write(bar + '|') + bar = "=" * barlen + ">" + " " * (self.barlen - barlen - 1) + sys.stdout.write(bar + "|") # Update the bar. now = datetime.now() dt = now - self.t0 dt = dt.seconds + dt.microseconds * 1e-6 - sys.stdout.write('{:6.1f}s'.format(dt)) + sys.stdout.write("{:6.1f}s".format(dt)) Terminal.clear_right() # Force redisplay, since we didn't write a \n. 
@@ -102,22 +109,24 @@ def poke(self): def finish(self, complete=True): if not self.prior: - sys.stdout.write('No test run... You can try adding' - ' --run-slow-tests or --run-skipped to run more tests\n') + sys.stdout.write( + "No test run... You can try adding" + " --run-slow-tests or --run-skipped to run more tests\n" + ) return final_count = self.limit if complete else self.prior[0] self.update(final_count, self.prior[1]) - sys.stdout.write('\n') + sys.stdout.write("\n") def beginline(self): if not self.atLineStart: - sys.stdout.write('\n') + sys.stdout.write("\n") self.atLineStart = True def message(self, msg): self.beginline() sys.stdout.write(msg) - sys.stdout.write('\n') + sys.stdout.write("\n") @staticmethod def conservative_isatty(): @@ -129,6 +138,7 @@ def conservative_isatty(): """ try: import android # NOQA: F401 + return False except ImportError: return sys.stdout.isatty() diff --git a/js/src/tests/lib/remote.py b/js/src/tests/lib/remote.py index 0eba9957c536bf..45f665bb467ce8 100644 --- a/js/src/tests/lib/remote.py +++ b/js/src/tests/lib/remote.py @@ -12,8 +12,13 @@ def push_libs(options, device, dest_dir): # This saves considerable time in pushing unnecessary libraries # to the device but needs to be updated if the dependencies change. - required_libs = ['libnss3.so', 'libmozglue.so', 'libnspr4.so', - 'libplc4.so', 'libplds4.so'] + required_libs = [ + "libnss3.so", + "libmozglue.so", + "libnspr4.so", + "libplc4.so", + "libplds4.so", + ] for file in os.listdir(options.local_lib): if file in required_libs: @@ -25,8 +30,7 @@ def push_libs(options, device, dest_dir): def push_progs(options, device, progs, dest_dir): assert isinstance(progs, list) for local_file in progs: - remote_file = posixpath.join(dest_dir, - os.path.basename(local_file)) + remote_file = posixpath.join(dest_dir, os.path.basename(local_file)) device.push(local_file, remote_file) device.chmod(remote_file) @@ -50,26 +54,28 @@ def init_device(options): return DEVICE from mozdevice import ADBDeviceFactory, ADBError, ADBTimeoutError + try: if not options.local_lib: # if not specified, use the local directory containing # the js binary to find the necessary libraries. options.local_lib = posixpath.dirname(options.js_shell) - DEVICE = ADBDeviceFactory(device=options.device_serial, - test_root=options.remote_test_root) + DEVICE = ADBDeviceFactory( + device=options.device_serial, test_root=options.remote_test_root + ) init_remote_dir(DEVICE, options.remote_test_root) - bin_dir = posixpath.join(options.remote_test_root, 'bin') - tests_dir = posixpath.join(options.remote_test_root, 'tests') + bin_dir = posixpath.join(options.remote_test_root, "bin") + tests_dir = posixpath.join(options.remote_test_root, "tests") # Push js shell and libraries. 
init_remote_dir(DEVICE, tests_dir) init_remote_dir(DEVICE, bin_dir) push_libs(options, DEVICE, bin_dir) push_progs(options, DEVICE, [options.js_shell], bin_dir) # update options.js_shell to point to the js binary on the device - options.js_shell = os.path.join(bin_dir, 'js') + options.js_shell = os.path.join(bin_dir, "js") return DEVICE diff --git a/js/src/tests/lib/results.py b/js/src/tests/lib/results.py index 16076c14ce6699..f9c7746c01b5a9 100644 --- a/js/src/tests/lib/results.py +++ b/js/src/tests/lib/results.py @@ -11,18 +11,19 @@ def escape_cmdline(args): - return ' '.join([pipes.quote(a) for a in args]) + return " ".join([pipes.quote(a) for a in args]) class TestOutput: """Output from a test run.""" + def __init__(self, test, cmd, out, err, rc, dt, timed_out, extra=None): - self.test = test # Test - self.cmd = cmd # str: command line of test - self.out = out # str: stdout - self.err = err # str: stderr - self.rc = rc # int: return code - self.dt = dt # float: run time + self.test = test # Test + self.cmd = cmd # str: command line of test + self.out = out # str: stdout + self.err = err # str: stderr + self.rc = rc # int: return code + self.dt = dt # float: run time self.timed_out = timed_out # bool: did the test time out self.extra = extra # includes the pid on some platforms @@ -42,18 +43,18 @@ class NullTestOutput: def __init__(self, test): self.test = test - self.cmd = '' - self.out = '' - self.err = '' + self.cmd = "" + self.out = "" + self.err = "" self.rc = 0 self.dt = 0.0 self.timed_out = False class TestResult: - PASS = 'PASS' - FAIL = 'FAIL' - CRASH = 'CRASH' + PASS = "PASS" + FAIL = "FAIL" + CRASH = "CRASH" """Classified result from a test run.""" @@ -83,11 +84,15 @@ def from_wpt_output(cls, output): else: for (idx, line) in enumerate(stdout): if line.startswith("WPT OUTPUT: "): - msg = line[len("WPT OUTPUT: "):] + msg = line[len("WPT OUTPUT: ") :] data = [output.test.wpt.url] + json.loads(msg) - harness_status_obj, tests = testharness_result_converter(output.test.wpt, data) + harness_status_obj, tests = testharness_result_converter( + output.test.wpt, data + ) harness_status = harness_status_obj.status - harness_message = "Reported by harness: %s" % (harness_status_obj.message,) + harness_message = "Reported by harness: %s" % ( + harness_status_obj.message, + ) del stdout[idx] break else: @@ -107,7 +112,7 @@ def from_wpt_output(cls, output): result = cls.FAIL else: for test in tests: - test_output = "Subtest \"%s\": " % (test.name,) + test_output = 'Subtest "%s": ' % (test.name,) expected = output.test.wpt.expected(test.name) if test.status == expected: test_result = (cls.PASS, "") @@ -117,13 +122,15 @@ def from_wpt_output(cls, output): result = cls.FAIL test_output += "expected %s, found %s" % (expected, test.status) if test.message: - test_output += " (with message: \"%s\")" % (test.message,) - subtests.append({ - "test": output.test.wpt.id, - "subtest": test.name, - "status": test.status, - "expected": expected, - }) + test_output += ' (with message: "%s")' % (test.message,) + subtests.append( + { + "test": output.test.wpt.id, + "subtest": test.name, + "status": test.status, + "expected": expected, + } + ) results.append(test_result) stdout.append(test_output) @@ -141,8 +148,8 @@ def from_wpt_output(cls, output): @classmethod def from_output(cls, output): test = output.test - result = None # str: overall result, see class-level variables - results = [] # (str,str) list: subtest results (pass/fail, message) + result = None # str: overall result, see class-level variables 
+ results = [] # (str,str) list: subtest results (pass/fail, message) if test.wpt: return cls.from_wpt_output(output) @@ -153,21 +160,24 @@ def from_output(cls, output): passes = 0 expected_rcs = [] - if test.path.endswith('-n.js'): + if test.path.endswith("-n.js"): expected_rcs.append(3) - for line in out.split('\n'): - if line.startswith(' FAILED!'): + for line in out.split("\n"): + if line.startswith(" FAILED!"): failures += 1 - msg = line[len(' FAILED! '):] + msg = line[len(" FAILED! ") :] results.append((cls.FAIL, msg)) - elif line.startswith(' PASSED!'): + elif line.startswith(" PASSED!"): passes += 1 - msg = line[len(' PASSED! '):] + msg = line[len(" PASSED! ") :] results.append((cls.PASS, msg)) else: - m = re.match('--- NOTE: IN THIS TESTCASE, WE EXPECT EXIT CODE' - ' ((?:-|\\d)+) ---', line) + m = re.match( + "--- NOTE: IN THIS TESTCASE, WE EXPECT EXIT CODE" + " ((?:-|\\d)+) ---", + line, + ) if m: expected_rcs.append(int(m.group(1))) @@ -175,7 +185,9 @@ def from_output(cls, output): expected_rcs.append(3) if test.error not in err: failures += 1 - results.append((cls.FAIL, "Expected uncaught error: {}".format(test.error))) + results.append( + (cls.FAIL, "Expected uncaught error: {}".format(test.error)) + ) if rc and rc not in expected_rcs: if rc == 3: @@ -201,7 +213,7 @@ class ResultsSink: def __init__(self, testsuite, options, testcount): self.options = options self.fp = options.output_fp - if self.options.format == 'automation': + if self.options.format == "automation": self.slog = TestLogger(testsuite) self.slog.suite_start() @@ -209,6 +221,7 @@ def __init__(self, testsuite, options, testcount): if self.options.wptreport: try: from .wptreport import WptreportHandler + self.wptreport = WptreportHandler(self.options.wptreport) self.wptreport.suite_start() except ImportError: @@ -216,7 +229,7 @@ def __init__(self, testsuite, options, testcount): self.groups = {} self.output_dict = {} - self.counts = {'PASS': 0, 'FAIL': 0, 'TIMEOUT': 0, 'SKIP': 0} + self.counts = {"PASS": 0, "FAIL": 0, "TIMEOUT": 0, "SKIP": 0} self.slow_tests = [] self.n = 0 @@ -224,10 +237,10 @@ def __init__(self, testsuite, options, testcount): self.pb = NullProgressBar() else: fmt = [ - {'value': 'PASS', 'color': 'green'}, - {'value': 'FAIL', 'color': 'red'}, - {'value': 'TIMEOUT', 'color': 'blue'}, - {'value': 'SKIP', 'color': 'brightgray'}, + {"value": "PASS", "color": "green"}, + {"value": "FAIL", "color": "red"}, + {"value": "TIMEOUT", "color": "blue"}, + {"value": "SKIP", "color": "brightgray"}, ] self.pb = ProgressBar(testcount, fmt) @@ -235,13 +248,13 @@ def push(self, output): if self.options.show_slow and output.dt >= self.options.slow_test_threshold: self.slow_tests.append(TestDuration(output.test, output.dt)) if output.timed_out: - self.counts['TIMEOUT'] += 1 + self.counts["TIMEOUT"] += 1 if isinstance(output, NullTestOutput): - if self.options.format == 'automation': + if self.options.format == "automation": self.print_automation_result( - 'TEST-KNOWN-FAIL', output.test, time=output.dt, - skip=True) - self.counts['SKIP'] += 1 + "TEST-KNOWN-FAIL", output.test, time=output.dt, skip=True + ) + self.counts["SKIP"] += 1 self.n += 1 else: result = TestResult.from_output(output) @@ -255,54 +268,64 @@ def push(self, output): if self.options.check_output: if output.test.path in self.output_dict.keys(): if self.output_dict[output.test.path] != output: - self.counts['FAIL'] += 1 + self.counts["FAIL"] += 1 self.print_automation_result( - "TEST-UNEXPECTED-FAIL", result.test, time=output.dt, - message="Same test 
with different flag producing different output") + "TEST-UNEXPECTED-FAIL", + result.test, + time=output.dt, + message="Same test with different flag producing different output", + ) else: self.output_dict[output.test.path] = output if output.timed_out: - dev_label = 'TIMEOUTS' + dev_label = "TIMEOUTS" self.groups.setdefault(dev_label, []).append(result) - if dev_label == 'REGRESSIONS': - show_output = self.options.show_output \ - or not self.options.no_show_failed - elif dev_label == 'TIMEOUTS': + if dev_label == "REGRESSIONS": + show_output = ( + self.options.show_output or not self.options.no_show_failed + ) + elif dev_label == "TIMEOUTS": show_output = self.options.show_output else: - show_output = self.options.show_output \ - and not self.options.failed_only + show_output = self.options.show_output and not self.options.failed_only - if dev_label in ('REGRESSIONS', 'TIMEOUTS'): + if dev_label in ("REGRESSIONS", "TIMEOUTS"): show_cmd = self.options.show_cmd else: - show_cmd = self.options.show_cmd \ - and not self.options.failed_only + show_cmd = self.options.show_cmd and not self.options.failed_only if show_output or show_cmd: self.pb.beginline() if show_output: - print('## {}: rc = {:d}, run time = {}'.format( - output.test.path, output.rc, output.dt), file=self.fp) + print( + "## {}: rc = {:d}, run time = {}".format( + output.test.path, output.rc, output.dt + ), + file=self.fp, + ) if show_cmd: print(escape_cmdline(output.cmd), file=self.fp) if show_output: + def write_with_fallback(fp, data): try: fp.write(data) except UnicodeEncodeError as e: # In case the data contains something not directly # encodable, use \uXXXX. - fp.write('WARNING: Falling back from exception: {}\n'.format(e)) - fp.write('WARNING: The following output is escaped, ') - fp.write('and may be different than original one.\n') - fp.write(data.encode('ascii', 'namereplace') - .decode('ascii')) + fp.write( + "WARNING: Falling back from exception: {}\n".format(e) + ) + fp.write("WARNING: The following output is escaped, ") + fp.write("and may be different than original one.\n") + fp.write( + data.encode("ascii", "namereplace").decode("ascii") + ) write_with_fallback(self.fp, output.out) write_with_fallback(self.fp, output.err) @@ -310,40 +333,43 @@ def write_with_fallback(fp, data): self.n += 1 if result.result == TestResult.PASS and not result.test.random: - self.counts['PASS'] += 1 + self.counts["PASS"] += 1 elif result.test.expect and not result.test.random: - self.counts['FAIL'] += 1 + self.counts["FAIL"] += 1 else: - self.counts['SKIP'] += 1 + self.counts["SKIP"] += 1 - if self.options.format == 'automation': + if self.options.format == "automation": if result.result != TestResult.PASS and len(result.results) > 1: for sub_ok, msg in result.results: tup = (sub_ok, result.test.expect, result.test.random) label = self.LABELS[tup][0] - if label == 'TEST-UNEXPECTED-PASS': - label = 'TEST-PASS (EXPECTED RANDOM)' + if label == "TEST-UNEXPECTED-PASS": + label = "TEST-PASS (EXPECTED RANDOM)" self.print_automation_result( - label, result.test, time=output.dt, - message=msg) + label, result.test, time=output.dt, message=msg + ) tup = (result.result, result.test.expect, result.test.random) - self.print_automation_result(self.LABELS[tup][0], - result.test, - time=output.dt, - extra=getattr(output, 'extra', None)) + self.print_automation_result( + self.LABELS[tup][0], + result.test, + time=output.dt, + extra=getattr(output, "extra", None), + ) return if dev_label: + def singular(label): return "FIXED" if label == "FIXES" else 
label[:-1] - self.pb.message("{} - {}".format(singular(dev_label), - output.test.path)) + + self.pb.message("{} - {}".format(singular(dev_label), output.test.path)) self.pb.update(self.n, self.counts) def finish(self, completed): self.pb.finish(completed) - if self.options.format == 'automation': + if self.options.format == "automation": self.slog.suite_end() else: self.list(completed) @@ -355,88 +381,93 @@ def finish(self, completed): # key is (result, expect, random) # value is (automation label, dev test category) LABELS = { - (TestResult.CRASH, False, False): ('TEST-UNEXPECTED-FAIL', 'REGRESSIONS'), - (TestResult.CRASH, False, True): ('TEST-UNEXPECTED-FAIL', 'REGRESSIONS'), - (TestResult.CRASH, True, False): ('TEST-UNEXPECTED-FAIL', 'REGRESSIONS'), - (TestResult.CRASH, True, True): ('TEST-UNEXPECTED-FAIL', 'REGRESSIONS'), - - (TestResult.FAIL, False, False): ('TEST-KNOWN-FAIL', ''), - (TestResult.FAIL, False, True): ('TEST-KNOWN-FAIL (EXPECTED RANDOM)', ''), - (TestResult.FAIL, True, False): ('TEST-UNEXPECTED-FAIL', 'REGRESSIONS'), - (TestResult.FAIL, True, True): ('TEST-KNOWN-FAIL (EXPECTED RANDOM)', ''), - - (TestResult.PASS, False, False): ('TEST-UNEXPECTED-PASS', 'FIXES'), - (TestResult.PASS, False, True): ('TEST-PASS (EXPECTED RANDOM)', ''), - (TestResult.PASS, True, False): ('TEST-PASS', ''), - (TestResult.PASS, True, True): ('TEST-PASS (EXPECTED RANDOM)', ''), + (TestResult.CRASH, False, False): ("TEST-UNEXPECTED-FAIL", "REGRESSIONS"), + (TestResult.CRASH, False, True): ("TEST-UNEXPECTED-FAIL", "REGRESSIONS"), + (TestResult.CRASH, True, False): ("TEST-UNEXPECTED-FAIL", "REGRESSIONS"), + (TestResult.CRASH, True, True): ("TEST-UNEXPECTED-FAIL", "REGRESSIONS"), + (TestResult.FAIL, False, False): ("TEST-KNOWN-FAIL", ""), + (TestResult.FAIL, False, True): ("TEST-KNOWN-FAIL (EXPECTED RANDOM)", ""), + (TestResult.FAIL, True, False): ("TEST-UNEXPECTED-FAIL", "REGRESSIONS"), + (TestResult.FAIL, True, True): ("TEST-KNOWN-FAIL (EXPECTED RANDOM)", ""), + (TestResult.PASS, False, False): ("TEST-UNEXPECTED-PASS", "FIXES"), + (TestResult.PASS, False, True): ("TEST-PASS (EXPECTED RANDOM)", ""), + (TestResult.PASS, True, False): ("TEST-PASS", ""), + (TestResult.PASS, True, True): ("TEST-PASS (EXPECTED RANDOM)", ""), } def list(self, completed): for label, results in sorted(self.groups.items()): - if label == '': + if label == "": continue print(label) for result in results: - print(' {}'.format(' '.join(result.test.jitflags + - result.test.options + - [result.test.path]))) + print( + " {}".format( + " ".join( + result.test.jitflags + + result.test.options + + [result.test.path] + ) + ) + ) if self.options.failure_file: - failure_file = open(self.options.failure_file, 'w') + failure_file = open(self.options.failure_file, "w") if not self.all_passed(): - if 'REGRESSIONS' in self.groups: - for result in self.groups['REGRESSIONS']: + if "REGRESSIONS" in self.groups: + for result in self.groups["REGRESSIONS"]: print(result.test.path, file=failure_file) - if 'TIMEOUTS' in self.groups: - for result in self.groups['TIMEOUTS']: + if "TIMEOUTS" in self.groups: + for result in self.groups["TIMEOUTS"]: print(result.test.path, file=failure_file) failure_file.close() - suffix = '' if completed else ' (partial run -- interrupted by user)' + suffix = "" if completed else " (partial run -- interrupted by user)" if self.all_passed(): - print('PASS' + suffix) + print("PASS" + suffix) else: - print('FAIL' + suffix) + print("FAIL" + suffix) if self.options.show_slow: min_duration = self.options.slow_test_threshold - 
print('Slow tests (duration > {}s)'.format(min_duration)) + print("Slow tests (duration > {}s)".format(min_duration)) slow_tests = sorted(self.slow_tests, key=lambda x: x.duration, reverse=True) any = False for test in slow_tests: - print('{:>5} {}'.format(round(test.duration, 2), test.test)) + print("{:>5} {}".format(round(test.duration, 2), test.test)) any = True if not any: - print('None') + print("None") def all_passed(self): - return 'REGRESSIONS' not in self.groups and 'TIMEOUTS' not in self.groups + return "REGRESSIONS" not in self.groups and "TIMEOUTS" not in self.groups - def print_automation_result(self, label, test, message=None, skip=False, - time=None, extra=None): + def print_automation_result( + self, label, test, message=None, skip=False, time=None, extra=None + ): result = label result += " | " + test.path args = [] if self.options.shell_args: args.append(self.options.shell_args) args += test.jitflags - result += ' | (args: "{}")'.format(' '.join(args)) + result += ' | (args: "{}")'.format(" ".join(args)) if message: result += " | " + message if skip: - result += ' | (SKIP)' + result += " | (SKIP)" if time > self.options.timeout: - result += ' | (TIMEOUT)' - result += ' [{:.1f} s]'.format(time) + result += " | (TIMEOUT)" + result += " [{:.1f} s]".format(time) print(result) - details = {'extra': extra.copy() if extra else {}} + details = {"extra": extra.copy() if extra else {}} if self.options.shell_args: - details['extra']['shell_args'] = self.options.shell_args - details['extra']['jitflags'] = test.jitflags + details["extra"]["shell_args"] = self.options.shell_args + details["extra"]["jitflags"] = test.jitflags if message: - details['message'] = message - status = 'FAIL' if 'TEST-UNEXPECTED' in label else 'PASS' + details["message"] = message + status = "FAIL" if "TEST-UNEXPECTED" in label else "PASS" self.slog.test(test.path, status, time or 0, **details) diff --git a/js/src/tests/lib/structuredlog.py b/js/src/tests/lib/structuredlog.py index 0a9104a3c1e97c..5793dcd67cf7ca 100644 --- a/js/src/tests/lib/structuredlog.py +++ b/js/src/tests/lib/structuredlog.py @@ -10,11 +10,11 @@ class TestLogger(object): - def __init__(self, source, threadname='main'): + def __init__(self, source, threadname="main"): self.template = { - 'source': source, - 'thread': threadname, - 'pid': os.getpid(), + "source": source, + "thread": threadname, + "pid": os.getpid(), } directory = os.environ.get("MOZ_UPLOAD_DIR", ".") self.fh = open(os.path.join(directory, threadname + "_raw.log"), "a") @@ -22,8 +22,8 @@ def __init__(self, source, threadname='main'): def _record(self, **kwargs): record = self.template.copy() record.update(**kwargs) - if 'time' not in record: - record['time'] = time() + if "time" not in record: + record["time"] = time() return record def _log_obj(self, obj): @@ -33,25 +33,27 @@ def _log(self, **kwargs): self._log_obj(self._record(**kwargs)) def suite_start(self): - self._log(action='suite_start', tests=[]) + self._log(action="suite_start", tests=[]) def suite_end(self): - self._log(action='suite_end') + self._log(action="suite_end") def test_start(self, testname): - self._log(action='test_start', test=testname) + self._log(action="test_start", test=testname) def test_end(self, testname, status): - self._log(action='test_end', test=testname, status=status) + self._log(action="test_end", test=testname, status=status) def test(self, testname, status, duration, **details): - record = self._record(action='test_start', test=testname, **details.get('extra', {})) - end_time = 
record['time'] - record['time'] -= duration + record = self._record( + action="test_start", test=testname, **details.get("extra", {}) + ) + end_time = record["time"] + record["time"] -= duration self._log_obj(record) - record['action'] = 'test_end' - record['time'] = end_time - record['status'] = status + record["action"] = "test_end" + record["time"] = end_time + record["status"] = status record.update(**details) self._log_obj(record) diff --git a/js/src/tests/lib/tasks_unix.py b/js/src/tests/lib/tasks_unix.py index 4d67f7578fd552..563651e51b8708 100644 --- a/js/src/tests/lib/tasks_unix.py +++ b/js/src/tests/lib/tasks_unix.py @@ -124,7 +124,7 @@ def read_input(tasks, timeout): try: readable, _, _ = select.select(rlist, [], exlist, timeout) except OverflowError: - print >> sys.stderr, "timeout value", timeout + print >>sys.stderr, "timeout value", timeout raise for fd in readable: @@ -163,7 +163,7 @@ def timed_out(task, timeout): # Local copy of six.ensure_str for when six is unavailable or too old. -def ensure_str(s, encoding='utf-8', errors='strict'): +def ensure_str(s, encoding="utf-8", errors="strict"): if PY2: if isinstance(s, str): return s @@ -207,12 +207,14 @@ def reap_zombies(tasks, timeout): TestOutput( ended.test, ended.cmd, - ensure_str(b''.join(ended.out), errors='replace'), - ensure_str(b''.join(ended.err), errors='replace'), + ensure_str(b"".join(ended.out), errors="replace"), + ensure_str(b"".join(ended.err), errors="replace"), returncode, (datetime.now() - ended.start).total_seconds(), timed_out(ended, timeout), - {'pid': ended.pid})) + {"pid": ended.pid}, + ) + ) return tasks, finished @@ -243,8 +245,9 @@ def run_all_tests(tests, prefix, pb, options): while len(tests) or len(tasks): while len(tests) and len(tasks) < options.worker_count: test = tests.pop() - task = spawn_test(test, prefix, - options.passthrough, options.run_skipped, options.show_cmd) + task = spawn_test( + test, prefix, options.passthrough, options.run_skipped, options.show_cmd + ) if task: tasks.append(task) else: diff --git a/js/src/tests/lib/tasks_win.py b/js/src/tests/lib/tasks_win.py index d445040a51a95b..56f5d0adce527b 100644 --- a/js/src/tests/lib/tasks_win.py +++ b/js/src/tests/lib/tasks_win.py @@ -39,8 +39,7 @@ def _do_work(qTasks, qResults, qWatch, prefix, run_skipped, timeout, show_cmd): if show_cmd: print(escape_cmdline(cmd)) tStart = datetime.now() - proc = subprocess.Popen( - cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE) + proc = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE) # Push the task to the watchdog -- it will kill the task # if it goes over the timeout while we keep its stdout @@ -51,15 +50,22 @@ def _do_work(qTasks, qResults, qWatch, prefix, run_skipped, timeout, show_cmd): # still needing to support Python 3.5, which doesn't have the "encoding" # parameter to the Popen constructor, so we have to decode the output # here. - system_encoding = 'mbcs' if sys.platform == 'win32' else 'utf-8' + system_encoding = "mbcs" if sys.platform == "win32" else "utf-8" out = out.decode(system_encoding) err = err.decode(system_encoding) qWatch.put(TaskFinishedMarker) # Create a result record and forward to result processing. 
dt = datetime.now() - tStart - result = TestOutput(test, cmd, out, err, proc.returncode, dt.total_seconds(), - dt > timedelta(seconds=timeout)) + result = TestOutput( + test, + cmd, + out, + err, + proc.returncode, + dt.total_seconds(), + dt > timedelta(seconds=timeout), + ) qResults.put(result) @@ -99,9 +105,18 @@ def run_all_tests(tests, prefix, pb, options): watcher.setDaemon(True) watcher.start() watchdogs.append(watcher) - worker = Thread(target=_do_work, args=(qTasks, qResults, qWatch, - prefix, options.run_skipped, - options.timeout, options.show_cmd)) + worker = Thread( + target=_do_work, + args=( + qTasks, + qResults, + qWatch, + prefix, + options.run_skipped, + options.timeout, + options.show_cmd, + ), + ) worker.setDaemon(True) worker.start() workers.append(worker) @@ -116,6 +131,7 @@ def _do_push(num_workers, qTasks): qTasks.put(test) for _ in range(num_workers): qTasks.put(EndMarker) + pusher = Thread(target=_do_push, args=(len(workers), qTasks)) pusher.setDaemon(True) pusher.start() diff --git a/js/src/tests/lib/terminal_unix.py b/js/src/tests/lib/terminal_unix.py index b993bd4affbe2b..cdf799577690d5 100644 --- a/js/src/tests/lib/terminal_unix.py +++ b/js/src/tests/lib/terminal_unix.py @@ -2,19 +2,14 @@ class Terminal(object): - COLOR = { - 'red': '31', - 'green': '32', - 'blue': '34', - 'gray': '37' - } - NORMAL_INTENSITY = '1' - BRIGHT_INTENSITY = '2' - ESCAPE = '\x1b[' - RESET = '0' - SEPARATOR = ';' - COLOR_CODE = 'm' - CLEAR_RIGHT_CODE = 'K' + COLOR = {"red": "31", "green": "32", "blue": "34", "gray": "37"} + NORMAL_INTENSITY = "1" + BRIGHT_INTENSITY = "2" + ESCAPE = "\x1b[" + RESET = "0" + SEPARATOR = ";" + COLOR_CODE = "m" + CLEAR_RIGHT_CODE = "K" @classmethod def set_color(cls, color): @@ -22,13 +17,12 @@ def set_color(cls, color): color: str - color definition string """ mod = Terminal.NORMAL_INTENSITY - if color.startswith('bright'): + if color.startswith("bright"): mod = Terminal.BRIGHT_INTENSITY - color = color[len('bright'):] + color = color[len("bright") :] color_code = Terminal.COLOR[color] - sys.stdout.write(cls.ESCAPE + color_code + cls.SEPARATOR + mod - + cls.COLOR_CODE) + sys.stdout.write(cls.ESCAPE + color_code + cls.SEPARATOR + mod + cls.COLOR_CODE) @classmethod def reset_color(cls): diff --git a/js/src/tests/lib/terminal_win.py b/js/src/tests/lib/terminal_win.py index 28e5de24a73f3d..e4b65916997430 100644 --- a/js/src/tests/lib/terminal_win.py +++ b/js/src/tests/lib/terminal_win.py @@ -17,28 +17,26 @@ class COORD(Structure): """struct in wincon.h.""" - _fields_ = [ - ("X", SHORT), - ("Y", SHORT)] + + _fields_ = [("X", SHORT), ("Y", SHORT)] class SMALL_RECT(Structure): """struct in wincon.h.""" - _fields_ = [ - ("Left", SHORT), - ("Top", SHORT), - ("Right", SHORT), - ("Bottom", SHORT)] + + _fields_ = [("Left", SHORT), ("Top", SHORT), ("Right", SHORT), ("Bottom", SHORT)] class CONSOLE_SCREEN_BUFFER_INFO(Structure): """struct in wincon.h.""" + _fields_ = [ ("dwSize", COORD), ("dwCursorPosition", COORD), ("wAttributes", WORD), ("srWindow", SMALL_RECT), - ("dwMaximumWindowSize", COORD)] + ("dwMaximumWindowSize", COORD), + ] # winbase.h @@ -83,14 +81,14 @@ def get_text_attr(): class Terminal(object): COLOR = { - 'black': 0x0000, - 'blue': 0x0001, - 'green': 0x0002, - 'cyan': 0x0003, - 'red': 0x0004, - 'magenta': 0x0005, - 'yellow': 0x0006, - 'gray': 0x0007 + "black": 0x0000, + "blue": 0x0001, + "green": 0x0002, + "cyan": 0x0003, + "red": 0x0004, + "magenta": 0x0005, + "yellow": 0x0006, + "gray": 0x0007, } BRIGHT_INTENSITY = 0x0008 BACKGROUND_SHIFT = 4 @@ 
-101,9 +99,9 @@ def set_color(cls, color): color: str - color definition string """ color_code = 0 - if color.startswith('bright'): + if color.startswith("bright"): color_code |= cls.BRIGHT_INTENSITY - color = color[len('bright'):] + color = color[len("bright") :] color_code |= Terminal.COLOR[color] SetConsoleTextAttribute(stdout_handle, color_code) diff --git a/js/src/tests/lib/tests.py b/js/src/tests/lib/tests.py index 6f44b142853ea8..b9e4fa11882827 100644 --- a/js/src/tests/lib/tests.py +++ b/js/src/tests/lib/tests.py @@ -10,66 +10,90 @@ # When run on tbpl, we run each test multiple times with the following # arguments. JITFLAGS = { - 'all': [ + "all": [ [], # no flags, normal baseline and ion - ['--ion-eager', '--ion-offthread-compile=off', # implies --baseline-eager - '--more-compartments'], - ['--ion-eager', '--ion-offthread-compile=off', - '--ion-check-range-analysis', '--ion-extra-checks', '--no-sse3', '--no-threads'], - ['--baseline-eager'], - ['--no-blinterp', '--no-baseline', '--no-ion', '--more-compartments'], - ['--blinterp-eager'], + [ + "--ion-eager", + "--ion-offthread-compile=off", # implies --baseline-eager + "--more-compartments", + ], + [ + "--ion-eager", + "--ion-offthread-compile=off", + "--ion-check-range-analysis", + "--ion-extra-checks", + "--no-sse3", + "--no-threads", + ], + ["--baseline-eager"], + ["--no-blinterp", "--no-baseline", "--no-ion", "--more-compartments"], + ["--blinterp-eager"], ], # Like 'all' above but for jstests. This has fewer jit-specific # configurations. - 'jstests': [ + "jstests": [ [], # no flags, normal baseline and ion - ['--ion-eager', '--ion-offthread-compile=off', # implies --baseline-eager - '--more-compartments'], - ['--baseline-eager'], - ['--no-blinterp', '--no-baseline', '--no-ion', '--more-compartments'], + [ + "--ion-eager", + "--ion-offthread-compile=off", # implies --baseline-eager + "--more-compartments", + ], + ["--baseline-eager"], + ["--no-blinterp", "--no-baseline", "--no-ion", "--more-compartments"], ], # used by jit_test.py - 'ion': [ - ['--baseline-eager'], - ['--ion-eager', '--ion-offthread-compile=off', '--more-compartments'] + "ion": [ + ["--baseline-eager"], + ["--ion-eager", "--ion-offthread-compile=off", "--more-compartments"], ], # Used for testing WarpBuilder. - 'warp': [ - ['--warp'], - ['--warp', '--ion-eager', '--ion-offthread-compile=off'] - ], - 'nowarp': [ - ['--no-warp'], - ['--no-warp', '--ion-eager', '--ion-offthread-compile=off', - '--more-compartments'], - ['--no-warp', '--baseline-eager'], + "warp": [["--warp"], ["--warp", "--ion-eager", "--ion-offthread-compile=off"]], + "nowarp": [ + ["--no-warp"], + [ + "--no-warp", + "--ion-eager", + "--ion-offthread-compile=off", + "--more-compartments", + ], + ["--no-warp", "--baseline-eager"], ], # Run reduced variants on debug builds, since they take longer time. - 'debug': [ + "debug": [ [], # no flags, normal baseline and ion - ['--ion-eager', '--ion-offthread-compile=off', # implies --baseline-eager - '--more-compartments'], - ['--baseline-eager'], + [ + "--ion-eager", + "--ion-offthread-compile=off", # implies --baseline-eager + "--more-compartments", + ], + ["--baseline-eager"], ], # Cover cases useful for tsan. Note that we test --ion-eager without # --ion-offthread-compile=off here, because it helps catch races. 
- 'tsan': [ + "tsan": [ [], - ['--ion-eager', '--ion-check-range-analysis', '--ion-extra-checks', '--no-sse3'], - ['--no-blinterp', '--no-baseline', '--no-ion'], + [ + "--ion-eager", + "--ion-check-range-analysis", + "--ion-extra-checks", + "--no-sse3", + ], + ["--no-blinterp", "--no-baseline", "--no-ion"], ], - 'baseline': [ - ['--no-ion'], + "baseline": [ + ["--no-ion"], ], # Interpreter-only, for tools that cannot handle binary code generation. - 'interp': [ - ['--no-blinterp', '--no-baseline', '--no-asmjs', '--wasm-compiler=none', - '--no-native-regexp'] + "interp": [ + [ + "--no-blinterp", + "--no-baseline", + "--no-asmjs", + "--wasm-compiler=none", + "--no-native-regexp", + ] ], - 'none': [ - [] # no flags, normal baseline and ion - ] + "none": [[]], # no flags, normal baseline and ion } @@ -77,8 +101,8 @@ def get_jitflags(variant, **kwargs): if variant not in JITFLAGS: print('Invalid jitflag: "{}"'.format(variant)) sys.exit(1) - if variant == 'none' and 'none' in kwargs: - return kwargs['none'] + if variant == "none" and "none" in kwargs: + return kwargs["none"] return JITFLAGS[variant] @@ -94,21 +118,21 @@ def get_environment_overlay(js_shell): # When updating this also update |buildBrowserEnv| in layout/tools/reftest/runreftest.py. env = { # Force Pacific time zone to avoid failures in Date tests. - 'TZ': 'PST8PDT', + "TZ": "PST8PDT", # Force date strings to English. - 'LC_ALL': 'en_US.UTF-8', + "LC_ALL": "en_US.UTF-8", # Tell the shell to disable crash dialogs on windows. - 'XRE_NO_WINDOWS_CRASH_DIALOG': '1', + "XRE_NO_WINDOWS_CRASH_DIALOG": "1", } # Add the binary's directory to the library search path so that we find the # nspr and icu we built, instead of the platform supplied ones (or none at # all on windows). - if sys.platform.startswith('linux'): - env['LD_LIBRARY_PATH'] = os.path.dirname(js_shell) - elif sys.platform.startswith('darwin'): - env['DYLD_LIBRARY_PATH'] = os.path.dirname(js_shell) - elif sys.platform.startswith('win'): - env['PATH'] = os.path.dirname(js_shell) + if sys.platform.startswith("linux"): + env["LD_LIBRARY_PATH"] = os.path.dirname(js_shell) + elif sys.platform.startswith("darwin"): + env["DYLD_LIBRARY_PATH"] = os.path.dirname(js_shell) + elif sys.platform.startswith("win"): + env["PATH"] = os.path.dirname(js_shell) return env @@ -118,8 +142,8 @@ def change_env(env_overlay): prior_env = {} for key, val in env_overlay.items(): prior_env[key] = os.environ.get(key, None) - if 'PATH' in key and key in os.environ: - os.environ[key] = '{}{}{}'.format(val, os.pathsep, os.environ[key]) + if "PATH" in key and key in os.environ: + os.environ[key] = "{}{}{}".format(val, os.pathsep, os.environ[key]) else: os.environ[key] = val @@ -144,13 +168,14 @@ def get_cpu_count(): # Python 2.6+ try: import multiprocessing + return multiprocessing.cpu_count() except (ImportError, NotImplementedError): pass # POSIX try: - res = int(os.sysconf('SC_NPROCESSORS_ONLN')) + res = int(os.sysconf("SC_NPROCESSORS_ONLN")) if res > 0: return res except (AttributeError, ValueError): @@ -158,7 +183,7 @@ def get_cpu_count(): # Windows try: - res = int(os.environ['NUMBER_OF_PROCESSORS']) + res = int(os.environ["NUMBER_OF_PROCESSORS"]) if res > 0: return res except (KeyError, ValueError): @@ -216,17 +241,17 @@ def prefix_command(self): """Return the '-f' options needed to run a test with the given path.""" path = self.path prefix = [] - while path != '': - assert path != '/' + while path != "": + assert path != "/" path = os.path.dirname(path) - shell_path = os.path.join(self.root, path, 
'shell.js') + shell_path = os.path.join(self.root, path, "shell.js") if os.path.exists(shell_path): prefix.append(shell_path) - prefix.append('-f') + prefix.append("-f") prefix.reverse() for extra_path in self.extra_helper_paths: - prefix.append('-f') + prefix.append("-f") prefix.append(extra_path) return prefix @@ -250,17 +275,17 @@ def get_command(self, prefix): def __str__(self): ans = self.path if not self.enable: - ans += ', skip' + ans += ", skip" if self.error is not None: - ans += ', error=' + self.error + ans += ", error=" + self.error if not self.expect: - ans += ', fails' + ans += ", fails" if self.random: - ans += ', random' + ans += ", random" if self.slow: - ans += ', slow' - if '-d' in self.options: - ans += ', debugMode' + ans += ", slow" + if "-d" in self.options: + ans += ", debugMode" return ans @staticmethod diff --git a/js/src/tests/lib/wptreport.py b/js/src/tests/lib/wptreport.py index ff4dc02aef8b19..95fa9b1ed71653 100644 --- a/js/src/tests/lib/wptreport.py +++ b/js/src/tests/lib/wptreport.py @@ -26,19 +26,23 @@ def suite_start(self): """ Produce the "suite_start" message at the present time. """ - self.formatter.suite_start({ - "time": time(), - "run_info": {}, - }) + self.formatter.suite_start( + { + "time": time(), + "run_info": {}, + } + ) def suite_end(self): """ Produce the "suite_end" message at the present time and write the results to the file path given in the constructor. """ - result = self.formatter.suite_end({ - "time": time(), - }) + result = self.formatter.suite_end( + { + "time": time(), + } + ) with open(self.out, "w") as fp: fp.write(result) @@ -61,17 +65,21 @@ def test(self, result, duration): end_time = time() start_time = end_time - duration - self.formatter.test_start({ - "test": testname, - "time": start_time, - }) + self.formatter.test_start( + { + "test": testname, + "time": start_time, + } + ) for subtest in result["subtests"]: self.formatter.test_status(subtest) - self.formatter.test_end({ - "test": testname, - "time": end_time, - "status": result["status"], - "expected": result["expected"], - }) + self.formatter.test_end( + { + "test": testname, + "time": end_time, + "status": result["status"], + "expected": result["expected"], + } + ) diff --git a/js/src/tests/moz.build b/js/src/tests/moz.build index 28919c271d3326..568f361a54234b 100644 --- a/js/src/tests/moz.build +++ b/js/src/tests/moz.build @@ -3,4 +3,3 @@ # This Source Code Form is subject to the terms of the Mozilla Public # License, v. 2.0. If a copy of the MPL was not distributed with this # file, You can obtain one at http://mozilla.org/MPL/2.0/. 
- diff --git a/js/src/tests/non262/String/make-normalize-generateddata-input.py b/js/src/tests/non262/String/make-normalize-generateddata-input.py index c169447af31729..9fb23e682e1a00 100644 --- a/js/src/tests/non262/String/make-normalize-generateddata-input.py +++ b/js/src/tests/non262/String/make-normalize-generateddata-input.py @@ -11,38 +11,43 @@ import re import sys -sep_pat = re.compile(' +') +sep_pat = re.compile(" +") def to_code_list(codes): - return '[' + ', '.join('0x{0}'.format(x) for x in re.split(sep_pat, codes)) + ']' + return "[" + ", ".join("0x{0}".format(x) for x in re.split(sep_pat, codes)) + "]" def convert(dir): - ver_pat = re.compile('NormalizationTest-([0-9\.]+)\.txt') - part_pat = re.compile('^@(Part([0-9]+) .+)$') + ver_pat = re.compile("NormalizationTest-([0-9\.]+)\.txt") + part_pat = re.compile("^@(Part([0-9]+) .+)$") test_pat = re.compile( - '^([0-9A-Fa-f ]+);([0-9A-Fa-f ]+);([0-9A-Fa-f ]+);([0-9A-Fa-f ]+);([0-9A-Fa-f ]+);$') - ignore_pat = re.compile('^#|^$') - js_path = 'js/src/tests/non262/String/normalize-generateddata-input.js' - txt_path = 'intl/icu/source/data/unidata/NormalizationTest.txt' + "^([0-9A-Fa-f ]+);([0-9A-Fa-f ]+);([0-9A-Fa-f ]+);([0-9A-Fa-f ]+);([0-9A-Fa-f ]+);$" + ) + ignore_pat = re.compile("^#|^$") + js_path = "js/src/tests/non262/String/normalize-generateddata-input.js" + txt_path = "intl/icu/source/data/unidata/NormalizationTest.txt" part_opened = False not_empty = False - with open('{dir}/{path}'.format(dir=dir, path=txt_path), 'r') as f: - with open('{dir}/{path}'.format(dir=dir, path=js_path), 'w') as outf: + with open("{dir}/{path}".format(dir=dir, path=txt_path), "r") as f: + with open("{dir}/{path}".format(dir=dir, path=js_path), "w") as outf: for line in f: m = test_pat.search(line) if m: if not_empty: - outf.write(',') - outf.write('\n') - pat = '{{ source: {source}, NFC: {NFC}, NFD: {NFD}, NFKC: {NFKC}, NFKD: {NFKD} }}' # NOQA: E501 - outf.write(pat.format(source=to_code_list(m.group(1)), - NFC=to_code_list(m.group(2)), - NFD=to_code_list(m.group(3)), - NFKC=to_code_list(m.group(4)), - NFKD=to_code_list(m.group(5)))) + outf.write(",") + outf.write("\n") + pat = "{{ source: {source}, NFC: {NFC}, NFD: {NFD}, NFKC: {NFKC}, NFKD: {NFKD} }}" # NOQA: E501 + outf.write( + pat.format( + source=to_code_list(m.group(1)), + NFC=to_code_list(m.group(2)), + NFD=to_code_list(m.group(3)), + NFKC=to_code_list(m.group(4)), + NFKD=to_code_list(m.group(5)), + ) + ) not_empty = True continue m = part_pat.search(line) @@ -50,28 +55,34 @@ def convert(dir): desc = m.group(1) part = m.group(2) if part_opened: - outf.write('\n];\n') - outf.write('/* {desc} */\n'.format(desc=desc)) - outf.write('var tests_part{part} = ['.format(part=part)) + outf.write("\n];\n") + outf.write("/* {desc} */\n".format(desc=desc)) + outf.write("var tests_part{part} = [".format(part=part)) part_opened = True not_empty = False continue m = ver_pat.search(line) if m: ver = m.group(1) - outf.write('/* created from NormalizationTest-{ver}.txt */\n'.format(ver=ver)) + outf.write( + "/* created from NormalizationTest-{ver}.txt */\n".format( + ver=ver + ) + ) continue m = ignore_pat.search(line) if m: continue print("Unknown line: {0}".format(line), file=sys.stderr) if part_opened: - outf.write('\n];\n') + outf.write("\n];\n") -if __name__ == '__main__': +if __name__ == "__main__": if len(sys.argv) < 2: - print("Usage: make-normalize-generateddata-input.py PATH_TO_MOZILLA_CENTRAL", - file=sys.stderr) + print( + "Usage: make-normalize-generateddata-input.py PATH_TO_MOZILLA_CENTRAL", 
+ file=sys.stderr, + ) sys.exit(1) convert(sys.argv[1]) diff --git a/js/src/tests/parsemark.py b/js/src/tests/parsemark.py index e48f601e2bf2cb..cedfc8bc2ad7d0 100644 --- a/js/src/tests/parsemark.py +++ b/js/src/tests/parsemark.py @@ -37,7 +37,8 @@ _DIR = os.path.dirname(__file__) -JS_CODE_TEMPLATE = Template(""" +JS_CODE_TEMPLATE = Template( + """ if (typeof snarf !== 'undefined') read = snarf var contents = read("$filepath"); for (var i = 0; i < $warmup_run_count; i++) @@ -50,10 +51,11 @@ results.push(end - start); } print(results); -""") +""" +) -def gen_filepaths(dirpath, target_ext='.js'): +def gen_filepaths(dirpath, target_ext=".js"): for filename in os.listdir(dirpath): if filename.endswith(target_ext): yield os.path.join(dirpath, filename) @@ -71,19 +73,18 @@ def stddev(seq, mean): def bench(shellpath, filepath, warmup_runs, counted_runs, stfu=False): """Return a list of milliseconds for the counted runs.""" assert '"' not in filepath - code = JS_CODE_TEMPLATE.substitute(filepath=filepath, - warmup_run_count=warmup_runs, - real_run_count=counted_runs) - proc = subp.Popen([shellpath, '-e', code], stdout=subp.PIPE) + code = JS_CODE_TEMPLATE.substitute( + filepath=filepath, warmup_run_count=warmup_runs, real_run_count=counted_runs + ) + proc = subp.Popen([shellpath, "-e", code], stdout=subp.PIPE) stdout, _ = proc.communicate() - milliseconds = [float(val) for val in stdout.split(',')] + milliseconds = [float(val) for val in stdout.split(",")] mean = avg(milliseconds) sigma = stddev(milliseconds, mean) if not stfu: - print('Runs:', [int(ms) for ms in milliseconds]) - print('Mean:', mean) - print('Stddev: {:.2f} ({:.2f}% of mean)'.format( - sigma, sigma / mean * 100)) + print("Runs:", [int(ms) for ms in milliseconds]) + print("Mean:", mean) + print("Stddev: {:.2f} ({:.2f}% of mean)".format(sigma, sigma / mean * 100)) return mean, sigma @@ -93,76 +94,105 @@ def parsemark(filepaths, fbench, stfu=False): for filepath in filepaths: filename = os.path.split(filepath)[-1] if not stfu: - print('Parsemarking {}...'.format(filename)) + print("Parsemarking {}...".format(filename)) bench_map[filename] = fbench(filepath) - print('{') + print("{") for i, (filename, (avg, stddev)) in enumerate(bench_map.iteritems()): assert '"' not in filename fmt = ' {:30s}: {{"average_ms": {:6.2f}, "stddev_ms": {:6.2f}}}' if i != len(bench_map) - 1: - fmt += ',' + fmt += "," filename_str = '"{}"'.format(filename) print(fmt.format(filename_str, avg, stddev)) - print('}') - return dict((filename, dict(average_ms=avg, stddev_ms=stddev)) - for filename, (avg, stddev) in bench_map.iteritems()) + print("}") + return dict( + (filename, dict(average_ms=avg, stddev_ms=stddev)) + for filename, (avg, stddev) in bench_map.iteritems() + ) def main(): parser = optparse.OptionParser(usage=__doc__.strip()) - parser.add_option('-w', '--warmup-runs', metavar='COUNT', type=int, - default=5, - help='used to minimize test instability [%default]') - parser.add_option('-c', '--counted-runs', metavar='COUNT', type=int, - default=50, - help='timed data runs that count towards the average' - ' [%default]') - parser.add_option('-s', '--shell', metavar='PATH', - help='explicit shell location; when omitted, will look' - ' in likely places') - parser.add_option('-b', '--baseline', metavar='JSON_PATH', - dest='baseline_path', - help='json file with baseline values to ' - 'compare against') - parser.add_option('-q', '--quiet', dest='stfu', action='store_true', - default=False, - help='only print JSON to stdout [%default]') + parser.add_option( + 
"-w", + "--warmup-runs", + metavar="COUNT", + type=int, + default=5, + help="used to minimize test instability [%default]", + ) + parser.add_option( + "-c", + "--counted-runs", + metavar="COUNT", + type=int, + default=50, + help="timed data runs that count towards the average" " [%default]", + ) + parser.add_option( + "-s", + "--shell", + metavar="PATH", + help="explicit shell location; when omitted, will look" " in likely places", + ) + parser.add_option( + "-b", + "--baseline", + metavar="JSON_PATH", + dest="baseline_path", + help="json file with baseline values to " "compare against", + ) + parser.add_option( + "-q", + "--quiet", + dest="stfu", + action="store_true", + default=False, + help="only print JSON to stdout [%default]", + ) options, args = parser.parse_args() try: shellpath = args.pop(0) except IndexError: parser.print_help() print() - print('error: shellpath required', file=sys.stderr) + print("error: shellpath required", file=sys.stderr) return -1 try: dirpath = args.pop(0) except IndexError: parser.print_help() print() - print('error: dirpath required', file=sys.stderr) + print("error: dirpath required", file=sys.stderr) return -1 if not shellpath or not os.path.exists(shellpath): - print('error: could not find shell:', shellpath, file=sys.stderr) + print("error: could not find shell:", shellpath, file=sys.stderr) return -1 if options.baseline_path: if not os.path.isfile(options.baseline_path): - print('error: baseline file does not exist', file=sys.stderr) + print("error: baseline file does not exist", file=sys.stderr) return -1 if not compare_bench: - print('error: JSON support is missing, cannot compare benchmarks', - file=sys.stderr) + print( + "error: JSON support is missing, cannot compare benchmarks", + file=sys.stderr, + ) return -1 - def benchfile(filepath): return bench(shellpath, filepath, - options.warmup_runs, - options.counted_runs, - stfu=options.stfu) + def benchfile(filepath): + return bench( + shellpath, + filepath, + options.warmup_runs, + options.counted_runs, + stfu=options.stfu, + ) + bench_map = parsemark(gen_filepaths(dirpath), benchfile, options.stfu) if options.baseline_path: compare_bench.compare_immediate(bench_map, options.baseline_path) return 0 -if __name__ == '__main__': +if __name__ == "__main__": sys.exit(main()) diff --git a/js/src/tests/test/run.py b/js/src/tests/test/run.py index 76cf4e80a4927b..ee0cbcabf08294 100755 --- a/js/src/tests/test/run.py +++ b/js/src/tests/test/run.py @@ -9,11 +9,11 @@ import unittest testDir = os.path.dirname(os.path.relpath(__file__)) -OUT_DIR = os.path.join(testDir, 'out') -EXPECTED_DIR = os.path.join(testDir, 'expected') -ex = os.path.join(testDir, '..', 'test262-export.py') -importExec = os.path.join(testDir, '..', 'test262-update.py') -test262Url = 'git://github.com/tc39/test262.git' +OUT_DIR = os.path.join(testDir, "out") +EXPECTED_DIR = os.path.join(testDir, "expected") +ex = os.path.join(testDir, "..", "test262-export.py") +importExec = os.path.join(testDir, "..", "test262-update.py") +test262Url = "git://github.com/tc39/test262.git" @contextlib.contextmanager @@ -29,50 +29,49 @@ class TestExport(unittest.TestCase): maxDiff = None def exportScript(self): - relpath = os.path.relpath(os.path.join(testDir, 'fixtures', 'export')) - sp = subprocess.Popen( - [ex, relpath, '--out', OUT_DIR], - stdout=subprocess.PIPE) + relpath = os.path.relpath(os.path.join(testDir, "fixtures", "export")) + sp = subprocess.Popen([ex, relpath, "--out", OUT_DIR], stdout=subprocess.PIPE) stdout, stderr = sp.communicate() return 
dict(stdout=stdout, stderr=stderr, returncode=sp.returncode) def importLocal(self): with TemporaryDirectory() as cloneDir: - branch = 'smTempBranch' + branch = "smTempBranch" # Clone Test262 to a local branch - subprocess.check_call( - ['git', 'clone', '--depth=1', test262Url, cloneDir] - ) + subprocess.check_call(["git", "clone", "--depth=1", test262Url, cloneDir]) # Checkout to a new branch - subprocess.check_call( - ['git', '-C', cloneDir, 'checkout', '-b', branch] - ) + subprocess.check_call(["git", "-C", cloneDir, "checkout", "-b", branch]) # Make changes on the new branch # Remove test/language/export/escaped-from.js subprocess.check_call( - ['git', '-C', cloneDir, 'rm', - 'test/language/export/escaped-from.js'] + ["git", "-C", cloneDir, "rm", "test/language/export/escaped-from.js"] ) # Rename test/language/export/escaped-default.js subprocess.check_call( - ['git', '-C', cloneDir, 'mv', - 'test/language/export/escaped-default.js', - 'test/language/export/escaped-foobarbaz.js', - ] + [ + "git", + "-C", + cloneDir, + "mv", + "test/language/export/escaped-default.js", + "test/language/export/escaped-foobarbaz.js", + ] ) # Copy fixtures files - fixturesDir = os.path.join(testDir, 'fixtures', 'import', 'files') - shutil.copytree(fixturesDir, os.path.join(cloneDir, 'test', 'temp42')) + fixturesDir = os.path.join(testDir, "fixtures", "import", "files") + shutil.copytree(fixturesDir, os.path.join(cloneDir, "test", "temp42")) # Stage and Commit changes - subprocess.check_call(['git', '-C', cloneDir, 'add', '.']) - subprocess.check_call(['git', '-C', cloneDir, 'commit', '-m', '"local foo"']) + subprocess.check_call(["git", "-C", cloneDir, "add", "."]) + subprocess.check_call( + ["git", "-C", cloneDir, "commit", "-m", '"local foo"'] + ) # Run import script print("%s --local %s --out %s" % (importExec, cloneDir, OUT_DIR)) sp = subprocess.Popen( - [importExec, '--local', cloneDir, '--out', OUT_DIR], + [importExec, "--local", cloneDir, "--out", OUT_DIR], stdout=subprocess.PIPE, - stderr=subprocess.PIPE + stderr=subprocess.PIPE, ) stdoutdata, _ = sp.communicate() @@ -80,9 +79,9 @@ def importLocal(self): def isTestFile(self, filename): return not ( - filename.startswith('.') or - filename.startswith('#') or - filename.endswith('~') + filename.startswith(".") + or filename.startswith("#") + or filename.endswith("~") ) def getFiles(self, path): @@ -102,20 +101,21 @@ def compareTrees(self, targetName): self.assertListEqual( map(lambda x: os.path.relpath(x, expectedPath), expectedFiles), - map(lambda x: os.path.relpath(x, actualPath), actualFiles)) + map(lambda x: os.path.relpath(x, actualPath), actualFiles), + ) for expectedFile, actualFile in zip(expectedFiles, actualFiles): with open(expectedFile) as expectedHandle: with open(actualFile) as actualHandle: self.assertMultiLineEqual( - expectedHandle.read(), - actualHandle.read()) + expectedHandle.read(), actualHandle.read() + ) def compareContents(self, output, filePath, folder): with open(filePath, "rb") as file: expected = file.read() - expected = expected.replace('{{folder}}', folder) + expected = expected.replace("{{folder}}", folder) self.assertMultiLineEqual(output, expected) def tearDown(self): @@ -123,19 +123,17 @@ def tearDown(self): def test_export(self): result = self.exportScript() - self.assertEqual(result['returncode'], 0) - self.compareTrees('export') + self.assertEqual(result["returncode"], 0) + self.compareTrees("export") def test_import_local(self): output, returncode, folder = self.importLocal() self.assertEqual(returncode, 0) - 
self.compareTrees(os.path.join('import', 'files')) + self.compareTrees(os.path.join("import", "files")) self.compareContents( - output, - os.path.join(testDir, 'expected', 'import', 'output.txt'), - folder + output, os.path.join(testDir, "expected", "import", "output.txt"), folder ) -if __name__ == '__main__': +if __name__ == "__main__": unittest.main() diff --git a/js/src/tests/test262-export.py b/js/src/tests/test262-export.py index 5bc1a038cb3d2a..f93f3e605c7925 100755 --- a/js/src/tests/test262-export.py +++ b/js/src/tests/test262-export.py @@ -15,11 +15,20 @@ # Skip all common files used to support tests for jstests # These files are listed in the README.txt -SUPPORT_FILES = set(["browser.js", "shell.js", "template.js", "user.js", - "js-test-driver-begin.js", "js-test-driver-end.js"]) +SUPPORT_FILES = set( + [ + "browser.js", + "shell.js", + "template.js", + "user.js", + "js-test-driver-begin.js", + "js-test-driver-end.js", + ] +) FRONTMATTER_WRAPPER_PATTERN = re.compile( - r'/\*\---\n([\s]*)((?:\s|\S)*)[\n\s*]---\*/', flags=re.DOTALL) + r"/\*\---\n([\s]*)((?:\s|\S)*)[\n\s*]---\*/", flags=re.DOTALL +) def convertTestFile(source, includes): @@ -55,12 +64,12 @@ def replaceFn(matchobj): return matchobj.group() newSource = re.sub( - r'.*reportCompare\s*\(\s*(\w*)\s*,\s*(\w*)\s*(,\s*\S*)?\s*\)\s*;*\s*', + r".*reportCompare\s*\(\s*(\w*)\s*,\s*(\w*)\s*(,\s*\S*)?\s*\)\s*;*\s*", replaceFn, - source + source, ) - return re.sub(r'\breportCompare\b', "assert.sameValue", newSource) + return re.sub(r"\breportCompare\b", "assert.sameValue", newSource) def fetchReftestEntries(reftest): @@ -76,41 +85,37 @@ def fetchReftestEntries(reftest): module = False # should capture conditions to skip - matchesSkip = re.search(r'skip-if\((.*)\)', reftest) + matchesSkip = re.search(r"skip-if\((.*)\)", reftest) if matchesSkip: matches = matchesSkip.group(1).split("||") for match in matches: # captures a features list dependsOnProp = re.search( - r'!this.hasOwnProperty\([\'\"](.*?)[\'\"]\)', match) + r"!this.hasOwnProperty\([\'\"](.*?)[\'\"]\)", match + ) if dependsOnProp: features.append(dependsOnProp.group(1)) else: print("# Can't parse the following skip-if rule: %s" % match) # should capture the expected error - matchesError = re.search(r'error:\s*(\w*)', reftest) + matchesError = re.search(r"error:\s*(\w*)", reftest) if matchesError: # The metadata from the reftests won't say if it's a runtime or an # early error. This specification is required for the frontmatter tags. 
error = matchesError.group(1) # just tells if it's a module - matchesModule = re.search(r'\bmodule\b', reftest) + matchesModule = re.search(r"\bmodule\b", reftest) if matchesModule: module = True # captures any comments - matchesComments = re.search(r' -- (.*)', reftest) + matchesComments = re.search(r" -- (.*)", reftest) if matchesComments: comments = matchesComments.group(1) - return { - "features": features, - "error": error, - "module": module, - "info": comments - } + return {"features": features, "error": error, "module": module, "info": comments} def parseHeader(source): @@ -149,7 +154,7 @@ def extractMeta(source): indent, frontmatter_lines = match.groups() - unindented = re.sub('^%s' % indent, '', frontmatter_lines) + unindented = re.sub("^%s" % indent, "", frontmatter_lines) return yaml.safe_load(unindented) @@ -199,12 +204,16 @@ def cleanupMeta(meta): if "negative" in meta: # If the negative tag exists, phase needs to be present and set if meta["negative"].get("phase") not in ("early", "runtime"): - print("Warning: the negative.phase is not properly set.\n" + - "Ref https://github.com/tc39/test262/blob/main/INTERPRETING.md#negative") + print( + "Warning: the negative.phase is not properly set.\n" + + "Ref https://github.com/tc39/test262/blob/main/INTERPRETING.md#negative" + ) # If the negative tag exists, type is required if "type" not in meta["negative"]: - print("Warning: the negative.type is not set.\n" + - "Ref https://github.com/tc39/test262/blob/main/INTERPRETING.md#negative") + print( + "Warning: the negative.type is not set.\n" + + "Ref https://github.com/tc39/test262/blob/main/INTERPRETING.md#negative" + ) return meta @@ -218,8 +227,7 @@ def mergeMeta(reftest, frontmatter, includes): # Merge the meta from reftest to the frontmatter if "features" in reftest: - frontmatter.setdefault("features", []) \ - .extend(reftest.get("features", [])) + frontmatter.setdefault("features", []).extend(reftest.get("features", [])) # Only add the module flag if the value from reftest is truish if reftest.get("module"): @@ -245,13 +253,15 @@ def mergeMeta(reftest, frontmatter, includes): # specify the error phase in the generated code or fill the # phase with an empty string. "phase": "early", - "type": error + "type": error, } # Print a warning if the errors don't match elif frontmatter["negative"].get("type") != error: - print("Warning: The reftest error doesn't match the existing " + - "frontmatter error. %s != %s" % (error, - frontmatter["negative"]["type"])) + print( + "Warning: The reftest error doesn't match the existing " + + "frontmatter error. %s != %s" + % (error, frontmatter["negative"]["type"]) + ) # Add the shell specific includes if includes: @@ -268,10 +278,14 @@ def insertCopyrightLines(source): lines = [] - if not re.match(r'\/\/\s+Copyright.*\. All rights reserved.', source): + if not re.match(r"\/\/\s+Copyright.*\. All rights reserved.", source): year = date.today().year - lines.append("// Copyright (C) %s Mozilla Corporation. All rights reserved." % year) - lines.append("// This code is governed by the BSD license found in the LICENSE file.") + lines.append( + "// Copyright (C) %s Mozilla Corporation. All rights reserved." % year + ) + lines.append( + "// This code is governed by the BSD license found in the LICENSE file." 
+ ) lines.append("\n") return "\n".join(lines) + source @@ -289,11 +303,21 @@ def insertMeta(source, frontmatter): for (key, value) in frontmatter.items(): if key in ("description", "info"): lines.append("%s: |" % key) - lines.append(" " + yaml.dump(value, encoding="utf8", - ).strip().replace('\n...', '')) + lines.append( + " " + + yaml.dump( + value, + encoding="utf8", + ) + .strip() + .replace("\n...", "") + ) else: - lines.append(yaml.dump({key: value}, encoding="utf8", - default_flow_style=False).strip()) + lines.append( + yaml.dump( + {key: value}, encoding="utf8", default_flow_style=False + ).strip() + ) lines.append("---*/") @@ -312,7 +336,7 @@ def findAndCopyIncludes(dirPath, baseDir, includeDir): # Recurse down all folders in the relative path until # we reach the base directory of shell.js include files. # Each directory will have a shell.js file to copy. - while (relPath): + while relPath: # find the shell.js shellFile = os.path.join(baseDir, relPath, "shell.js") @@ -391,7 +415,9 @@ def exportTest262(args): continue filePath = os.path.join(dirPath, fileName) - testName = os.path.join(fullRelPath, fileName) # captures folder(s)+filename + testName = os.path.join( + fullRelPath, fileName + ) # captures folder(s)+filename # Copy non-test files as is. (_, fileExt) = os.path.splitext(fileName) @@ -423,15 +449,25 @@ def exportTest262(args): if "/".join(os.path.normpath(os.getcwd()).split(os.sep)[-3:]) != "js/src/tests": raise RuntimeError("%s must be run from js/src/tests" % sys.argv[0]) - parser = argparse.ArgumentParser(description="Export tests to match Test262 file compliance.") - parser.add_argument("--out", default="test262/export", - help="Output directory. Any existing directory will be removed! " - "(default: %(default)s)") - parser.add_argument("--exportshellincludes", action="store_true", - help="Optionally export shell.js files as includes in exported tests. " - "Only use for testing, do not use for exporting to test262 (test262 tests " - "should have as few dependencies as possible).") - parser.add_argument("src", nargs="+", help="Source folder with test files to export") + parser = argparse.ArgumentParser( + description="Export tests to match Test262 file compliance." + ) + parser.add_argument( + "--out", + default="test262/export", + help="Output directory. Any existing directory will be removed! " + "(default: %(default)s)", + ) + parser.add_argument( + "--exportshellincludes", + action="store_true", + help="Optionally export shell.js files as includes in exported tests. " + "Only use for testing, do not use for exporting to test262 (test262 tests " + "should have as few dependencies as possible).", + ) + parser.add_argument( + "src", nargs="+", help="Source folder with test files to export" + ) parser.set_defaults(func=exportTest262) args = parser.parse_args() args.func(args) diff --git a/js/src/tests/test262-update.py b/js/src/tests/test262-update.py index 32de8a5691802d..e14879ce39e331 100755 --- a/js/src/tests/test262-update.py +++ b/js/src/tests/test262-update.py @@ -19,26 +19,30 @@ from operator import itemgetter # Skip all tests which use features not supported in SpiderMonkey. 
-UNSUPPORTED_FEATURES = set([ - "tail-call-optimization", - "regexp-match-indices", - "Intl.DateTimeFormat-quarter", - "Intl.Segmenter", - "top-level-await", - "Atomics.waitAsync", - "legacy-regexp", -]) +UNSUPPORTED_FEATURES = set( + [ + "tail-call-optimization", + "regexp-match-indices", + "Intl.DateTimeFormat-quarter", + "Intl.Segmenter", + "top-level-await", + "Atomics.waitAsync", + "legacy-regexp", + ] +) FEATURE_CHECK_NEEDED = { "Atomics": "!this.hasOwnProperty('Atomics')", "FinalizationRegistry": "!this.hasOwnProperty('FinalizationRegistry')", "SharedArrayBuffer": "!this.hasOwnProperty('SharedArrayBuffer')", "WeakRef": "!this.hasOwnProperty('WeakRef')", } -RELEASE_OR_BETA = set([ - "Intl.DateTimeFormat-fractionalSecondDigits", - "Intl.DateTimeFormat-dayPeriod", - "Intl.DateTimeFormat-formatRange", -]) +RELEASE_OR_BETA = set( + [ + "Intl.DateTimeFormat-fractionalSecondDigits", + "Intl.DateTimeFormat-dayPeriod", + "Intl.DateTimeFormat-formatRange", + ] +) SHELL_OPTIONS = { "class-fields-private": "--enable-private-fields", "class-static-fields-private": "--enable-private-fields", @@ -172,7 +176,9 @@ def addSuffixToFileName(fileName, suffix): return filePath + suffix + ext -def writeShellAndBrowserFiles(test262OutDir, harnessDir, includesMap, localIncludesMap, relPath): +def writeShellAndBrowserFiles( + test262OutDir, harnessDir, includesMap, localIncludesMap, relPath +): """ Generate the shell.js and browser.js files for the test harness. """ @@ -197,19 +203,25 @@ def findIncludes(): def readIncludeFile(filePath): with io.open(filePath, "rb") as includeFile: - return b"// file: %s\n%s" % (os.path.basename(filePath).encode("utf-8"), - includeFile.read()) + return b"// file: %s\n%s" % ( + os.path.basename(filePath).encode("utf-8"), + includeFile.read(), + ) localIncludes = localIncludesMap[relPath] if relPath in localIncludesMap else [] # Concatenate all includes files. - includeSource = b"\n".join(map(readIncludeFile, chain( - # The requested include files. - map(partial(os.path.join, harnessDir), sorted(findIncludes())), - - # And additional local include files. - map(partial(os.path.join, os.getcwd()), sorted(localIncludes)) - ))) + includeSource = b"\n".join( + map( + readIncludeFile, + chain( + # The requested include files. + map(partial(os.path.join, harnessDir), sorted(findIncludes())), + # And additional local include files. + map(partial(os.path.join, os.getcwd()), sorted(localIncludes)), + ), + ) + ) # Write the concatenated include sources to shell.js. with io.open(os.path.join(test262OutDir, relPath, "shell.js"), "wb") as shellFile: @@ -218,7 +230,9 @@ def readIncludeFile(filePath): shellFile.write(includeSource) # The browser.js file is always empty for test262 tests. - with io.open(os.path.join(test262OutDir, relPath, "browser.js"), "wb") as browserFile: + with io.open( + os.path.join(test262OutDir, relPath, "browser.js"), "wb" + ) as browserFile: browserFile.write(b"") @@ -271,15 +285,17 @@ def convertTestFile(test262parser, testSource, testName, includeSet, strictTests # Test262 tests cannot be both "negative" and "async". (In principle a # negative async test is permitted when the error phase is not "parse" or # the error type is not SyntaxError, but no such tests exist now.) - assert not (isNegative and isAsync), \ - "Can't have both async and negative attributes: %s" % testName + assert not (isNegative and isAsync), ( + "Can't have both async and negative attributes: %s" % testName + ) # Only async tests may use the $DONE function. 
However, negative parse # tests may "use" the $DONE function (of course they don't actually use it!) # without specifying the "async" attribute. Otherwise, $DONE must not # appear in the test. - assert b"$DONE" not in testSource or isAsync or isNegative, \ - "Missing async attribute in: %s" % testName + assert b"$DONE" not in testSource or isAsync or isNegative, ( + "Missing async attribute in: %s" % testName + ) # When the "module" attribute is set, the source code is module code. isModule = "module" in testRec @@ -302,26 +318,47 @@ def convertTestFile(test262parser, testSource, testName, includeSet, strictTests else: releaseOrBeta = [f for f in testRec["features"] if f in RELEASE_OR_BETA] if releaseOrBeta: - refTestSkipIf.append(("release_or_beta", - "%s is not released yet" % ",".join(releaseOrBeta))) + refTestSkipIf.append( + ( + "release_or_beta", + "%s is not released yet" % ",".join(releaseOrBeta), + ) + ) - featureCheckNeeded = [f for f in testRec["features"] if f in FEATURE_CHECK_NEEDED] + featureCheckNeeded = [ + f for f in testRec["features"] if f in FEATURE_CHECK_NEEDED + ] if featureCheckNeeded: - refTestSkipIf.append(("||".join([FEATURE_CHECK_NEEDED[f] - for f in featureCheckNeeded]), - "%s is not enabled unconditionally" % ",".join( - featureCheckNeeded))) + refTestSkipIf.append( + ( + "||".join( + [FEATURE_CHECK_NEEDED[f] for f in featureCheckNeeded] + ), + "%s is not enabled unconditionally" + % ",".join(featureCheckNeeded), + ) + ) - if "Atomics" in testRec["features"] and "SharedArrayBuffer" in testRec["features"]: - refTestSkipIf.append(("(this.hasOwnProperty('getBuildConfiguration')" - "&&getBuildConfiguration()['arm64-simulator'])", - "ARM64 Simulator cannot emulate atomics")) + if ( + "Atomics" in testRec["features"] + and "SharedArrayBuffer" in testRec["features"] + ): + refTestSkipIf.append( + ( + "(this.hasOwnProperty('getBuildConfiguration')" + "&&getBuildConfiguration()['arm64-simulator'])", + "ARM64 Simulator cannot emulate atomics", + ) + ) - shellOptions = {SHELL_OPTIONS[f] for f in testRec["features"] if f in SHELL_OPTIONS} + shellOptions = { + SHELL_OPTIONS[f] for f in testRec["features"] if f in SHELL_OPTIONS + } if shellOptions: refTestSkipIf.append(("!xulRuntime.shell", "requires shell-options")) - refTestOptions.extend(("shell-option({})".format(opt) - for opt in shellOptions)) + refTestOptions.extend( + ("shell-option({})".format(opt) for opt in shellOptions) + ) # Includes for every test file in a directory is collected in a single # shell.js file per directory level. This is done to avoid adding all @@ -336,8 +373,9 @@ def convertTestFile(test262parser, testSource, testName, includeSet, strictTests else: testEpilogue = "" - (terms, comments) = createRefTestEntry(refTestOptions, refTestSkip, refTestSkipIf, errorType, - isModule, isAsync) + (terms, comments) = createRefTestEntry( + refTestOptions, refTestSkip, refTestSkipIf, errorType, isModule, isAsync + ) if raw: refTest = "" externRefTest = (terms, comments) @@ -347,8 +385,9 @@ def convertTestFile(test262parser, testSource, testName, includeSet, strictTests # Don't write a strict-mode variant for raw or module files. noStrictVariant = raw or isModule - assert not (noStrictVariant and (onlyStrict or noStrict)),\ + assert not (noStrictVariant and (onlyStrict or noStrict)), ( "Unexpected onlyStrict or noStrict attribute: %s" % testName + ) # Write non-strict mode test. 
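The refTestSkipIf logic above consults the feature tables defined near the top of test262-update.py. The sketch below restates that mapping with abbreviated tables and a hypothetical helper name, purely to make the flow easier to follow; it is not the script's actual code:

RELEASE_OR_BETA = {"Intl.DateTimeFormat-dayPeriod"}
FEATURE_CHECK_NEEDED = {"Atomics": "!this.hasOwnProperty('Atomics')"}


def skip_if_terms(features):
    # Illustrative only; the real script builds these tuples inside convertTestFile.
    terms = []
    beta = [f for f in features if f in RELEASE_OR_BETA]
    if beta:
        terms.append(("release_or_beta", "%s is not released yet" % ",".join(beta)))
    checked = [f for f in features if f in FEATURE_CHECK_NEEDED]
    if checked:
        terms.append(
            (
                "||".join(FEATURE_CHECK_NEEDED[f] for f in checked),
                "%s is not enabled unconditionally" % ",".join(checked),
            )
        )
    return terms


print(skip_if_terms(["Atomics", "Intl.DateTimeFormat-dayPeriod"]))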
if noStrictVariant or noStrict or not onlyStrict: @@ -380,8 +419,9 @@ def convertFixtureFile(fixtureSource, fixtureName): isModule = False isAsync = False - (terms, comments) = createRefTestEntry(refTestOptions, refTestSkip, refTestSkipIf, errorType, - isModule, isAsync) + (terms, comments) = createRefTestEntry( + refTestOptions, refTestSkip, refTestSkipIf, errorType, isModule, isAsync + ) refTest = createRefTestLine(terms, comments) source = createSource(fixtureSource, refTest, "", "") @@ -417,18 +457,29 @@ def process_test262(test262Dir, test262OutDir, strictTests, externManifests): includesMap[""].update(["propertyHelper.js", "compareArray.js"]) # Write the root shell.js file. - writeShellAndBrowserFiles(test262OutDir, harnessDir, includesMap, localIncludesMap, "") + writeShellAndBrowserFiles( + test262OutDir, harnessDir, includesMap, localIncludesMap, "" + ) # Additional explicit includes inserted at well-chosen locations to reduce # code duplication in shell.js files. explicitIncludes = {} - explicitIncludes[os.path.join("built-ins", "Atomics")] = ["testAtomics.js", - "testTypedArray.js"] - explicitIncludes[os.path.join("built-ins", "DataView")] = ["byteConversionValues.js"] + explicitIncludes[os.path.join("built-ins", "Atomics")] = [ + "testAtomics.js", + "testTypedArray.js", + ] + explicitIncludes[os.path.join("built-ins", "DataView")] = [ + "byteConversionValues.js" + ] explicitIncludes[os.path.join("built-ins", "Promise")] = ["promiseHelper.js"] - explicitIncludes[os.path.join("built-ins", "TypedArray")] = ["byteConversionValues.js", - "detachArrayBuffer.js", "nans.js"] - explicitIncludes[os.path.join("built-ins", "TypedArrays")] = ["detachArrayBuffer.js"] + explicitIncludes[os.path.join("built-ins", "TypedArray")] = [ + "byteConversionValues.js", + "detachArrayBuffer.js", + "nans.js", + ] + explicitIncludes[os.path.join("built-ins", "TypedArrays")] = [ + "detachArrayBuffer.js" + ] # Intl.DisplayNames isn't yet enabled by default. localIncludesMap[os.path.join("intl402")] = ["test262-intl-displaynames.js"] @@ -440,8 +491,9 @@ def process_test262(test262Dir, test262OutDir, strictTests, externManifests): continue # Skip creating a "prs" directory if it already exists - if relPath not in ("prs", "local") and not os.path.exists(os.path.join(test262OutDir, - relPath)): + if relPath not in ("prs", "local") and not os.path.exists( + os.path.join(test262OutDir, relPath) + ): os.makedirs(os.path.join(test262OutDir, relPath)) includeSet = set() @@ -472,21 +524,25 @@ def process_test262(test262Dir, test262OutDir, strictTests, externManifests): if isFixtureFile: convert = convertFixtureFile(testSource, testName) else: - convert = convertTestFile(test262parser, testSource, testName, - includeSet, strictTests) + convert = convertTestFile( + test262parser, testSource, testName, includeSet, strictTests + ) for (newFileName, newSource, externRefTest) in convert: writeTestFile(test262OutDir, newFileName, newSource) if externRefTest is not None: - externManifests.append({ - "name": newFileName, - "reftest": externRefTest, - }) + externManifests.append( + { + "name": newFileName, + "reftest": externRefTest, + } + ) # Add shell.js and browers.js files for the current directory. 
- writeShellAndBrowserFiles(test262OutDir, harnessDir, - includesMap, localIncludesMap, relPath) + writeShellAndBrowserFiles( + test262OutDir, harnessDir, includesMap, localIncludesMap, relPath + ) def fetch_local_changes(inDir, outDir, srcDir, strictTests): @@ -511,12 +567,13 @@ def fetch_local_changes(inDir, outDir, srcDir, strictTests): if status.strip(): raise RuntimeError( "Please commit files and cleanup the local test262 folder before importing files.\n" - "Current status: \n%s" - % status) + "Current status: \n%s" % status + ) # Captures the branch name to be used on the output branchName = subprocess.check_output( - ("git -C %s rev-parse --abbrev-ref HEAD" % srcDir).split(" ")).split("\n")[0] + ("git -C %s rev-parse --abbrev-ref HEAD" % srcDir).split(" ") + ).split("\n")[0] # Fetches the file names to import files = subprocess.check_output( @@ -546,9 +603,14 @@ def fetch_local_changes(inDir, outDir, srcDir, strictTests): print("From the branch %s in %s \n" % (branchName, srcDir)) print("Files being copied to the local folder: \n%s" % files) if deletedFiles: - print("Deleted files (use this list to update the skip list): \n%s" % deletedFiles) + print( + "Deleted files (use this list to update the skip list): \n%s" % deletedFiles + ) if modifiedFiles: - print("Modified files (use this list to update the skip list): \n%s" % modifiedFiles) + print( + "Modified files (use this list to update the skip list): \n%s" + % modifiedFiles + ) if renamedFiles: print("Renamed files (already added with the new names): \n%s" % renamedFiles) @@ -560,8 +622,7 @@ def fetch_local_changes(inDir, outDir, srcDir, strictTests): os.makedirs(fileTree) shutil.copyfile( - os.path.join(srcDir, f), - os.path.join(fileTree, os.path.basename(f)) + os.path.join(srcDir, f), os.path.join(fileTree, os.path.basename(f)) ) # Extras from Test262. 
Copy the current support folders - including the @@ -591,16 +652,20 @@ def fetch_pr_files(inDir, outDir, prNumber, strictTests): # folder can be discarded from here shutil.rmtree(os.path.join(inDir, "test")) - prRequest = requests.get("https://api.github.com/repos/tc39/test262/pulls/%s" % prNumber) + prRequest = requests.get( + "https://api.github.com/repos/tc39/test262/pulls/%s" % prNumber + ) prRequest.raise_for_status() pr = prRequest.json() - if (pr["state"] != "open"): + if pr["state"] != "open": # Closed PR, remove respective files from folder return print("PR %s is closed" % prNumber) - files = requests.get("https://api.github.com/repos/tc39/test262/pulls/%s/files" % prNumber) + files = requests.get( + "https://api.github.com/repos/tc39/test262/pulls/%s/files" % prNumber + ) files.raise_for_status() for item in files.json(): @@ -627,7 +692,7 @@ def fetch_pr_files(inDir, outDir, prNumber, strictTests): os.makedirs(filePathDirs) with io.open(os.path.join(inDir, *filename.split("/")), "wb") as output_file: - output_file.write(fileText.encode('utf8')) + output_file.write(fileText.encode("utf8")) process_test262(inDir, prTestsOutDir, strictTests, []) @@ -676,7 +741,7 @@ def general_update(inDir, outDir, strictTests): entry = "%s script %s%s\n" % ( terms, externManifest["name"], - (" # %s" % comments) if comments else "" + (" # %s" % comments) if comments else "", ) manifestFile.write(entry.encode("utf-8")) @@ -711,11 +776,13 @@ def update_test262(args): return fetch_local_changes(inDir, outDir, srcDir, strictTests) if revision == "HEAD": - subprocess.check_call(["git", "clone", "--depth=1", - "--branch=%s" % branch, url, inDir]) + subprocess.check_call( + ["git", "clone", "--depth=1", "--branch=%s" % branch, url, inDir] + ) else: - subprocess.check_call(["git", "clone", "--single-branch", - "--branch=%s" % branch, url, inDir]) + subprocess.check_call( + ["git", "clone", "--single-branch", "--branch=%s" % branch, url, inDir] + ) subprocess.check_call(["git", "-C", inDir, "reset", "--hard", revision]) # If a PR number is provided, fetches only the new and modified files @@ -736,22 +803,37 @@ def update_test262(args): raise RuntimeError("%s must be run from js/src/tests" % sys.argv[0]) parser = argparse.ArgumentParser(description="Update the test262 test suite.") - parser.add_argument("--url", default="git://github.com/tc39/test262.git", - help="URL to git repository (default: %(default)s)") - parser.add_argument("--branch", default="main", - help="Git branch (default: %(default)s)") - parser.add_argument("--revision", default="HEAD", - help="Git revision (default: %(default)s)") - parser.add_argument("--out", default="test262", - help="Output directory. Any existing directory will be removed!" - "(default: %(default)s)") - parser.add_argument("--pull", - help="Import contents from a Pull Request specified by its number") - parser.add_argument("--local", - help="Import new and modified contents from a local folder, a new folder " - "will be created on local/branch_name") - parser.add_argument("--strict", default=False, action="store_true", - help="Generate additional strict mode tests. 
Not enabled by default.") + parser.add_argument( + "--url", + default="git://github.com/tc39/test262.git", + help="URL to git repository (default: %(default)s)", + ) + parser.add_argument( + "--branch", default="main", help="Git branch (default: %(default)s)" + ) + parser.add_argument( + "--revision", default="HEAD", help="Git revision (default: %(default)s)" + ) + parser.add_argument( + "--out", + default="test262", + help="Output directory. Any existing directory will be removed!" + "(default: %(default)s)", + ) + parser.add_argument( + "--pull", help="Import contents from a Pull Request specified by its number" + ) + parser.add_argument( + "--local", + help="Import new and modified contents from a local folder, a new folder " + "will be created on local/branch_name", + ) + parser.add_argument( + "--strict", + default=False, + action="store_true", + help="Generate additional strict mode tests. Not enabled by default.", + ) parser.set_defaults(func=update_test262) args = parser.parse_args() args.func(args) diff --git a/js/src/vm/jsopcode.py b/js/src/vm/jsopcode.py index b49f360ab20d9b..4f3ff66d8c8a9a 100644 --- a/js/src/vm/jsopcode.py +++ b/js/src/vm/jsopcode.py @@ -10,21 +10,21 @@ def codify(text): - text = re.sub(quoted_pat, '\\1\\2', text) - text = re.sub(js_pat, '\\1\\2', text) + text = re.sub(quoted_pat, "\\1\\2", text) + text = re.sub(js_pat, "\\1\\2", text) return text -space_star_space_pat = re.compile('^\s*\* ?', re.M) +space_star_space_pat = re.compile("^\s*\* ?", re.M) def get_comment_body(comment): - return re.sub(space_star_space_pat, '', comment).split('\n') + return re.sub(space_star_space_pat, "", comment).split("\n") quote_pat = re.compile('"([^"]+)"') -str_pat = re.compile('js_([^_]+)_str') +str_pat = re.compile("js_([^_]+)_str") def parse_name(s): @@ -37,34 +37,34 @@ def parse_name(s): return s -csv_pat = re.compile(', *') +csv_pat = re.compile(", *") def parse_csv(s): a = csv_pat.split(s) - if len(a) == 1 and a[0] == '': + if len(a) == 1 and a[0] == "": return [] return a def get_stack_count(stack): - if stack == '': + if stack == "": return 0 - if '...' in stack: + if "..." 
in stack: return -1 - return len(stack.split(',')) + return len(stack.split(",")) def parse_index(comment): index = [] current_types = None - category_name = '' - category_pat = re.compile('\[([^\]]+)\]') + category_name = "" + category_pat = re.compile("\[([^\]]+)\]") for line in get_comment_body(comment): m = category_pat.search(line) if m: category_name = m.group(1) - if category_name == 'Index': + if category_name == "Index": continue current_types = [] index.append((category_name, current_types)) @@ -75,6 +75,7 @@ def parse_index(comment): return index + # Holds the information stored in the comment with the following format: # /* # * {desc} @@ -87,12 +88,13 @@ def parse_index(comment): class CommentInfo: def __init__(self): - self.desc = '' - self.category_name = '' - self.type_name = '' - self.operands = '' - self.stack_uses = '' - self.stack_defs = '' + self.desc = "" + self.category_name = "" + self.type_name = "" + self.operands = "" + self.stack_uses = "" + self.stack_defs = "" + # Holds the information stored in the macro with the following format: # MACRO({op}, {op_snake}, {token}, {length}, {nuses}, {ndefs}, {format}) @@ -101,14 +103,14 @@ def __init__(self): class OpcodeInfo: def __init__(self, value, comment_info): - self.op = '' - self.op_snake = '' + self.op = "" + self.op_snake = "" self.value = value - self.token = '' - self.length = '' - self.nuses = '' - self.ndefs = '' - self.format_ = '' + self.token = "" + self.length = "" + self.nuses = "" + self.ndefs = "" + self.format_ = "" self.operands_array = [] self.stack_uses_array = [] @@ -133,7 +135,7 @@ def __init__(self, value, comment_info): # MACRO(JSOP_DIV, ...) self.group = [] - self.sort_key = '' + self.sort_key = "" def find_by_name(list, name): @@ -147,53 +149,71 @@ def find_by_name(list, name): def add_to_index(index, opcode): types = find_by_name(index, opcode.category_name) if types is None: - raise Exception('Category is not listed in index: ' - '{name}'.format(name=opcode.category_name)) + raise Exception( + "Category is not listed in index: " + "{name}".format(name=opcode.category_name) + ) opcodes = find_by_name(types, opcode.type_name) if opcodes is None: if opcode.type_name: - raise Exception('Type is not listed in {category}: ' - '{name}'.format(category=opcode.category_name, - name=opcode.type_name)) + raise Exception( + "Type is not listed in {category}: " + "{name}".format(category=opcode.category_name, name=opcode.type_name) + ) types.append((opcode.type_name, [opcode])) return opcodes.append(opcode) -tag_pat = re.compile('^\s*[A-Za-z]+:\s*|\s*$') +tag_pat = re.compile("^\s*[A-Za-z]+:\s*|\s*$") def get_tag_value(line): - return re.sub(tag_pat, '', line) + return re.sub(tag_pat, "", line) RUST_OR_CPP_KEYWORDS = { - 'and', 'case', 'default', 'double', 'false', 'goto', 'in', 'new', 'not', 'or', 'return', - 'throw', 'true', 'try', 'typeof', 'void', + "and", + "case", + "default", + "double", + "false", + "goto", + "in", + "new", + "not", + "or", + "return", + "throw", + "true", + "try", + "typeof", + "void", } def get_opcodes(dir): - iter_pat = re.compile(r"/\*(.*?)\*/" # either a documentation comment... - r"|" - r"MACRO\(" # or a MACRO(...) call - r"(?P[^,]+),\s*" - r"(?P[^,]+),\s*" - r"(?P[^,]+,)\s*" - r"(?P[0-9\-]+),\s*" - r"(?P[0-9\-]+),\s*" - r"(?P[0-9\-]+),\s*" - r"(?P[^\)]+)" - r"\)", re.S) - stack_pat = re.compile(r"^(?P.*?)" - r"\s*=>\s*" - r"(?P.*?)$") + iter_pat = re.compile( + r"/\*(.*?)\*/" # either a documentation comment... + r"|" + r"MACRO\(" # or a MACRO(...) 
call + r"(?P[^,]+),\s*" + r"(?P[^,]+),\s*" + r"(?P[^,]+,)\s*" + r"(?P[0-9\-]+),\s*" + r"(?P[0-9\-]+),\s*" + r"(?P[0-9\-]+),\s*" + r"(?P[^\)]+)" + r"\)", + re.S, + ) + stack_pat = re.compile(r"^(?P.*?)" r"\s*=>\s*" r"(?P.*?)$") opcodes = dict() index = [] - with open('{dir}/js/src/vm/Opcodes.h'.format(dir=dir), 'r', encoding='utf-8') as f: + with open("{dir}/js/src/vm/Opcodes.h".format(dir=dir), "r", encoding="utf-8") as f: data = f.read() comment_info = None @@ -205,49 +225,52 @@ def get_opcodes(dir): for m in re.finditer(iter_pat, data): comment = m.group(1) - op = m.group('op') + op = m.group("op") if comment: - if '[Index]' in comment: + if "[Index]" in comment: index = parse_index(comment) continue - if 'Operands:' not in comment: + if "Operands:" not in comment: continue group_head = None comment_info = CommentInfo() - state = 'desc' - stack = '' - desc = '' + state = "desc" + stack = "" + desc = "" for line in get_comment_body(comment): - if line.startswith(' Category:'): - state = 'category' + if line.startswith(" Category:"): + state = "category" comment_info.category_name = get_tag_value(line) - elif line.startswith(' Type:'): - state = 'type' + elif line.startswith(" Type:"): + state = "type" comment_info.type_name = get_tag_value(line) - elif line.startswith(' Operands:'): - state = 'operands' + elif line.startswith(" Operands:"): + state = "operands" comment_info.operands = get_tag_value(line) - elif line.startswith(' Stack:'): - state = 'stack' + elif line.startswith(" Stack:"): + state = "stack" stack = get_tag_value(line) - elif state == 'desc': + elif state == "desc": desc += line + "\n" - elif line.startswith(' '): + elif line.startswith(" "): if line.isspace(): pass - elif state == 'operands': - comment_info.operands += ' ' + line.strip() - elif state == 'stack': - stack += ' ' + line.strip() + elif state == "operands": + comment_info.operands += " " + line.strip() + elif state == "stack": + stack += " " + line.strip() else: - raise ValueError("unrecognized line in comment: {!r}\n\nfull comment was:\n{}" - .format(line, comment)) + raise ValueError( + "unrecognized line in comment: {!r}\n\nfull comment was:\n{}".format( + line, comment + ) + ) comment_info.desc = desc @@ -257,65 +280,77 @@ def get_opcodes(dir): m2 = stack_pat.search(stack) if m2: - comment_info.stack_uses = m2.group('uses') - comment_info.stack_defs = m2.group('defs') + comment_info.stack_uses = m2.group("uses") + comment_info.stack_defs = m2.group("defs") else: assert op is not None opcode = OpcodeInfo(next_opcode_value, comment_info) next_opcode_value += 1 opcode.op = op - opcode.op_snake = m.group('op_snake') - opcode.token = parse_name(m.group('token')) - opcode.length = m.group('length') - opcode.nuses = m.group('nuses') - opcode.ndefs = m.group('ndefs') - opcode.format_ = m.group('format').split('|') - - expected_snake = re.sub(r'(?Format: {format}\n'.format(format=', '.join(format)) + return "" + return "
Format: {format}
\n".format(format=", ".join(format)) def maybe_escape(value, format_str, fallback=""): @@ -100,7 +107,7 @@ def maybe_escape(value, format_str, fallback=""): def print_opcode(opcode): opcodes = [opcode] + opcode.group - names = ', '.join(maybe_escape(code.op, "{}") for code in opcodes) + names = ", ".join(maybe_escape(code.op, "{}") for code in opcodes) operands = maybe_escape(opcode.operands, "
Operands: ({})
\n") stack_uses = maybe_escape(opcode.stack_uses, "{} ") stack_defs = maybe_escape(opcode.stack_defs, " {}") @@ -109,33 +116,35 @@ def print_opcode(opcode): else: stack = "" - print(OPCODE_FORMAT.format( - id=opcodes[0].op, - names=names, - operands=operands, - stack=stack, - desc=markdown.markdown(opcode.desc), - format=format_format(opcode.format_), - )) + print( + OPCODE_FORMAT.format( + id=opcodes[0].op, + names=names, + operands=operands, + stack=stack, + desc=markdown.markdown(opcode.desc), + format=format_format(opcode.format_), + ) + ) id_cache = dict() id_count = dict() -def make_element_id(category, type=''): - key = '{}:{}'.format(category, type) +def make_element_id(category, type=""): + key = "{}:{}".format(category, type) if key in id_cache: return id_cache[key] - if type == '': - id = category.replace(' ', '_') + if type == "": + id = category.replace(" ", "_") else: - id = type.replace(' ', '_') + id = type.replace(" ", "_") if id in id_count: id_count[id] += 1 - id = '{}_{}'.format(id, id_count[id]) + id = "{}_{}".format(id, id_count[id]) else: id_count[id] = 1 @@ -144,36 +153,44 @@ def make_element_id(category, type=''): def print_doc(index): - print("""
{{{{SpiderMonkeySidebar("Internals")}}}}
+ print( + """
{{{{SpiderMonkeySidebar("Internals")}}}}

Bytecode Listing

This document is automatically generated from Opcodes.h by make_opcode_doc.py.

-""".format(source_base=SOURCE_BASE)) +""".format( + source_base=SOURCE_BASE + ) + ) for (category_name, types) in index: - print('

{name}

'.format(name=category_name, - id=make_element_id(category_name))) + print( + '

{name}

'.format( + name=category_name, id=make_element_id(category_name) + ) + ) for (type_name, opcodes) in types: if type_name: - print('

{name}

'.format( - name=type_name, - id=make_element_id(category_name, type_name))) - print('
') + print( + '

{name}

'.format( + name=type_name, id=make_element_id(category_name, type_name) + ) + ) + print("
") for opcode in opcodes: print_opcode(opcode) - print('
') + print("
") -if __name__ == '__main__': +if __name__ == "__main__": if len(sys.argv) != 1: - print("Usage: mach python make_opcode_doc.py", - file=sys.stderr) + print("Usage: mach python make_opcode_doc.py", file=sys.stderr) sys.exit(1) js_src_vm_dir = os.path.dirname(os.path.realpath(__file__)) - root_dir = os.path.abspath(os.path.join(js_src_vm_dir, '..', '..', '..')) + root_dir = os.path.abspath(os.path.join(js_src_vm_dir, "..", "..", "..")) index, _ = jsopcode.get_opcodes(root_dir) print_doc(index) diff --git a/js/src/wasm/moz.build b/js/src/wasm/moz.build index 369e2bb492e979..d6f2c87fa297d7 100644 --- a/js/src/wasm/moz.build +++ b/js/src/wasm/moz.build @@ -4,48 +4,45 @@ # License, v. 2.0. If a copy of the MPL was not distributed with this # file, You can obtain one at http://mozilla.org/MPL/2.0/. -FINAL_LIBRARY = 'js' +FINAL_LIBRARY = "js" # Includes should be relative to parent path -LOCAL_INCLUDES += [ - '!..', - '..' -] +LOCAL_INCLUDES += ["!..", ".."] -include('../js-config.mozbuild') -include('../js-cxxflags.mozbuild') +include("../js-config.mozbuild") +include("../js-cxxflags.mozbuild") -if CONFIG['ENABLE_WASM_CRANELIFT']: +if CONFIG["ENABLE_WASM_CRANELIFT"]: UNIFIED_SOURCES += [ - 'WasmCraneliftCompile.cpp', + "WasmCraneliftCompile.cpp", ] UNIFIED_SOURCES += [ - 'AsmJS.cpp', - 'TypedObject.cpp', - 'WasmBaselineCompile.cpp', - 'WasmBuiltins.cpp', - 'WasmCode.cpp', - 'WasmCompile.cpp', - 'WasmDebug.cpp', - 'WasmFrameIter.cpp', - 'WasmGC.cpp', - 'WasmGenerator.cpp', - 'WasmInstance.cpp', - 'WasmIonCompile.cpp', - 'WasmJS.cpp', - 'WasmModule.cpp', - 'WasmOpIter.cpp', - 'WasmProcess.cpp', - 'WasmRealm.cpp', - 'WasmSignalHandlers.cpp', - 'WasmStubs.cpp', - 'WasmTable.cpp', - 'WasmTypes.cpp', - 'WasmValidate.cpp' + "AsmJS.cpp", + "TypedObject.cpp", + "WasmBaselineCompile.cpp", + "WasmBuiltins.cpp", + "WasmCode.cpp", + "WasmCompile.cpp", + "WasmDebug.cpp", + "WasmFrameIter.cpp", + "WasmGC.cpp", + "WasmGenerator.cpp", + "WasmInstance.cpp", + "WasmIonCompile.cpp", + "WasmJS.cpp", + "WasmModule.cpp", + "WasmOpIter.cpp", + "WasmProcess.cpp", + "WasmRealm.cpp", + "WasmSignalHandlers.cpp", + "WasmStubs.cpp", + "WasmTable.cpp", + "WasmTypes.cpp", + "WasmValidate.cpp", ] # Make sure all WebAssembly code is built with libfuzzer # coverage instrumentation in FUZZING mode. -if CONFIG['FUZZING_INTERFACES'] and CONFIG['LIBFUZZER']: - include('/tools/fuzzing/libfuzzer-config.mozbuild') +if CONFIG["FUZZING_INTERFACES"] and CONFIG["LIBFUZZER"]: + include("/tools/fuzzing/libfuzzer-config.mozbuild") diff --git a/js/sub.configure b/js/sub.configure index a972f3da9f5e69..0d1443d07fa680 100644 --- a/js/sub.configure +++ b/js/sub.configure @@ -4,10 +4,11 @@ # License, v. 2.0. If a copy of the MPL was not distributed with this # file, You can obtain one at http://mozilla.org/MPL/2.0/. 
+ @depends(check_build_environment) -@imports('logging') -@imports(_from='__builtin__', _import='object') -@imports(_from='mozbuild.configure.util', _import='ConfigureOutputHandler') +@imports("logging") +@imports(_from="__builtin__", _import="object") +@imports(_from="mozbuild.configure.util", _import="ConfigureOutputHandler") def old_js_configure(build_env): class PrefixOutput(object): def __init__(self, prefix, fh): @@ -18,21 +19,21 @@ def old_js_configure(build_env): def write(self, content): if self._begin_line: self._fh.write(self._prefix) - self._fh.write(('\n' + self._prefix).join(content.splitlines())) - self._begin_line = content.endswith('\n') + self._fh.write(("\n" + self._prefix).join(content.splitlines())) + self._begin_line = content.endswith("\n") if self._begin_line: - self._fh.write('\n') + self._fh.write("\n") def flush(self): self._fh.flush() - logger = logging.getLogger('moz.configure') - formatter = logging.Formatter('js/src> %(levelname)s: %(message)s') + logger = logging.getLogger("moz.configure") + formatter = logging.Formatter("js/src> %(levelname)s: %(message)s") for handler in logger.handlers: handler.setFormatter(formatter) if isinstance(handler, ConfigureOutputHandler): - handler._stdout = PrefixOutput('js/src> ', handler._stdout) - return os.path.join(build_env.topsrcdir, 'js', 'src', 'old-configure') + handler._stdout = PrefixOutput("js/src> ", handler._stdout) + return os.path.join(build_env.topsrcdir, "js", "src", "old-configure") @depends(old_configure.substs, mozconfig) @@ -49,35 +50,41 @@ def old_js_configure_env(substs, mozconfig): extra_env = {} for var in ( - 'MOZ_DEV_EDITION', 'STLPORT_LIBS', 'MOZ_LINKER', 'ZLIB_IN_MOZGLUE', - 'RANLIB', + "MOZ_DEV_EDITION", + "STLPORT_LIBS", + "MOZ_LINKER", + "ZLIB_IN_MOZGLUE", + "RANLIB", ): if var in substs: value = substs[var] - elif mozconfig and var in mozconfig and \ - not mozconfig[var][1].startswith('removed'): + elif ( + mozconfig + and var in mozconfig + and not mozconfig[var][1].startswith("removed") + ): value = mozconfig[var][0] else: continue if isinstance(value, list): - value = ' '.join(value) + value = " ".join(value) extra_env[var] = value return extra_env old_js_configure = old_configure_for(old_js_configure, extra_env=old_js_configure_env) -set_config('OLD_JS_CONFIGURE_SUBSTS', old_js_configure.substs) -set_config('OLD_JS_CONFIGURE_DEFINES', old_js_configure.defines) +set_config("OLD_JS_CONFIGURE_SUBSTS", old_js_configure.substs) +set_config("OLD_JS_CONFIGURE_DEFINES", old_js_configure.defines) @dependable -@imports('logging') -@imports(_from='mozbuild.configure.util', _import='ConfigureOutputHandler') +@imports("logging") +@imports(_from="mozbuild.configure.util", _import="ConfigureOutputHandler") def post_old_js_configure(): # Restore unprefixed logging. - formatter = logging.Formatter('%(levelname)s: %(message)s') - logger = logging.getLogger('moz.configure') + formatter = logging.Formatter("%(levelname)s: %(message)s") + logger = logging.getLogger("moz.configure") for handler in logger.handlers: handler.setFormatter(formatter) if isinstance(handler, ConfigureOutputHandler): diff --git a/js/xpconnect/idl/moz.build b/js/xpconnect/idl/moz.build index 5ac5dd900d421b..5e6feb4c249305 100644 --- a/js/xpconnect/idl/moz.build +++ b/js/xpconnect/idl/moz.build @@ -5,13 +5,12 @@ # file, You can obtain one at http://mozilla.org/MPL/2.0/. 
XPIDL_SOURCES += [ - 'mozIJSSubScriptLoader.idl', - 'nsIXPConnect.idl', - 'nsIXPCScriptable.idl', - 'xpccomponents.idl', - 'xpcIJSGetFactory.idl', - 'xpcIJSWeakReference.idl', + "mozIJSSubScriptLoader.idl", + "nsIXPConnect.idl", + "nsIXPCScriptable.idl", + "xpccomponents.idl", + "xpcIJSGetFactory.idl", + "xpcIJSWeakReference.idl", ] -XPIDL_MODULE = 'xpconnect' - +XPIDL_MODULE = "xpconnect" diff --git a/js/xpconnect/loader/moz.build b/js/xpconnect/loader/moz.build index dff21ea14e3941..926d3ce0defb6c 100644 --- a/js/xpconnect/loader/moz.build +++ b/js/xpconnect/loader/moz.build @@ -5,61 +5,59 @@ # file, You can obtain one at http://mozilla.org/MPL/2.0/. UNIFIED_SOURCES += [ - 'AutoMemMap.cpp', - 'ChromeScriptLoader.cpp', - 'mozJSLoaderUtils.cpp', - 'mozJSSubScriptLoader.cpp', - 'nsImportModule.cpp', - 'ScriptCacheActors.cpp', - 'ScriptPreloader.cpp', - 'URLPreloader.cpp', + "AutoMemMap.cpp", + "ChromeScriptLoader.cpp", + "mozJSLoaderUtils.cpp", + "mozJSSubScriptLoader.cpp", + "nsImportModule.cpp", + "ScriptCacheActors.cpp", + "ScriptPreloader.cpp", + "URLPreloader.cpp", ] # mozJSComponentLoader.cpp cannot be built in unified mode because it uses # windows.h SOURCES += [ - 'mozJSComponentLoader.cpp', + "mozJSComponentLoader.cpp", ] IPDL_SOURCES += [ - 'PScriptCache.ipdl', + "PScriptCache.ipdl", ] -EXPORTS += [ - 'nsImportModule.h' -] +EXPORTS += ["nsImportModule.h"] EXPORTS.mozilla += [ - 'AutoMemMap.h', - 'IOBuffers.h', - 'ScriptPreloader.h', - 'URLPreloader.h', + "AutoMemMap.h", + "IOBuffers.h", + "ScriptPreloader.h", + "URLPreloader.h", ] EXPORTS.mozilla.dom += [ - 'PrecompiledScript.h', + "PrecompiledScript.h", ] EXPORTS.mozilla.loader += [ - 'AutoMemMap.h', - 'ScriptCacheActors.h', + "AutoMemMap.h", + "ScriptCacheActors.h", ] EXTRA_JS_MODULES += [ - 'ComponentUtils.jsm', - 'XPCOMUtils.jsm', + "ComponentUtils.jsm", + "XPCOMUtils.jsm", ] -FINAL_LIBRARY = 'xul' +FINAL_LIBRARY = "xul" LOCAL_INCLUDES += [ - '../src', - '../wrappers', - '/dom/base', - '/xpcom/base/', + "../src", + "../wrappers", + "/dom/base", + "/xpcom/base/", ] -include('/ipc/chromium/chromium-config.mozbuild') +include("/ipc/chromium/chromium-config.mozbuild") -if CONFIG['CC_TYPE'] in ('clang', 'gcc'): - CXXFLAGS += ['-Wno-shadow'] +if CONFIG["CC_TYPE"] in ("clang", "gcc"): + CXXFLAGS += ["-Wno-shadow"] diff --git a/js/xpconnect/loader/script_cache.py b/js/xpconnect/loader/script_cache.py index e946b616de3a71..ecf5d8ae2e3ad6 100755 --- a/js/xpconnect/loader/script_cache.py +++ b/js/xpconnect/loader/script_cache.py @@ -11,14 +11,16 @@ import struct import sys -MAGIC = b'mozXDRcachev002\0' +MAGIC = b"mozXDRcachev002\0" def usage(): - print("""Usage: script_cache.py ... + print( + """Usage: script_cache.py ... 
Decodes and prints out the contents of a startup script cache file - (e.g., startupCache/scriptCache.bin) in human-readable form.""") + (e.g., startupCache/scriptCache.bin) in human-readable form.""" + ) sys.exit(1) @@ -36,20 +38,19 @@ def __init__(self, val): def __str__(self): res = [] if self.val & (1 << self.Uninitialized): - raise Exception('Uninitialized process type') + raise Exception("Uninitialized process type") if self.val & (1 << self.Parent): - res.append('Parent') + res.append("Parent") if self.val & (1 << self.Web): - res.append('Web') + res.append("Web") if self.val & (1 << self.Extension): - res.append('Extension') + res.append("Extension") if self.val & (1 << self.Privileged): - res.append('Privileged') - return '|'.join(res) + res.append("Privileged") + return "|".join(res) class InputBuffer(object): - def __init__(self, data): self.data = data self.offset = 0 @@ -64,8 +65,8 @@ def unpack(self, fmt): return res def unpack_str(self): - size, = self.unpack('9,}'.format(*hdr.unpack('9,}'.format(*hdr.unpack('9,}".format(*hdr.unpack("9,}".format(*hdr.unpack(" resolve(), "{}"); - """.format(name)) + """.format( + name + ) + ) def get_histogram(self, name): with self.marionette.using_context(self.marionette.CONTEXT_CHROME): - return self.marionette.execute_script(""" + return self.marionette.execute_script( + """ let snapshot = Services.telemetry.getSnapshotForHistograms("main", true); return snapshot.parent.{}.values; - """.format(name)) + """.format( + name + ) + ) def invalidate_caches(self): with self.marionette.using_context(self.marionette.CONTEXT_CHROME): diff --git a/js/xpconnect/tests/mochitest/moz.build b/js/xpconnect/tests/mochitest/moz.build index 8e5cb5d717f8d5..7c990fbc62babe 100644 --- a/js/xpconnect/tests/mochitest/moz.build +++ b/js/xpconnect/tests/mochitest/moz.build @@ -4,5 +4,4 @@ # License, v. 2.0. If a copy of the MPL was not distributed with this # file, You can obtain one at http://mozilla.org/MPL/2.0/. -MOCHITEST_MANIFESTS += ['mochitest.ini'] - +MOCHITEST_MANIFESTS += ["mochitest.ini"] diff --git a/js/xpconnect/tests/moz.build b/js/xpconnect/tests/moz.build index 8448a59ee622e6..c3613da9df06f3 100644 --- a/js/xpconnect/tests/moz.build +++ b/js/xpconnect/tests/moz.build @@ -5,29 +5,29 @@ # file, You can obtain one at http://mozilla.org/MPL/2.0/. 
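The InputBuffer helper in js/xpconnect/loader/script_cache.py, a few hunks above, reads length-prefixed fields out of the startup cache with struct. The sketch below shows that pattern in isolation; the 32-bit little-endian prefix and the payload are illustrative assumptions, not a description of the real scriptCache.bin layout:

import struct


def read_prefixed_str(data, offset):
    # A 32-bit little-endian length, followed by that many bytes of UTF-8 text.
    (size,) = struct.unpack_from("<I", data, offset)
    offset += 4
    value = data[offset : offset + size]
    return value.decode("utf-8"), offset + size


# Round-trip with a made-up payload.
payload = struct.pack("<I", 5) + b"hello"
text, end = read_prefixed_str(payload, 0)
print(text, end)  # hello 9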
TEST_DIRS += [ - 'marionette', - 'mochitest', - 'chrome', - 'browser', - 'components/native', + "marionette", + "mochitest", + "chrome", + "browser", + "components/native", ] -if CONFIG['COMPILE_ENVIRONMENT']: +if CONFIG["COMPILE_ENVIRONMENT"]: TEST_DIRS += [ - 'idl', + "idl", ] XPCSHELL_TESTS_MANIFESTS += [ - 'unit/xpcshell.ini', + "unit/xpcshell.ini", ] TEST_HARNESS_FILES.xpcshell.js.xpconnect.tests.components.js += [ - 'components/js/xpctest.manifest', - 'components/js/xpctest_attributes.js', - 'components/js/xpctest_bug809674.js', - 'components/js/xpctest_cenums.js', - 'components/js/xpctest_interfaces.js', - 'components/js/xpctest_params.js', - 'components/js/xpctest_returncode_child.js', - 'components/js/xpctest_utils.js', + "components/js/xpctest.manifest", + "components/js/xpctest_attributes.js", + "components/js/xpctest_bug809674.js", + "components/js/xpctest_cenums.js", + "components/js/xpctest_interfaces.js", + "components/js/xpctest_params.js", + "components/js/xpctest_returncode_child.js", + "components/js/xpctest_utils.js", ] diff --git a/js/xpconnect/wrappers/moz.build b/js/xpconnect/wrappers/moz.build index 65474d5adc5c55..851535087750fb 100644 --- a/js/xpconnect/wrappers/moz.build +++ b/js/xpconnect/wrappers/moz.build @@ -5,31 +5,31 @@ # file, You can obtain one at http://mozilla.org/MPL/2.0/. EXPORTS += [ - 'WrapperFactory.h', + "WrapperFactory.h", ] UNIFIED_SOURCES += [ - 'AccessCheck.cpp', - 'ChromeObjectWrapper.cpp', - 'FilteringWrapper.cpp', - 'WaiveXrayWrapper.cpp', - 'WrapperFactory.cpp', + "AccessCheck.cpp", + "ChromeObjectWrapper.cpp", + "FilteringWrapper.cpp", + "WaiveXrayWrapper.cpp", + "WrapperFactory.cpp", ] # XrayWrapper needs to be built separately becaue of template instantiations. SOURCES += [ - 'XrayWrapper.cpp', + "XrayWrapper.cpp", ] -include('/ipc/chromium/chromium-config.mozbuild') +include("/ipc/chromium/chromium-config.mozbuild") -FINAL_LIBRARY = 'xul' +FINAL_LIBRARY = "xul" LOCAL_INCLUDES += [ - '../../../dom/base', - '../src', - '/caps', + "../../../dom/base", + "../src", + "/caps", ] -if CONFIG['CC_TYPE'] in ('clang', 'gcc'): - CXXFLAGS += ['-Wno-shadow'] +if CONFIG["CC_TYPE"] in ("clang", "gcc"): + CXXFLAGS += ["-Wno-shadow"] diff --git a/layout/base/gtest/moz.build b/layout/base/gtest/moz.build index 9b9a36debf3fcf..12943b5fa87f42 100644 --- a/layout/base/gtest/moz.build +++ b/layout/base/gtest/moz.build @@ -6,25 +6,25 @@ UNIFIED_SOURCES += [ - 'TestAccessibleCaretEventHub.cpp', - 'TestAccessibleCaretManager.cpp', + "TestAccessibleCaretEventHub.cpp", + "TestAccessibleCaretManager.cpp", ] # THE MOCK_METHOD2 macro from gtest triggers this clang warning and it's hard # to work around, so we just ignore it. -if CONFIG['CC_TYPE'] == 'clang': - CXXFLAGS += ['-Wno-inconsistent-missing-override'] +if CONFIG["CC_TYPE"] == "clang": + CXXFLAGS += ["-Wno-inconsistent-missing-override"] -include('/ipc/chromium/chromium-config.mozbuild') +include("/ipc/chromium/chromium-config.mozbuild") LOCAL_INCLUDES += [ - '/docshell/base', - '/layout/base', - '/layout/style', + "/docshell/base", + "/layout/base", + "/layout/style", ] # Workaround bug 1142396. Suppress the warning from gmock library for clang. 
-if CONFIG['CC_TYPE'] == 'clang': - CXXFLAGS += ['-Wno-null-dereference'] +if CONFIG["CC_TYPE"] == "clang": + CXXFLAGS += ["-Wno-null-dereference"] -FINAL_LIBRARY = 'xul-gtest' +FINAL_LIBRARY = "xul-gtest" diff --git a/layout/base/moz.build b/layout/base/moz.build index 04cdb3b7e46ea3..0ffa214473544d 100644 --- a/layout/base/moz.build +++ b/layout/base/moz.build @@ -4,185 +4,185 @@ # License, v. 2.0. If a copy of the MPL was not distributed with this # file, You can obtain one at http://mozilla.org/MPL/2.0/. -with Files('Restyle*'): - BUG_COMPONENT = ('Core', 'CSS Parsing and Computation') +with Files("Restyle*"): + BUG_COMPONENT = ("Core", "CSS Parsing and Computation") -with Files('nsStyle*'): - BUG_COMPONENT = ('Core', 'CSS Parsing and Computation') +with Files("nsStyle*"): + BUG_COMPONENT = ("Core", "CSS Parsing and Computation") -with Files('nsChangeHint.h'): - BUG_COMPONENT = ('Core', 'CSS Parsing and Computation') +with Files("nsChangeHint.h"): + BUG_COMPONENT = ("Core", "CSS Parsing and Computation") -with Files('nsBidi*'): - BUG_COMPONENT = ('Core', 'Layout: Text and Fonts') +with Files("nsBidi*"): + BUG_COMPONENT = ("Core", "Layout: Text and Fonts") -with Files('AccessibleCaret*'): - BUG_COMPONENT = ('Core', 'DOM: Selection') +with Files("AccessibleCaret*"): + BUG_COMPONENT = ("Core", "DOM: Selection") XPIDL_SOURCES += [ - 'nsILayoutHistoryState.idl', - 'nsIPreloadedStyleSheet.idl', - 'nsIStyleSheetService.idl', + "nsILayoutHistoryState.idl", + "nsIPreloadedStyleSheet.idl", + "nsIStyleSheetService.idl", ] -if CONFIG['MOZ_DEBUG']: +if CONFIG["MOZ_DEBUG"]: UNIFIED_SOURCES += [ - 'nsAutoLayoutPhase.cpp', + "nsAutoLayoutPhase.cpp", ] -XPIDL_MODULE = 'layout_base' +XPIDL_MODULE = "layout_base" EXPORTS += [ - 'CaretAssociationHint.h', - 'FrameProperties.h', - 'LayoutConstants.h', - 'LayoutLogging.h', - 'MobileViewportManager.h', - 'nsAutoLayoutPhase.h', - 'nsBidi.h', - 'nsBidiPresUtils.h', - 'nsCaret.h', - 'nsChangeHint.h', - 'nsCompatibility.h', - 'nsCounterManager.h', - 'nsCSSFrameConstructor.h', - 'nsFrameManager.h', - 'nsFrameTraversal.h', - 'nsGenConList.h', - 'nsIFrameTraversal.h', - 'nsIPercentBSizeObserver.h', - 'nsIReflowCallback.h', - 'nsLayoutUtils.h', - 'nsPresArena.h', - 'nsPresArenaObjectList.h', - 'nsPresContext.h', - 'nsPresContextInlines.h', - 'nsQuoteList.h', - 'nsRefreshDriver.h', - 'nsRefreshObservers.h', - 'nsStyleChangeList.h', - 'nsStyleSheetService.h', - 'StackArena.h', - 'TouchManager.h', - 'Units.h', - 'UnitTransforms.h', - 'WordMovementType.h', - 'ZoomConstraintsClient.h', + "CaretAssociationHint.h", + "FrameProperties.h", + "LayoutConstants.h", + "LayoutLogging.h", + "MobileViewportManager.h", + "nsAutoLayoutPhase.h", + "nsBidi.h", + "nsBidiPresUtils.h", + "nsCaret.h", + "nsChangeHint.h", + "nsCompatibility.h", + "nsCounterManager.h", + "nsCSSFrameConstructor.h", + "nsFrameManager.h", + "nsFrameTraversal.h", + "nsGenConList.h", + "nsIFrameTraversal.h", + "nsIPercentBSizeObserver.h", + "nsIReflowCallback.h", + "nsLayoutUtils.h", + "nsPresArena.h", + "nsPresArenaObjectList.h", + "nsPresContext.h", + "nsPresContextInlines.h", + "nsQuoteList.h", + "nsRefreshDriver.h", + "nsRefreshObservers.h", + "nsStyleChangeList.h", + "nsStyleSheetService.h", + "StackArena.h", + "TouchManager.h", + "Units.h", + "UnitTransforms.h", + "WordMovementType.h", + "ZoomConstraintsClient.h", ] EXPORTS.mozilla += [ - 'AccessibleCaretEventHub.h', - 'ArenaObjectID.h', - 'DisplayPortUtils.h', - 'GeckoMVMContext.h', - 'GeometryUtils.h', - 'MediaEmulationData.h', - 'MotionPathUtils.h', - 
'MVMContext.h', - 'OverflowChangedTracker.h', - 'PositionedEventTargeting.h', - 'PresShell.h', - 'PresShellForwards.h', - 'PresShellInlines.h', - 'RelativeTo.h', - 'RestyleManager.h', - 'ScrollStyles.h', - 'ScrollTypes.h', - 'ShapeUtils.h', - 'StaticPresData.h', - 'SurfaceFromElementResult.h', - 'ViewportUtils.h', + "AccessibleCaretEventHub.h", + "ArenaObjectID.h", + "DisplayPortUtils.h", + "GeckoMVMContext.h", + "GeometryUtils.h", + "MediaEmulationData.h", + "MotionPathUtils.h", + "MVMContext.h", + "OverflowChangedTracker.h", + "PositionedEventTargeting.h", + "PresShell.h", + "PresShellForwards.h", + "PresShellInlines.h", + "RelativeTo.h", + "RestyleManager.h", + "ScrollStyles.h", + "ScrollTypes.h", + "ShapeUtils.h", + "StaticPresData.h", + "SurfaceFromElementResult.h", + "ViewportUtils.h", ] EXPORTS.mozilla.layout += [ - 'LayoutTelemetryTools.h', + "LayoutTelemetryTools.h", ] UNIFIED_SOURCES += [ - 'AccessibleCaret.cpp', - 'AccessibleCaretEventHub.cpp', - 'AccessibleCaretManager.cpp', - 'DisplayPortUtils.cpp', - 'GeckoMVMContext.cpp', - 'GeometryUtils.cpp', - 'LayoutLogging.cpp', - 'LayoutTelemetryTools.cpp', - 'MobileViewportManager.cpp', - 'MotionPathUtils.cpp', - 'nsBidi.cpp', - 'nsBidiPresUtils.cpp', - 'nsCaret.cpp', - 'nsCounterManager.cpp', - 'nsCSSColorUtils.cpp', - 'nsCSSFrameConstructor.cpp', - 'nsDocumentViewer.cpp', - 'nsFrameManager.cpp', - 'nsFrameTraversal.cpp', - 'nsGenConList.cpp', - 'nsLayoutDebugger.cpp', - 'nsLayoutHistoryState.cpp', - 'nsLayoutUtils.cpp', - 'nsPresArena.cpp', - 'nsPresContext.cpp', - 'nsQuoteList.cpp', - 'nsRefreshObservers.cpp', - 'nsStyleChangeList.cpp', - 'nsStyleSheetService.cpp', - 'PositionedEventTargeting.cpp', - 'PresShell.cpp', - 'RestyleManager.cpp', - 'ScrollStyles.cpp', - 'ShapeUtils.cpp', - 'StackArena.cpp', - 'StaticPresData.cpp', - 'TouchManager.cpp', - 'ViewportUtils.cpp', - 'ZoomConstraintsClient.cpp', + "AccessibleCaret.cpp", + "AccessibleCaretEventHub.cpp", + "AccessibleCaretManager.cpp", + "DisplayPortUtils.cpp", + "GeckoMVMContext.cpp", + "GeometryUtils.cpp", + "LayoutLogging.cpp", + "LayoutTelemetryTools.cpp", + "MobileViewportManager.cpp", + "MotionPathUtils.cpp", + "nsBidi.cpp", + "nsBidiPresUtils.cpp", + "nsCaret.cpp", + "nsCounterManager.cpp", + "nsCSSColorUtils.cpp", + "nsCSSFrameConstructor.cpp", + "nsDocumentViewer.cpp", + "nsFrameManager.cpp", + "nsFrameTraversal.cpp", + "nsGenConList.cpp", + "nsLayoutDebugger.cpp", + "nsLayoutHistoryState.cpp", + "nsLayoutUtils.cpp", + "nsPresArena.cpp", + "nsPresContext.cpp", + "nsQuoteList.cpp", + "nsRefreshObservers.cpp", + "nsStyleChangeList.cpp", + "nsStyleSheetService.cpp", + "PositionedEventTargeting.cpp", + "PresShell.cpp", + "RestyleManager.cpp", + "ScrollStyles.cpp", + "ShapeUtils.cpp", + "StackArena.cpp", + "StaticPresData.cpp", + "TouchManager.cpp", + "ViewportUtils.cpp", + "ZoomConstraintsClient.cpp", ] # nsRefreshDriver.cpp needs to be built separately because of name clashes in the OS X headers SOURCES += [ - 'nsRefreshDriver.cpp', + "nsRefreshDriver.cpp", ] -if CONFIG['ENABLE_TESTS']: - DIRS += ['gtest'] +if CONFIG["ENABLE_TESTS"]: + DIRS += ["gtest"] -include('/ipc/chromium/chromium-config.mozbuild') +include("/ipc/chromium/chromium-config.mozbuild") LOCAL_INCLUDES += [ - '../forms', - '../generic', - '../mathml', - '../painting', - '../printing', - '../style', - '../tables', - '../xul', - '../xul/tree/', - '/docshell/base', - '/dom/base', - '/dom/html', - '/dom/svg', - '/dom/xul', - '/view', + "../forms", + "../generic", + "../mathml", + "../painting", + "../printing", 
+ "../style", + "../tables", + "../xul", + "../xul/tree/", + "/docshell/base", + "/dom/base", + "/dom/html", + "/dom/svg", + "/dom/xul", + "/view", ] -if CONFIG['MOZ_WIDGET_TOOLKIT'] == 'android': +if CONFIG["MOZ_WIDGET_TOOLKIT"] == "android": LOCAL_INCLUDES += [ - '/widget/android', + "/widget/android", ] IPDL_SOURCES += [ - 'PresState.ipdlh', + "PresState.ipdlh", ] -FINAL_LIBRARY = 'xul' +FINAL_LIBRARY = "xul" -BROWSER_CHROME_MANIFESTS += ['tests/browser.ini'] -MARIONETTE_LAYOUT_MANIFESTS += ['tests/marionette/manifest.ini'] -MOCHITEST_MANIFESTS += ['tests/mochitest.ini'] -MOCHITEST_CHROME_MANIFESTS += ['tests/chrome/chrome.ini'] +BROWSER_CHROME_MANIFESTS += ["tests/browser.ini"] +MARIONETTE_LAYOUT_MANIFESTS += ["tests/marionette/manifest.ini"] +MOCHITEST_MANIFESTS += ["tests/mochitest.ini"] +MOCHITEST_CHROME_MANIFESTS += ["tests/chrome/chrome.ini"] -if CONFIG['CC_TYPE'] in ('clang', 'gcc'): - CXXFLAGS += ['-Wno-error=shadow'] +if CONFIG["CC_TYPE"] in ("clang", "gcc"): + CXXFLAGS += ["-Wno-error=shadow"] diff --git a/layout/base/tests/marionette/selection.py b/layout/base/tests/marionette/selection.py index aa6ed20ec7cc72..7cc64dfeea3483 100644 --- a/layout/base/tests/marionette/selection.py +++ b/layout/base/tests/marionette/selection.py @@ -14,7 +14,9 @@ def __init__(self, marionette): self._reset_action_chain() def _reset_action_chain(self): - self.mouse_chain = self.sequence("pointer", "pointer_id", {"pointerType": "mouse"}) + self.mouse_chain = self.sequence( + "pointer", "pointer_id", {"pointerType": "mouse"} + ) self.key_chain = self.sequence("key", "keyboard_id") def flick(self, element, x1, y1, x2, y2, duration=200): @@ -32,16 +34,15 @@ def flick(self, element, x1, y1, x2, y2, duration=200): """ rect = element.rect - el_x, el_y = rect['x'], rect['y'] + el_x, el_y = rect["x"], rect["y"] # Add element's (x, y) to make the coordinate relative to the viewport. from_x, from_y = int(el_x + x1), int(el_y + y1) to_x, to_y = int(el_x + x2), int(el_y + y2) - self.mouse_chain.pointer_move(from_x, from_y) \ - .pointer_down() \ - .pointer_move(to_x, to_y, duration=duration) \ - .pointer_up() + self.mouse_chain.pointer_move(from_x, from_y).pointer_down().pointer_move( + to_x, to_y, duration=duration + ).pointer_up() return self def send_keys(self, keys): @@ -67,7 +68,7 @@ def perform(self): class SelectionManager(object): - '''Interface for manipulating the selection and carets of the element. + """Interface for manipulating the selection and carets of the element. We call the blinking cursor (nsCaret) as cursor, and call AccessibleCaret as caret for short. @@ -80,17 +81,17 @@ class SelectionManager(object): sel = SelectionManager(element) sel.move_caret_to_front() - ''' + """ def __init__(self, element): self.element = element def _input_or_textarea(self): - '''Return True if element is either or