Remove all sources; note the new location of the code.
diff --git a/.flake8 b/.flake8
deleted file mode 100644
index a4360ed..0000000
--- a/.flake8
+++ /dev/null
@@ -1,3 +0,0 @@
-[flake8]
-ignore = W504
-exclude = ./debian,./src/work
diff --git a/.github/workflows/build.yml b/.github/workflows/build.yml
deleted file mode 100644
index ca346f0..0000000
--- a/.github/workflows/build.yml
+++ /dev/null
@@ -1,38 +0,0 @@
-name: CI
-
-on:
-  create:
-    tags:
-  push:
-    branches:
-      - master
-  pull_request:
-
-jobs:
-  flake8:
-    runs-on: ubuntu-latest
-    steps:
-    - uses: actions/setup-python@v1
-      with:
-        python-version: '3.x'
-    - uses: actions/checkout@v1
-      with:
-        submodules: true
-    - name: pip install
-      run: pip install flake8==3.7.8
-    - run: flake8
-  build:
-    runs-on: ubuntu-latest
-    steps:
-    - uses: actions/setup-python@v1
-      with:
-        python-version: '3.x'
-    - uses: actions/checkout@v1
-      with:
-        submodules: true
-    - name: install dependencies
-      run: sudo apt-get --quiet install ninja-build devscripts
-    - name: build.py
-      run: ./src/build.py --sync-include=cmake,wabt --build-include=wabt,debian --no-test --no-host-clang --no-sysroot
-    - name: build.py with sysroot
-      run: ./src/build.py --sync-include=sysroot,cmake,binaryen --build-include=binaryen --no-test --no-host-clang
diff --git a/.gitignore b/.gitignore
deleted file mode 100644
index 8ac7faa..0000000
--- a/.gitignore
+++ /dev/null
@@ -1,9 +0,0 @@
-*~
-*.pyc
-src/work
-debian/wasm-toolchain.substvars
-debian/wasm-toolchain
-debian/wasm-toolchain.postinst.debhelper
-debian/wasm-toolchain.postrm.debhelper
-debian/wasm-toolchain.debhelper.log
-debian/files
diff --git a/.style.yapf b/.style.yapf
deleted file mode 100644
index 557fa7b..0000000
--- a/.style.yapf
+++ /dev/null
@@ -1,2 +0,0 @@
-[style]
-based_on_style = pep8
diff --git a/.vpython b/.vpython
deleted file mode 100644
index b177e44..0000000
--- a/.vpython
+++ /dev/null
@@ -1,47 +0,0 @@
-# This is a vpython "spec" file.
-#
-# It describes patterns for python wheel dependencies of the python scripts in
-# the chromium repo, particularly for dependencies that have compiled components
-# (since pure-python dependencies can be easily vendored into third_party).
-#
-# When vpython is invoked, it finds this file and builds a python VirtualEnv,
-# containing all of the dependencies described in this file, fetching them from
-# CIPD (the "Chrome Infrastructure Package Deployer" service). Unlike `pip`,
-# this never requires the end-user machine to have a working python extension
-# compilation environment. All of these packages are built using:
-#   https://chromium.googlesource.com/infra/infra/+/master/infra/tools/dockerbuild/
-#
-# All python scripts in the repo share this same spec, to avoid dependency
-# fragmentation.
-#
-# If you have depot_tools installed in your $PATH, you can invoke python scripts
-# in this repo by running them as you normally would run them, except
-# substituting `vpython` instead of `python` on the command line, e.g.:
-#   vpython path/to/script.py some --arguments
-#
-# Read more about `vpython` and how to modify this file here:
-#   https://chromium.googlesource.com/infra/infra/+/master/doc/users/vpython.md
-
-python_version: "3.8"
-
-wheel: <
-  name: "infra/python/wheels/requests-py2_py3"
-  version: "version:2.21.0"
->
-
-wheel: <
-  name: "infra/python/wheels/urllib3-py2_py3"
-  version: "version:1.24.3"
->
-wheel: <
-  name: "infra/python/wheels/certifi-py2_py3"
-  version: "version:2019.3.9"
->
-wheel: <
-  name: "infra/python/wheels/chardet-py2_py3"
-  version: "version:3.0.4"
->
-wheel: <
-  name: "infra/python/wheels/idna-py2_py3"
-  version: "version:2.8"
->
diff --git a/Contributing.md b/Contributing.md
deleted file mode 100644
index 1cc607f..0000000
--- a/Contributing.md
+++ /dev/null
@@ -1,8 +0,0 @@
-# Contributing to WebAssembly
-
-Interested in participating? Please follow
-[the same contributing guidelines as the design repository][].
-
-  [the same contributing guidelines as the design repository]: https://github.com/WebAssembly/design/blob/master/Contributing.md
-
-Also, please be sure to read [the README.md](README.md) for this repository.
diff --git a/LICENSE b/LICENSE
deleted file mode 100644
index 8f71f43..0000000
--- a/LICENSE
+++ /dev/null
@@ -1,202 +0,0 @@
-                                 Apache License
-                           Version 2.0, January 2004
-                        http://www.apache.org/licenses/
-
-   TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
-
-   1. Definitions.
-
-      "License" shall mean the terms and conditions for use, reproduction,
-      and distribution as defined by Sections 1 through 9 of this document.
-
-      "Licensor" shall mean the copyright owner or entity authorized by
-      the copyright owner that is granting the License.
-
-      "Legal Entity" shall mean the union of the acting entity and all
-      other entities that control, are controlled by, or are under common
-      control with that entity. For the purposes of this definition,
-      "control" means (i) the power, direct or indirect, to cause the
-      direction or management of such entity, whether by contract or
-      otherwise, or (ii) ownership of fifty percent (50%) or more of the
-      outstanding shares, or (iii) beneficial ownership of such entity.
-
-      "You" (or "Your") shall mean an individual or Legal Entity
-      exercising permissions granted by this License.
-
-      "Source" form shall mean the preferred form for making modifications,
-      including but not limited to software source code, documentation
-      source, and configuration files.
-
-      "Object" form shall mean any form resulting from mechanical
-      transformation or translation of a Source form, including but
-      not limited to compiled object code, generated documentation,
-      and conversions to other media types.
-
-      "Work" shall mean the work of authorship, whether in Source or
-      Object form, made available under the License, as indicated by a
-      copyright notice that is included in or attached to the work
-      (an example is provided in the Appendix below).
-
-      "Derivative Works" shall mean any work, whether in Source or Object
-      form, that is based on (or derived from) the Work and for which the
-      editorial revisions, annotations, elaborations, or other modifications
-      represent, as a whole, an original work of authorship. For the purposes
-      of this License, Derivative Works shall not include works that remain
-      separable from, or merely link (or bind by name) to the interfaces of,
-      the Work and Derivative Works thereof.
-
-      "Contribution" shall mean any work of authorship, including
-      the original version of the Work and any modifications or additions
-      to that Work or Derivative Works thereof, that is intentionally
-      submitted to Licensor for inclusion in the Work by the copyright owner
-      or by an individual or Legal Entity authorized to submit on behalf of
-      the copyright owner. For the purposes of this definition, "submitted"
-      means any form of electronic, verbal, or written communication sent
-      to the Licensor or its representatives, including but not limited to
-      communication on electronic mailing lists, source code control systems,
-      and issue tracking systems that are managed by, or on behalf of, the
-      Licensor for the purpose of discussing and improving the Work, but
-      excluding communication that is conspicuously marked or otherwise
-      designated in writing by the copyright owner as "Not a Contribution."
-
-      "Contributor" shall mean Licensor and any individual or Legal Entity
-      on behalf of whom a Contribution has been received by Licensor and
-      subsequently incorporated within the Work.
-
-   2. Grant of Copyright License. Subject to the terms and conditions of
-      this License, each Contributor hereby grants to You a perpetual,
-      worldwide, non-exclusive, no-charge, royalty-free, irrevocable
-      copyright license to reproduce, prepare Derivative Works of,
-      publicly display, publicly perform, sublicense, and distribute the
-      Work and such Derivative Works in Source or Object form.
-
-   3. Grant of Patent License. Subject to the terms and conditions of
-      this License, each Contributor hereby grants to You a perpetual,
-      worldwide, non-exclusive, no-charge, royalty-free, irrevocable
-      (except as stated in this section) patent license to make, have made,
-      use, offer to sell, sell, import, and otherwise transfer the Work,
-      where such license applies only to those patent claims licensable
-      by such Contributor that are necessarily infringed by their
-      Contribution(s) alone or by combination of their Contribution(s)
-      with the Work to which such Contribution(s) was submitted. If You
-      institute patent litigation against any entity (including a
-      cross-claim or counterclaim in a lawsuit) alleging that the Work
-      or a Contribution incorporated within the Work constitutes direct
-      or contributory patent infringement, then any patent licenses
-      granted to You under this License for that Work shall terminate
-      as of the date such litigation is filed.
-
-   4. Redistribution. You may reproduce and distribute copies of the
-      Work or Derivative Works thereof in any medium, with or without
-      modifications, and in Source or Object form, provided that You
-      meet the following conditions:
-
-      (a) You must give any other recipients of the Work or
-          Derivative Works a copy of this License; and
-
-      (b) You must cause any modified files to carry prominent notices
-          stating that You changed the files; and
-
-      (c) You must retain, in the Source form of any Derivative Works
-          that You distribute, all copyright, patent, trademark, and
-          attribution notices from the Source form of the Work,
-          excluding those notices that do not pertain to any part of
-          the Derivative Works; and
-
-      (d) If the Work includes a "NOTICE" text file as part of its
-          distribution, then any Derivative Works that You distribute must
-          include a readable copy of the attribution notices contained
-          within such NOTICE file, excluding those notices that do not
-          pertain to any part of the Derivative Works, in at least one
-          of the following places: within a NOTICE text file distributed
-          as part of the Derivative Works; within the Source form or
-          documentation, if provided along with the Derivative Works; or,
-          within a display generated by the Derivative Works, if and
-          wherever such third-party notices normally appear. The contents
-          of the NOTICE file are for informational purposes only and
-          do not modify the License. You may add Your own attribution
-          notices within Derivative Works that You distribute, alongside
-          or as an addendum to the NOTICE text from the Work, provided
-          that such additional attribution notices cannot be construed
-          as modifying the License.
-
-      You may add Your own copyright statement to Your modifications and
-      may provide additional or different license terms and conditions
-      for use, reproduction, or distribution of Your modifications, or
-      for any such Derivative Works as a whole, provided Your use,
-      reproduction, and distribution of the Work otherwise complies with
-      the conditions stated in this License.
-
-   5. Submission of Contributions. Unless You explicitly state otherwise,
-      any Contribution intentionally submitted for inclusion in the Work
-      by You to the Licensor shall be under the terms and conditions of
-      this License, without any additional terms or conditions.
-      Notwithstanding the above, nothing herein shall supersede or modify
-      the terms of any separate license agreement you may have executed
-      with Licensor regarding such Contributions.
-
-   6. Trademarks. This License does not grant permission to use the trade
-      names, trademarks, service marks, or product names of the Licensor,
-      except as required for reasonable and customary use in describing the
-      origin of the Work and reproducing the content of the NOTICE file.
-
-   7. Disclaimer of Warranty. Unless required by applicable law or
-      agreed to in writing, Licensor provides the Work (and each
-      Contributor provides its Contributions) on an "AS IS" BASIS,
-      WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
-      implied, including, without limitation, any warranties or conditions
-      of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
-      PARTICULAR PURPOSE. You are solely responsible for determining the
-      appropriateness of using or redistributing the Work and assume any
-      risks associated with Your exercise of permissions under this License.
-
-   8. Limitation of Liability. In no event and under no legal theory,
-      whether in tort (including negligence), contract, or otherwise,
-      unless required by applicable law (such as deliberate and grossly
-      negligent acts) or agreed to in writing, shall any Contributor be
-      liable to You for damages, including any direct, indirect, special,
-      incidental, or consequential damages of any character arising as a
-      result of this License or out of the use or inability to use the
-      Work (including but not limited to damages for loss of goodwill,
-      work stoppage, computer failure or malfunction, or any and all
-      other commercial damages or losses), even if such Contributor
-      has been advised of the possibility of such damages.
-
-   9. Accepting Warranty or Additional Liability. While redistributing
-      the Work or Derivative Works thereof, You may choose to offer,
-      and charge a fee for, acceptance of support, warranty, indemnity,
-      or other liability obligations and/or rights consistent with this
-      License. However, in accepting such obligations, You may act only
-      on Your own behalf and on Your sole responsibility, not on behalf
-      of any other Contributor, and only if You agree to indemnify,
-      defend, and hold each Contributor harmless for any liability
-      incurred by, or claims asserted against, such Contributor by reason
-      of your accepting any such warranty or additional liability.
-
-   END OF TERMS AND CONDITIONS
-
-   APPENDIX: How to apply the Apache License to your work.
-
-      To apply the Apache License to your work, attach the following
-      boilerplate notice, with the fields enclosed by brackets "{}"
-      replaced with your own identifying information. (Don't include
-      the brackets!)  The text should be enclosed in the appropriate
-      comment syntax for the file format. We also recommend that a
-      file or class name and description of purpose be included on the
-      same "printed page" as the copyright notice for easier
-      identification within third-party archives.
-
-   Copyright {yyyy} {name of copyright owner}
-
-   Licensed under the Apache License, Version 2.0 (the "License");
-   you may not use this file except in compliance with the License.
-   You may obtain a copy of the License at
-
-       http://www.apache.org/licenses/LICENSE-2.0
-
-   Unless required by applicable law or agreed to in writing, software
-   distributed under the License is distributed on an "AS IS" BASIS,
-   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-   See the License for the specific language governing permissions and
-   limitations under the License.
-
diff --git a/README.md b/README.md
index 837d610..8cc6e4e 100644
--- a/README.md
+++ b/README.md
@@ -1,71 +1,10 @@
-# Waterfall
+## ༼ ༎ຶ ෴ ༎ຶ༽ This code has moved
 
-## ༼ ༎ຶ ෴ ༎ຶ༽ If it’s not tested, it’s already broken.
-
-Luckily, this repository has some tests: [![Build Status](https://travis-ci.org/WebAssembly/waterfall.svg?branch=master)](https://travis-ci.org/WebAssembly/waterfall)
-
-# What's this?
-
-This repository holds the code which make the WebAssembly waterfall's heart
-beat. You may want to see [the waterfall][] in action, and if you don't like
-what you see you may even want to [contribute](Contributing.md).
-
-  [the waterfall]: https://wasm-stat.us
-
-# What's a waterfall?
-
-WebAssembly has many moving parts (implementations, tools, tests, etc) and no
-central owner. All of these parts have have their own owners, priorities, and
-tests (which include WebAssembly as well as others).  A build and test waterfall
-allows us to test the interactions between these components. It helps us:
-
-* Have simple build instructions for each component.
-* Archive build logs and build artifacts.
-* Identify which build artifacts are known-good.
-* Know which tests matter.
-* Make tests easily executable.
-* Know which configurations matter (build flavor, host OS, host architecture,
-  ...).
-* Cause inadvertent breakage less often.
-* When breakage occurs, identify it quickly and reverted / silenced / fixed
-  easily.
-* When a big change is required, know which moving parts should synchronize.
-* Make the feature implementation status straightforward to check for each
-  component.
-
-We should keep process to a minimum, try things out, see what works.
-
-# How do I run it?
-
-1. Get the sources: `$ git clone https://github.com/WebAssembly/waterfall.git`
-2. Install `depot_tools`. Follow [the instructions](https://commondatastorage.googleapis.com/chrome-infra-docs/flat/depot_tools/docs/html/depot_tools_tutorial.html#_setting_up)
-3. Install `pkg-config` if you don't have it installed already, e.g. `# apt install pkg-config`
-4. Run build.py `python src/build.py`
-
-Build.py has 3 types of actions:
-* downloading/updating sources for tools and engines (sync)
-* building those sources (build)
-* running tests against them (test).
-
-Each of these types has multiple steps (e.g. a build step for each component).
-If you run build.py with no arguments, it will run all the sync, build, and test
-steps. If you make a change and only want to run a subset of steps, you can
-apply filters from the command line, via exclusions (to prevent specified steps
-from running) or inclusions (to run only the specified steps). Sync, build, and
-test exclusions are specified separately.  For example:
-
-1. Do not sync any sources, build everything except LLVM, and run all tests:
-  `$ src/build.py --no-sync --build-exclude=llvm`
-2. Sync only WABT, build WABT and Binaryen, run everything other than the
-   emscripten testsuites:
-  `$ src/build.py --sync-include=wabt --build-include=wabt,binaryen --test-exclude=emtest,emtest-asm`
-
-The script should throw an error if you specify nonexistent steps or if you
-specify both includes and excludes for the same type of action.
-
-When run, the script creates a directory `src/work` inside the waterfall's git
-checkout. All modifications are made inside this directory (checking and out and
-building the sources, as well as the test builds and execution results). You can
-also use the git checkouts (e.g. `src/work/llvm`) with your own branches; the
-sync steps will check out the latest revision from the script's remote
-repositories but will not overwrite or destroy any local work.
+This repository contained build scripts for building WebAssembly toolchain
+components (including LLVM, Binaryen, and Emscripten). Eventually it was
+only used for the "emscripten-releases" builders used to build binary
+distributions for EMSDK on top of Chromium's infrastructure. So, we
+have moved it to a Chromium repository for ease of development. You can
+find it in the `build/` subdirectory of the
+[emscripten-releases](https://chromium.googlesource.com/emscripten-releases/)
+repository.
diff --git a/__init__.py b/__init__.py
deleted file mode 100755
index 96bbb5f..0000000
--- a/__init__.py
+++ /dev/null
@@ -1,15 +0,0 @@
-#   Copyright 2015 WebAssembly Community Group participants
-#
-#   Licensed under the Apache License, Version 2.0 (the "License");
-#   you may not use this file except in compliance with the License.
-#   You may obtain a copy of the License at
-#
-#       http://www.apache.org/licenses/LICENSE-2.0
-#
-#   Unless required by applicable law or agreed to in writing, software
-#   distributed under the License is distributed on an "AS IS" BASIS,
-#   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-#   See the License for the specific language governing permissions and
-#   limitations under the License.
-
-# Empty __init__.py file: Python treats the directory as containing a package.
diff --git a/debian/README.Debian b/debian/README.Debian
deleted file mode 100644
index a1caba7..0000000
--- a/debian/README.Debian
+++ /dev/null
@@ -1,27 +0,0 @@
-wasm-toolchain for Debian
--------------------------
-
-This is a binary-only package produced by the wasm waterfall which
-is hosted at https://wasm-stat.us/.  The code that runs the build
-is hosted at https://github.com/WebAssembly/waterfall.
-
-This package includes the following components:
-
- - upstream llvm including wasm backend (/opt/wasm/llvm)
- - emscripten (/opt/wasm/emscripten)
- - binaryen (/opt/wasm/bin/ /opt/wasm/src/js/)
- - wabt (/opt/wasm/bin/)
- - emscripten launcher scripts (/opt/wasm/bin/emcc + em++)
-
-The versions of each of the source package that were used to produce
-this package are documented in /opt/wasm/buildinfo.json.
-
-As well as installing files under /opt/wasm the package also uses
-the debian alternatives mechanism to install /usr/bin/emcc and
-/usr/bin/em++ which point to the scripts in /opt/wasm/bin/emcc.
-
-These wrapper scripts allow emcc and em++ to be used without any
-EM_CONFIG or ~/.emscripten (i.e. they provide a pre-configured
-version of the toolchain).
-
- -- Sam Clegg <[email protected]>  Fri, 23 Sep 2016 10:57:11 -0700
diff --git a/debian/changelog b/debian/changelog
deleted file mode 100644
index 316161c..0000000
--- a/debian/changelog
+++ /dev/null
@@ -1,5 +0,0 @@
-wasm-toolchain (0.1) unstable; urgency=low
-
-  * Initial Release.
-
- -- Sam Clegg <[email protected]>  Fri, 23 Sep 2016 10:57:11 -0700
diff --git a/debian/compat b/debian/compat
deleted file mode 100644
index ec63514..0000000
--- a/debian/compat
+++ /dev/null
@@ -1 +0,0 @@
-9
diff --git a/debian/control b/debian/control
deleted file mode 100644
index 0730cd0..0000000
--- a/debian/control
+++ /dev/null
@@ -1,16 +0,0 @@
-Source: wasm-toolchain
-Section: devel
-Priority: optional
-Maintainer: Sam Clegg <[email protected]>
-Build-Depends: debhelper (>= 8.0.0)
-Standards-Version: 3.9.4
-Homepage: https://github.com/WebAssembly
-Vcs-Git: https://github.com/WebAssembly/waterfall.git
-Vcs-Browser: https://github.com/WebAssembly/waterfall
-
-Package: wasm-toolchain
-Architecture: any
-Depends: ${shlibs:Depends}, ${misc:Depends}, ${perl:Depends}, python (>= 2.6)
-Description: Web Assembly Toolchain
- Toolchain for working with and producing Web Assembly
- files.
diff --git a/debian/docs b/debian/docs
deleted file mode 100644
index b43bf86..0000000
--- a/debian/docs
+++ /dev/null
@@ -1 +0,0 @@
-README.md
diff --git a/debian/postinst b/debian/postinst
deleted file mode 100644
index 3b777b5..0000000
--- a/debian/postinst
+++ /dev/null
@@ -1,7 +0,0 @@
-#!/bin/sh
-set -e
-
-for x in emcc em++ emconfigure emmake wasm2wat wat2wasm wasm-objdump \
-         wasm-interp wasm-opt wasm-as wasm-dis wasm-shell wasm d8; do
-  update-alternatives --install /usr/bin/$x $x /opt/wasm/bin/$x 10
-done
diff --git a/debian/prerm b/debian/prerm
deleted file mode 100644
index 97cb912..0000000
--- a/debian/prerm
+++ /dev/null
@@ -1,7 +0,0 @@
-#!/bin/sh
-set -e
-
-for x in emcc em++ emconfigure emmake wasm2wat wat2wasm wasm-objdump \
-         wasm-interp wasm-opt wasm-as wasm-dis wasm-shell wasm d8; do
-  update-alternatives --remove $x /opt/wasm/bin/$x
-done
diff --git a/debian/rules b/debian/rules
deleted file mode 100755
index 19b232d..0000000
--- a/debian/rules
+++ /dev/null
@@ -1,30 +0,0 @@
-#!/usr/bin/make -f
-# -*- makefile -*-
-
-# Uncomment this to turn on verbose mode.
-#export DH_VERBOSE=1
-
-%:
-	dh $@
-
-PACKAGE = $(shell dh_listpackages)
-TMP     = $(CURDIR)/debian/$(PACKAGE)
-PREFIX  = /opt/wasm
-DESTDIR = $(TMP)$(PREFIX)
-
-override_dh_update_autotools_config:
-
-override_dh_auto_install:
-	mkdir -p debian/wasm-toolchain/opt/wasm
-	cp -ar src/work/wasm-install/* $(DESTDIR)
-	find $(DESTDIR) -name "*.pyc" -exec rm "{}" \;
-	rm -rf $(DESTDIR)/emscripten/tests
-	cp src/emscripten_config* debian/wasm-toolchain/opt/wasm
-	rm -f debian/wasm-toolchain/opt/wasm/emscripten_config*sanity*
-	perl -pi -e 's/{{WASM_INSTALL}}/\/opt\/wasm/' debian/wasm-toolchain/opt/wasm/emscripten_config*
-
-override_dh_strip:
-	# Don't try to strip any library containing wasm object files because
-	# the native tools can't understand them and end up removing the ar
-	# indexes
-	dh_strip -Xopt/wasm/sysroot -Xwasm32.a
diff --git a/debian/source/format b/debian/source/format
deleted file mode 100644
index 89ae9db..0000000
--- a/debian/source/format
+++ /dev/null
@@ -1 +0,0 @@
-3.0 (native)
diff --git a/setup.cfg b/setup.cfg
deleted file mode 100644
index 50b0627..0000000
--- a/setup.cfg
+++ /dev/null
@@ -1,5 +0,0 @@
-[pep8]
-ignore = E111,E114
-[flake8]
-ignore = E111,E114
-exclude = work/*,src/work/*
diff --git a/src/Wasi.cmake b/src/Wasi.cmake
deleted file mode 100644
index 0735c4d..0000000
--- a/src/Wasi.cmake
+++ /dev/null
@@ -1,37 +0,0 @@
-# WACK (WebAssembly Clang Kit) is an experimental WebAssembly standalone
-# toolchain. It is used on the build waterfall for testing various toolchain
-# components (e.g. clang, LLVM, lld) and engines (e.g. V8 and JSC) but is not
-# an official or productionized tool.
-
-
-# Set up the CMake include path to find the WACK platform file. Following the
-# same convention as the CMake distribution suppresses noisy CMake warnings:
-# "System is unknown to cmake"
-# Module path modification can't be done from the command line, so this file
-# exists to do that.
-
-set(WASM_SDKROOT ${CMAKE_CURRENT_LIST_DIR})
-list(APPEND CMAKE_MODULE_PATH "${WASM_SDKROOT}/cmake/Modules")
-
-# This has to go before we set CMAKE_SYSTEM_NAME because the default c compiler
-# gets set before the platform file is included
-if (CMAKE_HOST_WIN32)
-  set(EXE_SUFFIX ".exe")
-else()
-  set(EXE_SUFFIX "")
-endif()
-if ("${CMAKE_C_COMPILER}" STREQUAL "")
-  set(CMAKE_C_COMPILER ${WASM_SDKROOT}/bin/wasm32-wasi-clang${EXE_SUFFIX})
-endif()
-if ("${CMAKE_CXX_COMPILER}" STREQUAL "")
-  set(CMAKE_CXX_COMPILER ${WASM_SDKROOT}/bin/wasm32-wasi-clang++${EXE_SUFFIX})
-endif()
-if ("${CMAKE_AR}" STREQUAL "")
-  set(CMAKE_AR ${WASM_SDKROOT}/bin/llvm-ar${EXE_SUFFIX} CACHE FILEPATH "llvm ar")
-endif()
-if ("${CMAKE_RANLIB}" STREQUAL "")
- set(CMAKE_RANLIB ${WASM_SDKROOT}/bin/llvm-ranlib${EXE_SUFFIX} CACHE FILEPATH "llvm ranlib")
-endif()
-
-# Include the platform file
-set(CMAKE_SYSTEM_NAME Wasi)
diff --git a/src/__init__.py b/src/__init__.py
deleted file mode 100755
index 96bbb5f..0000000
--- a/src/__init__.py
+++ /dev/null
@@ -1,15 +0,0 @@
-#   Copyright 2015 WebAssembly Community Group participants
-#
-#   Licensed under the Apache License, Version 2.0 (the "License");
-#   you may not use this file except in compliance with the License.
-#   You may obtain a copy of the License at
-#
-#       http://www.apache.org/licenses/LICENSE-2.0
-#
-#   Unless required by applicable law or agreed to in writing, software
-#   distributed under the License is distributed on an "AS IS" BASIS,
-#   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-#   See the License for the specific language governing permissions and
-#   limitations under the License.
-
-# Empty __init__.py file: Python treats the directory as containing a package.
diff --git a/src/build.py b/src/build.py
deleted file mode 100755
index 70d3a3e..0000000
--- a/src/build.py
+++ /dev/null
@@ -1,2018 +0,0 @@
-#!/usr/bin/env python3
-# -*- coding: utf-8 -*-
-
-#   Copyright 2015 WebAssembly Community Group participants
-#
-#   Licensed under the Apache License, Version 2.0 (the "License");
-#   you may not use this file except in compliance with the License.
-#   You may obtain a copy of the License at
-#
-#       http://www.apache.org/licenses/LICENSE-2.0
-#
-#   Unless required by applicable law or agreed to in writing, software
-#   distributed under the License is distributed on an "AS IS" BASIS,
-#   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-#   See the License for the specific language governing permissions and
-#   limitations under the License.
-
-import argparse
-import glob
-import json
-import multiprocessing
-import os
-import shutil
-import sys
-import tarfile
-import tempfile
-import textwrap
-import time
-import traceback
-import zipfile
-
-import buildbot
-import cloud
-import compile_torture_tests
-import execute_files
-from file_util import Chdir, CopyTree, Mkdir, Remove
-import host_toolchains
-import link_assembly_files
-import proc
-import testing
-import work_dirs
-from urllib.request import urlopen, URLError
-
-SCRIPT_DIR = os.path.dirname(os.path.abspath(__file__))
-JSVU_OUT_DIR = os.path.expanduser(os.path.join('~', '.jsvu'))
-
-# This file has a special path to avoid warnings about the system being unknown
-CMAKE_TOOLCHAIN_FILE = 'Wasi.cmake'
-
-EMSCRIPTEN_CONFIG_UPSTREAM = 'emscripten_config_upstream'
-RELEASE_DEPS_FILE = 'DEPS.tagged-release'
-
-# Avoid flakes: use cached repositories to avoid relying on external network.
-GIT_MIRROR_BASE = 'https://chromium.googlesource.com/'
-GITHUB_MIRROR_BASE = GIT_MIRROR_BASE + 'external/github.com/'
-WASM_GIT_BASE = GITHUB_MIRROR_BASE + 'WebAssembly/'
-EMSCRIPTEN_GIT_BASE = 'https://github.com/emscripten-core/'
-LLVM_GIT_BASE = 'https://github.com/llvm/'
-
-# Name of remote for build script to use. Don't touch origin to avoid
-# clobbering any local development.
-WATERFALL_REMOTE = '_waterfall'
-
-WASM_STORAGE_BASE = 'https://wasm.storage.googleapis.com/'
-
-GNUWIN32_ZIP = 'gnuwin32.zip'
-
-# This version is the current LLVM version in development. This needs to be
-# manually updated to the latest x.0.0 version whenever LLVM starts development
-# on a new major version. This is so our manual build of compiler-rt is put
-# where LLVM expects it.
-LLVM_VERSION = '12.0.0'
-
-# Update this number each time you want to create a clobber build.  If the
-# clobber_version.txt file in the build dir doesn't match we remove ALL work
-# dirs.  This works like a simpler version of chromium's landmine feature.
-CLOBBER_BUILD_TAG = 23
-
-V8_BUILD_SUBDIR = os.path.join('out.gn', 'x64.release')
-
-LINUX_SYSROOT = 'sysroot_debian_stretch_amd64'
-LINUX_SYSROOT_URL = WASM_STORAGE_BASE + LINUX_SYSROOT + '_v2.tar.xz'
-
-options = None
-
-
-def GccTestDir():
-    return GetSrcDir('gcc', 'gcc', 'testsuite')
-
-
-def GetBuildDir(*args):
-    return os.path.join(work_dirs.GetBuild(), *args)
-
-
-def GetPrebuilt(*args):
-    return os.path.join(work_dirs.GetPrebuilt(), *args)
-
-
-def GetPrebuiltClang(binary):
-    return os.path.join(work_dirs.GetV8(), 'third_party', 'llvm-build',
-                        'Release+Asserts', 'bin', binary)
-
-
-def GetSrcDir(*args):
-    return os.path.join(work_dirs.GetSync(), *args)
-
-
-def GetInstallDir(*args):
-    return os.path.join(work_dirs.GetInstall(), *args)
-
-
-def GetTestDir(*args):
-    return os.path.join(work_dirs.GetTest(), *args)
-
-
-def GetLLVMSrcDir(*args):
-    return GetSrcDir('llvm-project', *args)
-
-
-def IsWindows():
-    return sys.platform == 'win32'
-
-
-def IsLinux():
-    return sys.platform.startswith('linux')
-
-
-def IsMac():
-    return sys.platform == 'darwin'
-
-
-def Executable(name, extension='.exe'):
-    return name + extension if IsWindows() else name
-
-
-def WindowsFSEscape(path):
-    return os.path.normpath(path).replace('\\', '/')
-
-
-# Use prebuilt Node.js because the buildbots don't have node preinstalled
-NODE_VERSION = '12.18.1'
-NODE_BASE_NAME = 'node-v' + NODE_VERSION + '-'
-
-
-def NodePlatformName():
-    return {
-        'darwin': 'darwin-x64',
-        'linux': 'linux-x64',
-        'linux2': 'linux-x64',
-        'win32': 'win-x64'
-    }[sys.platform]
-
-
-def NodeBinDir():
-    node_subdir = NODE_BASE_NAME + NodePlatformName()
-    if IsWindows():
-        return GetPrebuilt(node_subdir)
-    return GetPrebuilt(node_subdir, 'bin')
-
-
-def NodeBin():
-    return Executable(os.path.join(NodeBinDir(), 'node'))
-
-
-def CMakePlatformName():
-    return {
-        'linux': 'Linux',
-        'linux2': 'Linux',
-        'darwin': 'Darwin',
-        'win32': 'win64'
-    }[sys.platform]
-
-
-def CMakeArch():
-    return 'x64' if IsWindows() else 'x86_64'
-
-
-PREBUILT_CMAKE_VERSION = '3.15.3'
-PREBUILT_CMAKE_BASE_NAME = 'cmake-%s-%s-%s' % (
-    PREBUILT_CMAKE_VERSION, CMakePlatformName(), CMakeArch())
-
-
-def PrebuiltCMakeDir(*args):
-    return GetPrebuilt(PREBUILT_CMAKE_BASE_NAME, *args)
-
-
-def PrebuiltCMakeBin():
-    if IsMac():
-        bin_dir = os.path.join('CMake.app', 'Contents', 'bin')
-    else:
-        bin_dir = 'bin'
-    return PrebuiltCMakeDir(bin_dir, 'cmake')
-
-
-def BuilderPlatformName():
-    return {
-        'linux': 'linux',
-        'linux2': 'linux',
-        'darwin': 'mac',
-        'win32': 'windows'
-    }[sys.platform]
-
-
-def D8Bin():
-    if IsMac():
-        return os.path.join(JSVU_OUT_DIR, 'v8')
-    return Executable(GetInstallDir('bin', 'd8'))
-
-
-# Java installed in the buildbots are too old while emscripten uses closure
-# compiler that requires Java SE 8.0 (version 52) or above
-JAVA_VERSION = '9.0.1'
-
-
-def JavaDir():
-    outdir = GetPrebuilt('jre-' + JAVA_VERSION)
-    if IsMac():
-        outdir += '.jre'
-    return outdir
-
-
-def JavaBin():
-    if IsMac():
-        bin_dir = os.path.join('Contents', 'Home', 'bin')
-    else:
-        bin_dir = 'bin'
-    return Executable(os.path.join(JavaDir(), bin_dir, 'java'))
-
-
-# Known failures.
-IT_IS_KNOWN = 'known_gcc_test_failures.txt'
-ASM2WASM_KNOWN_TORTURE_COMPILE_FAILURES = [
-    os.path.join(SCRIPT_DIR, 'test', 'asm2wasm_compile_' + IT_IS_KNOWN)
-]
-EMWASM_KNOWN_TORTURE_COMPILE_FAILURES = [
-    os.path.join(SCRIPT_DIR, 'test', 'emwasm_compile_' + IT_IS_KNOWN)
-]
-
-RUN_KNOWN_TORTURE_FAILURES = [
-    os.path.join(SCRIPT_DIR, 'test', 'run_' + IT_IS_KNOWN)
-]
-LLD_KNOWN_TORTURE_FAILURES = [
-    os.path.join(SCRIPT_DIR, 'test', 'lld_' + IT_IS_KNOWN)
-]
-
-# Exclusions (known failures are compiled and run, and expected to fail,
-# whereas exclusions are not even run, e.g. because they have UB which
-# results in infinite loops)
-LLVM_TORTURE_EXCLUSIONS = [
-    os.path.join(SCRIPT_DIR, 'test', 'llvm_torture_exclusions')
-]
-
-RUN_LLVM_TESTSUITE_FAILURES = [
-    os.path.join(SCRIPT_DIR, 'test', 'llvmtest_known_failures.txt')
-]
-
-# Optimization levels
-BARE_TEST_OPT_FLAGS = ['O0', 'O2']
-EMSCRIPTEN_TEST_OPT_FLAGS = ['O0', 'O3']
-
-NPROC = multiprocessing.cpu_count()
-
-if IsMac():
-    # Experimental temp fix for crbug.com/829034 stdout write sometimes fails
-    from fcntl import fcntl, F_GETFL, F_SETFL
-    fd = sys.stdout.fileno()
-    flags = fcntl(fd, F_GETFL)
-    fcntl(fd, F_SETFL, flags & ~os.O_NONBLOCK)
-
-# Pin the GCC revision so that new torture tests don't break the bot. This
-# should be manually updated when convenient.
-GCC_REVISION = 'b6125c702850488ac3bfb1079ae5c9db89989406'
-GCC_CLONE_DEPTH = 1000
-
-g_should_use_lto = None
-
-
-def ShouldUseLTO():
-    if options.use_lto == 'auto':
-        # Avoid shelling out to git (via RevisionModifiesFile) more than once.
-        global g_should_use_lto
-        if g_should_use_lto is None:
-            g_should_use_lto = RevisionModifiesFile(
-                GetSrcDir(RELEASE_DEPS_FILE))
-        return g_should_use_lto
-    return options.use_lto == 'true'
-
-
-def CopyBinaryToArchive(binary, prefix=''):
-    """All binaries are archived in the same tar file."""
-    install_bin = GetInstallDir(prefix, 'bin')
-    print('Copying binary %s to archive %s' % (binary, install_bin))
-    Mkdir(install_bin)
-    shutil.copy2(binary, install_bin)
-
-
-def CopyLibraryToArchive(library, prefix=''):
-    """All libraries are archived in the same tar file."""
-    install_lib = GetInstallDir(prefix, 'lib')
-    print('Copying library %s to archive %s' % (library, install_lib))
-    Mkdir(install_lib)
-    shutil.copy2(library, install_lib)
-
-
-def CopyLibraryToSysroot(library):
-    """All libraries are archived in the same tar file."""
-    install_lib = GetInstallDir('sysroot', 'lib', 'wasm32-wasi')
-    print('Copying library %s to archive %s' % (library, install_lib))
-    Mkdir(install_lib)
-    shutil.copy2(library, install_lib)
-
-
-def Archive(directory, print_content=False):
-    """Create an archive file from directory."""
-    # Use the format "native" to the platform
-    if IsWindows():
-        archive = Zip(directory, print_content)
-    else:
-        archive = Tar(directory, print_content)
-    print('Archive created: %s [%s]' % (archive, os.path.getsize(archive)))
-    return archive
-
-
-def Tar(directory, print_content=False):
-    assert os.path.isdir(directory), 'Must tar a directory to avoid tarbombs'
-    up_directory, basename = os.path.split(directory)
-    tar = os.path.join(up_directory, basename + '.tbz2')
-    Remove(tar)
-    if print_content:
-        proc.check_call(
-            ['find', basename, '-type', 'f', '-exec', 'ls', '-lhS', '{}', '+'],
-            cwd=up_directory)
-    proc.check_call(['tar', 'cjf', tar, basename], cwd=up_directory)
-    proc.check_call(['ls', '-lh', tar], cwd=up_directory)
-    return tar
-
-
-def Zip(directory, print_content=False):
-    assert os.path.isdir(directory), 'Must be a directory'
-    dirname, basename = os.path.split(directory)
-    archive = os.path.join(dirname, basename + '.zip')
-    print('Creating zip archive', archive)
-    with zipfile.ZipFile(archive, 'w', zipfile.ZIP_DEFLATED) as z:
-        for root, dirs, files in os.walk(directory):
-            for name in files:
-                fs_path = os.path.join(root, name)
-                zip_path = os.path.relpath(fs_path, os.path.dirname(directory))
-                if print_content:
-                    print('Adding', fs_path)
-                z.write(fs_path, zip_path)
-    print('Size:', os.stat(archive).st_size)
-    return archive
-
-
-def UploadFile(local_name, remote_name):
-    """Archive the file with the given name, and with the LLVM git hash."""
-    if not buildbot.IsUploadingBot():
-        return
-    buildbot.Link(
-        'download',
-        cloud.Upload(
-            local_name, '%s/%s/%s' %
-            (buildbot.BuilderName(), buildbot.BuildNumber(), remote_name)))
-
-
-def UploadArchive(name, archive):
-    """Archive the tar/zip file with the given name and the build number."""
-    if not buildbot.IsUploadingBot():
-        return
-    extension = os.path.splitext(archive)[1]
-    UploadFile(archive, 'wasm-%s%s' % (name, extension))
-
-
-# Repo and subproject utilities
-
-
-def GitRemoteUrl(cwd, remote):
-    """Get the URL of a remote."""
-    return proc.check_output(
-        ['git', 'config', '--get', 'remote.%s.url' % remote],
-        cwd=cwd).strip()
-
-
-def RemoteBranch(branch):
-    """Get the remote-qualified branch name to use for waterfall"""
-    return WATERFALL_REMOTE + '/' + branch
-
-
-def GitUpdateRemote(src_dir, git_repo, remote_name):
-    try:
-        proc.check_call(['git', 'remote', 'set-url', remote_name, git_repo],
-                        cwd=src_dir)
-    except proc.CalledProcessError:
-        # If proc.check_call fails it throws an exception. 'git remote set-url'
-        # fails when the remote doesn't exist, so we should try to add it.
-        proc.check_call(['git', 'remote', 'add', remote_name, git_repo],
-                        cwd=src_dir)
-
-
-class Filter(object):
-    """Filter for source or build rules, to allow including or excluding only
-     selected targets.
-    """
-    def __init__(self, name=None, include=None, exclude=None):
-        """
-        include:
-          if present, only items in it will be included (if empty, nothing will
-          be included).
-        exclude:
-          if present, items in it will be excluded.
-          include ane exclude cannot both be present.
-        """
-        if include and exclude:
-            raise Exception(
-                'Filter cannot include both include and exclude rules')
-
-        self.name = name
-        self.include = include
-        self.exclude = exclude
-
-    def Apply(self, targets):
-        """Return the filtered list of targets."""
-        all_names = [t.name for t in targets]
-        specified_names = self.include or self.exclude or []
-        missing_names = [i for i in specified_names if i not in all_names]
-        if missing_names:
-            raise Exception('Invalid step name(s): {0}\n\n'
-                            'Valid {1} steps:\n{2}'.format(
-                                missing_names, self.name,
-                                TextWrapNameList(prefix='', items=targets)))
-
-        return [t for t in targets if self.Check(t.name)]
-
-    def Check(self, target):
-        """Return true if the specified target will be run."""
-        if self.include is not None:
-            return target in self.include
-
-        if self.exclude is not None:
-            return target not in self.exclude
-        return True
-
-    def All(self):
-        """Return true if all possible targets will be run."""
-        return self.include is None and not self.exclude
-
-    def Any(self):
-        """Return true if any targets can be run."""
-        return self.include is None or len(self.include)
-
-
-class Source(object):
-    """Metadata about a sync-able source repo on the waterfall"""
-    def __init__(self, name, src_dir, git_repo,
-                 checkout=RemoteBranch('main'), depth=None,
-                 custom_sync=None, os_filter=None):
-        self.name = name
-        self.src_dir = src_dir
-        self.git_repo = git_repo
-        self.checkout = checkout
-        self.depth = depth
-        self.custom_sync = custom_sync
-        self.os_filter = os_filter
-
-        # Ensure that git URLs end in .git.  We have had issues in the past
-        # where github would not recognize the requests correctly otherwise due
-        # to chromium's builders setting custom GIT_USER_AGENT:
-        # https://bugs.chromium.org/p/chromium/issues/detail?id=711775
-        if git_repo:
-            assert git_repo.endswith('.git'), 'Git URLs should end in .git'
-
-    def Sync(self, good_hashes=None):
-        if self.os_filter and not self.os_filter.Check(BuilderPlatformName()):
-            print("Skipping %s: Doesn't work on %s" %
-                  (self.name, BuilderPlatformName()))
-            return
-        if good_hashes and good_hashes.get(self.name):
-            self.checkout = good_hashes[self.name]
-        if self.custom_sync:
-            self.custom_sync(self.name, self.src_dir, self.git_repo)
-        else:
-            self.GitCloneFetchCheckout()
-
-    def GitCloneFetchCheckout(self):
-        """Clone a git repo if not already cloned, then fetch and checkout."""
-        if os.path.isdir(self.src_dir):
-            print('%s directory already exists' % self.name)
-        else:
-            clone = ['clone', self.git_repo, self.src_dir]
-            if self.depth:
-                clone.append('--depth')
-                clone.append(str(self.depth))
-            proc.check_call(['git'] + clone)
-
-        GitUpdateRemote(self.src_dir, self.git_repo, WATERFALL_REMOTE)
-        proc.check_call(['git', 'fetch', '--force', '--prune', '--tags',
-                         WATERFALL_REMOTE],
-                        cwd=self.src_dir)
-        if not self.checkout.startswith(WATERFALL_REMOTE + '/'):
-            sys.stderr.write(
-                ('WARNING: `git checkout %s` not based on waterfall '
-                 'remote (%s), checking out local branch' %
-                 (self.checkout, WATERFALL_REMOTE)))
-        proc.check_call(['git', 'checkout', self.checkout], cwd=self.src_dir)
-        proc.check_call(['git', 'submodule', 'update', '--init'],
-                        cwd=self.src_dir)
-
-    def CurrentGitInfo(self):
-        if not os.path.exists(self.src_dir):
-            return None
-
-        def pretty(fmt):
-            return proc.check_output(
-                ['git', 'log', '-n1',
-                 '--pretty=format:%s' % fmt],
-                cwd=self.src_dir).strip()
-
-        try:
-            remote = GitRemoteUrl(self.src_dir, WATERFALL_REMOTE)
-        except proc.CalledProcessError:
-            # Not all checkouts have the '_waterfall' remote (e.g. the
-            # waterfall itself) so fall back to origin on failure
-            remote = GitRemoteUrl(self.src_dir, 'origin')
-
-        return {
-            'hash': pretty('%H'),
-            'name': pretty('%aN'),
-            'email': pretty('%ae'),
-            'subject': pretty('%s'),
-            'remote': remote,
-        }
-
-    def PrintGitStatus(self):
-        """"Print the current git status for the sync target."""
-        print('<<<<<<<<<< STATUS FOR', self.name, '>>>>>>>>>>')
-        if os.path.exists(self.src_dir):
-            proc.check_call(['git', 'status'], cwd=self.src_dir)
-        print()
-
-
-def RevisionModifiesFile(f):
-    """Return True if the file f is modified in the index, working tree, or
-    HEAD commit."""
-    if not os.path.isfile(f):
-        return False
-    cwd = os.path.dirname(f)
-    # If the file is modified in the index or working tree, then return true.
-    # This happens on trybots.
-    status = proc.check_output(['git', 'status', '--porcelain', f],
-                               cwd=cwd).strip()
-    changed = len(status) != 0
-    s = status if changed else '(unchanged)'
-    print('%s git status: %s' % (f, s))
-    if changed:
-        return True
-    # Else find the most recent commit that modified f, and return true if
-    # that's the HEAD commit.
-    head_rev = proc.check_output(['git', 'rev-parse', 'HEAD'], cwd=cwd).strip()
-    last_rev = proc.check_output(
-        ['git', 'rev-list', '-n1', 'HEAD', f], cwd=cwd).strip()
-    print('Last rev modifying %s is %s, HEAD is %s' % (f, last_rev, head_rev))
-    return head_rev == last_rev
-
-
-def ChromiumFetchSync(name, work_dir, git_repo,
-                      checkout=RemoteBranch('master')):
-    """Some Chromium projects want to use gclient for clone and
-    dependencies."""
-    if os.path.isdir(work_dir):
-        print('%s directory already exists' % name)
-    else:
-        # Create Chromium repositories one deeper, separating .gclient files.
-        parent = os.path.split(work_dir)[0]
-        Mkdir(parent)
-        proc.check_call(['gclient', 'config', git_repo], cwd=parent)
-        proc.check_call(['git', 'clone', git_repo], cwd=parent)
-
-    GitUpdateRemote(work_dir, git_repo, WATERFALL_REMOTE)
-    proc.check_call(['git', 'fetch', WATERFALL_REMOTE], cwd=work_dir)
-    proc.check_call(['git', 'checkout', checkout], cwd=work_dir)
-    proc.check_call(['gclient', 'sync'], cwd=work_dir)
-    return (name, work_dir)
-
-
-def SyncToolchain(name, src_dir, git_repo):
-    if IsWindows():
-        host_toolchains.SyncWinToolchain()
-    else:
-        host_toolchains.SyncPrebuiltClang(src_dir)
-        cc = GetPrebuiltClang('clang')
-        cxx = GetPrebuiltClang('clang++')
-        assert os.path.isfile(cc), 'Expect clang at %s' % cc
-        assert os.path.isfile(cxx), 'Expect clang++ at %s' % cxx
-
-
-def SyncArchive(out_dir, name, url, create_out_dir=False):
-    """Download and extract an archive (zip, tar.gz or tar.xz) file from a URL.
-
-    The extraction happens in the prebuilt dir. If create_out_dir is True,
-    out_dir will be created and the archive will be extracted inside. Otherwise
-    the archive is expected to contain a top-level directory with all the
-    files; this is expected to be 'out_dir', so if 'out_dir' already exists
-    then the download will be skipped.
-    """
-    stamp_file = os.path.join(out_dir, 'stamp.txt')
-    if os.path.isdir(out_dir):
-        if os.path.isfile(stamp_file):
-            with open(stamp_file) as f:
-                stamp_url = f.read().strip()
-            if stamp_url == url:
-                print('%s directory already exists' % name)
-                return
-        print('%s directory exists but is not up-to-date' % name)
-    print('Downloading %s from %s' % (name, url))
-
-    if create_out_dir:
-        os.makedirs(out_dir)
-        work_dir = out_dir
-    else:
-        work_dir = os.path.dirname(out_dir)
-
-    try:
-        f = urlopen(url)
-        print('URL: %s' % f.geturl())
-        print('Info: %s' % f.info())
-        with tempfile.NamedTemporaryFile() as t:
-            t.write(f.read())
-            t.flush()
-            t.seek(0)
-            print('Extracting into %s' % work_dir)
-            ext = os.path.splitext(url)[-1]
-            if ext == '.zip':
-                with zipfile.ZipFile(t, 'r') as zip:
-                    zip.extractall(path=work_dir)
-            elif ext == '.xz':
-                proc.check_call(['tar', '-xf', t.name], cwd=work_dir)
-            else:
-                tarfile.open(fileobj=t).extractall(path=work_dir)
-    except URLError as e:
-        print('Error downloading %s: %s' % (url, e))
-        raise
-
-    with open(stamp_file, 'w') as f:
-        f.write(url + '\n')
-
-
-def SyncPrebuiltCMake(name, src_dir, git_repo):
-    extension = '.zip' if IsWindows() else '.tar.gz'
-    url = WASM_STORAGE_BASE + PREBUILT_CMAKE_BASE_NAME + extension
-    SyncArchive(PrebuiltCMakeDir(), 'cmake', url)
-
-
-def SyncPrebuiltNodeJS(name, src_dir, git_repo):
-    extension = {
-        'darwin': 'tar.xz',
-        'linux': 'tar.xz',
-        'win32': 'zip'
-    }[sys.platform]
-    out_dir = GetPrebuilt(NODE_BASE_NAME + NodePlatformName())
-    tarball = NODE_BASE_NAME + NodePlatformName() + '.' + extension
-    node_url = WASM_STORAGE_BASE + tarball
-    return SyncArchive(out_dir, name, node_url)
-
-
-# Utilities needed for running LLVM regression tests on Windows
-def SyncGNUWin32(name, src_dir, git_repo):
-    if not IsWindows():
-        return
-    url = WASM_STORAGE_BASE + GNUWIN32_ZIP
-    return SyncArchive(GetPrebuilt('gnuwin32'), name, url)
-
-
-def SyncPrebuiltJava(name, src_dir, git_repo):
-    platform = {
-        'linux': 'linux',
-        'linux2': 'linux',
-        'darwin': 'osx',
-        'win32': 'windows'
-    }[sys.platform]
-    tarball = 'jre-' + JAVA_VERSION + '_' + platform + '-x64_bin.tar.gz'
-    java_url = WASM_STORAGE_BASE + tarball
-    SyncArchive(JavaDir(), name, java_url)
-
-
-def SyncLinuxSysroot(name, src_dir, git_repo):
-    if not (IsLinux() and host_toolchains.ShouldUseSysroot()):
-        return
-    SyncArchive(GetPrebuilt(LINUX_SYSROOT),
-                name,
-                LINUX_SYSROOT_URL,
-                create_out_dir=True)
-
-
-def SyncReleaseDeps(name, src_dir, git_repo):
-    if not ShouldUseLTO():
-        print('ShouldUseLTO is false, skipping release DEPS')
-        return
-    shutil.copy2(GetSrcDir(RELEASE_DEPS_FILE), GetSrcDir('DEPS'))
-    proc.check_call(['gclient', 'sync'], cwd=GetSrcDir())
-
-
-def NoSync(*args):
-    pass
-
-
-def AllSources():
-    return [
-        Source('waterfall', SCRIPT_DIR, None, custom_sync=NoSync),
-        Source('llvm', GetSrcDir('llvm-project'),
-               LLVM_GIT_BASE + 'llvm-project.git'),
-        Source('llvm-test-suite', GetSrcDir('llvm-test-suite'),
-               LLVM_GIT_BASE + 'llvm-test-suite.git'),
-        Source('emscripten', GetSrcDir('emscripten'),
-               EMSCRIPTEN_GIT_BASE + 'emscripten.git'),
-        Source('gcc', GetSrcDir('gcc'),
-               GIT_MIRROR_BASE + 'chromiumos/third_party/gcc.git',
-               checkout=GCC_REVISION, depth=GCC_CLONE_DEPTH),
-        Source('v8', work_dirs.GetV8(), GIT_MIRROR_BASE + 'v8/v8.git',
-               custom_sync=ChromiumFetchSync, checkout=RemoteBranch('master')),
-        Source('host-toolchain', work_dirs.GetV8(), '',
-               custom_sync=SyncToolchain),
-        Source('cmake', '', '',  # The source and git args are ignored.
-               custom_sync=SyncPrebuiltCMake),
-        Source('nodejs', '', '',  # The source and git args are ignored.
-               custom_sync=SyncPrebuiltNodeJS),
-        Source('gnuwin32', '', '',  # The source and git args are ignored.
-               custom_sync=SyncGNUWin32),
-        Source('wabt', GetSrcDir('wabt'), WASM_GIT_BASE + 'wabt.git'),
-        Source('binaryen', GetSrcDir('binaryen'),
-               WASM_GIT_BASE + 'binaryen.git'),
-        Source('wasi-libc', GetSrcDir('wasi-libc'),
-               'https://github.com/CraneStation/wasi-libc.git'),
-        Source('java', '', '',  # The source and git args are ignored.
-               custom_sync=SyncPrebuiltJava),
-        Source('sysroot', '', '',  # The source and git args are ignored.
-               custom_sync=SyncLinuxSysroot),
-        Source('deps', '', '', custom_sync=SyncReleaseDeps)
-    ]
-
-
-def RemoveIfBot(work_dir):
-    if buildbot.IsBot():
-        Remove(work_dir)
-
-
-def Clobber():
-    # Don't automatically clobber non-bot (local) work directories
-    if not buildbot.IsBot() and not options.clobber:
-        return
-
-    clobber = options.clobber or buildbot.ShouldClobber()
-    clobber_file = GetBuildDir('clobber_version.txt')
-    if not clobber:
-        if not os.path.exists(clobber_file):
-            print('Clobber file %s does not exist.' % clobber_file)
-            clobber = True
-        else:
-            existing_tag = int(open(clobber_file).read().strip())
-            if existing_tag != CLOBBER_BUILD_TAG:
-                print('Clobber file %s has tag %s.' %
-                      (clobber_file, existing_tag))
-                clobber = True
-
-    if not clobber:
-        return
-
-    buildbot.Step('Clobbering work dir')
-    if buildbot.IsEmscriptenReleasesBot() or not buildbot.IsBot():
-        # Never clear source dirs locally.
-        # On emscripten-releases, depot_tools and the recipe clear the rest.
-        dirs = [work_dirs.GetBuild()]
-    else:
-        dirs = work_dirs.GetAll()
-    for work_dir in dirs:
-        RemoveIfBot(work_dir)
-        Mkdir(work_dir)
-    # Also clobber v8
-    v8_dir = os.path.join(work_dirs.GetV8(), V8_BUILD_SUBDIR)
-    Remove(v8_dir)
-    with open(clobber_file, 'w') as f:
-        f.write('%s\n' % CLOBBER_BUILD_TAG)
-
-
-def SyncRepos(filter, sync_lkgr=False):
-    if not filter.Any():
-        return
-    buildbot.Step('Sync Repos')
-
-    good_hashes = None
-    if sync_lkgr:
-        lkgr_file = GetBuildDir('lkgr.json')
-        cloud.Download('%s/lkgr.json' % BuilderPlatformName(), lkgr_file)
-        lkgr = json.loads(open(lkgr_file).read())
-        good_hashes = {}
-        for k, v in lkgr['repositories'].iteritems():
-            good_hashes[k] = v.get('hash') if v else None
-
-    for repo in filter.Apply(AllSources()):
-        repo.Sync(good_hashes)
-
-
-def GetRepoInfo():
-    """Collect a readable form of all repo information here, preventing the
-  summary from getting out of sync with the actual list of repos."""
-    info = {}
-    for r in AllSources():
-        info[r.name] = r.CurrentGitInfo()
-    return info
-
-
-# Build rules
-
-def OverrideCMakeCompiler():
-    if not host_toolchains.ShouldForceHostClang():
-        return []
-    cc = 'clang-cl' if IsWindows() else 'clang'
-    cxx = 'clang-cl' if IsWindows() else 'clang++'
-    tools = [
-        '-DCMAKE_C_COMPILER=' + Executable(GetPrebuiltClang(cc)),
-        '-DCMAKE_CXX_COMPILER=' + Executable(GetPrebuiltClang(cxx)),
-    ]
-    if IsWindows():
-        tools.append('-DCMAKE_LINKER=' +
-                     Executable(GetPrebuiltClang('lld-link')))
-
-    return tools
-
-
-def CMakeCommandBase():
-    command = [PrebuiltCMakeBin(), '-G', 'Ninja']
-    # Python's location could change, so always update CMake's cache
-    command.append('-DPYTHON_EXECUTABLE=%s' % sys.executable)
-    command.append('-DCMAKE_EXPORT_COMPILE_COMMANDS=ON')
-    command.append('-DCMAKE_BUILD_TYPE=Release')
-    if IsMac():
-        # Target MacOS Sierra (10.12)
-        command.append('-DCMAKE_OSX_DEPLOYMENT_TARGET=10.12')
-    elif IsWindows():
-        # CMake's usual logic fails to find LUCI's git on Windows
-        git_exe = proc.Which('git')
-        command.append('-DGIT_EXECUTABLE=%s' % git_exe)
-    return command
-
-
-def CMakeCommandNative(args, build_dir):
-    command = CMakeCommandBase()
-    command.append('-DCMAKE_INSTALL_PREFIX=%s' % GetInstallDir())
-    if IsLinux() and host_toolchains.ShouldUseSysroot():
-        command.append('-DCMAKE_SYSROOT=%s' % GetPrebuilt(LINUX_SYSROOT))
-        command.append('-DCMAKE_EXE_LINKER_FLAGS=-static-libstdc++')
-        command.append('-DCMAKE_SHARED_LINKER_FLAGS=-static-libstdc++')
-
-    elif IsMac() and host_toolchains.ShouldUseSysroot():
-        # Get XCode SDK path.
-        xcode_sdk_path = proc.check_output(['xcrun',
-                                            '--show-sdk-path']).strip()
-        # Create relpath symlink if it doesn't exist.
-        # If it does exist, but points to a different location, update it.
-        symlink_path = os.path.join(build_dir, 'xcode_sdk')
-        if os.path.lexists(
-                symlink_path) and os.readlink(symlink_path) != xcode_sdk_path:
-            os.remove(symlink_path)
-        if not os.path.exists(symlink_path):
-            os.symlink(xcode_sdk_path, symlink_path)
-        command.append('-DCMAKE_OSX_SYSROOT=%s' % symlink_path)
-        command.append('-DCMAKE_SYSROOT=%s' % symlink_path)
-
-    if host_toolchains.ShouldForceHostClang():
-        command.extend(OverrideCMakeCompiler())
-        # Goma doesn't have the "default" SDK compilers in its cache, so only
-        # use Goma when using our prebuilt Clang.
-        command.extend(host_toolchains.CMakeLauncherFlags())
-    command.extend(args)
-    # On Windows, CMake chokes on paths containing backslashes that come from
-    # the command line. Probably they just need to be escaped, but using '/'
-    # instead is easier and works just as well.
-    return [arg.replace('\\', '/') for arg in command]
-
-
-def CMakeCommandWasi(args):
-    command = CMakeCommandBase()
-    command.append('-DCMAKE_TOOLCHAIN_FILE=%s' %
-                   GetInstallDir(CMAKE_TOOLCHAIN_FILE))
-    command.extend(args)
-    return command
-
-
def CopyLLVMTools(build_dir, prefix=''):
    """Prune unneeded clang/lld binaries from the install dir and copy
    extra LLVM tools from the build tree into the archive.

    Args:
        build_dir: LLVM build output directory (tools live in its bin/).
        prefix: optional subdirectory of the install dir to operate in.
    """
    # The following aren't useful for now, and take up space.
    # DLLs are in bin/ on Windows but in lib/ on posix.
    for unneeded_tool in ('clang-check', 'clang-cl', 'clang-cpp',
                          'clang-extdef-mapping', 'clang-format',
                          'clang-func-mapping', 'clang-import-test',
                          'clang-offload-bundler', 'clang-refactor',
                          'clang-rename', 'clang-scan-deps', 'libclang.dll',
                          'lld-link', 'ld.lld', 'ld64.lld', 'llvm-lib',
                          'ld64.lld.darwinnew', 'ld64.lld.darwinold'):
        Remove(GetInstallDir(prefix, 'bin', Executable(unneeded_tool)))

    # BUG FIX: the original comprehension was missing the '% suffix', so it
    # built ['libclang.%s', 'libclang.%s'] and the shared libraries were
    # never actually removed.
    for suffix in ('so.*', 'dylib'):
        Remove(GetInstallDir(prefix, 'lib', 'libclang.%s' % suffix))

    # The following are useful, LLVM_INSTALL_TOOLCHAIN_ONLY did away with them.
    extra_bins = map(Executable, [
        'FileCheck', 'llc', 'llvm-as', 'llvm-dis', 'llvm-link', 'llvm-mc',
        'llvm-nm', 'llvm-objdump', 'llvm-readobj', 'llvm-size', 'opt',
        'llvm-dwarfdump', 'llvm-dwp'
    ])
    for pattern in extra_bins:
        # glob already yields full paths; pass them straight through instead
        # of re-joining with build_dir/bin (the old join only worked because
        # joining with an absolute path discards the prefix).
        for path in glob.glob(os.path.join(build_dir, 'bin', pattern)):
            CopyBinaryToArchive(path, prefix)
-
-
def BuildEnv(build_dir, use_gnuwin32=False, bin_subdir=False,
             runtime='Release'):
    """Prepare a Windows build environment for build_dir.

    Returns an environment dict with MSVC variables set (from
    host_toolchains.SetUpVSEnv), or None on non-Windows hosts where no
    special environment is required.

    Args:
        build_dir: directory the build will run in.
        use_gnuwin32: also put the checked-in gnuwin32 tools on PATH.
        bin_subdir: place runtime DLLs in build_dir/bin instead of build_dir.
        runtime: MSVC runtime flavor for the copied DLLs
            ('Release' or 'Debug').
    """
    if not IsWindows():
        return None
    cc_env = host_toolchains.SetUpVSEnv(build_dir)
    if use_gnuwin32:
        cc_env['PATH'] = cc_env['PATH'] + os.pathsep + GetSrcDir(
            'gnuwin32', 'bin')
    # The DLLs must sit next to the executables that will load them.
    bin_dir = build_dir if not bin_subdir else os.path.join(build_dir, 'bin')
    Mkdir(bin_dir)
    assert runtime in ['Release', 'Debug']
    host_toolchains.CopyDlls(bin_dir, runtime)
    return cc_env
-
-
def LLVM(build_dir):
    """Configure, build, and install LLVM (clang + lld) with ninja.

    Regular builds use the 'all'/'install' targets; LTO builds restrict
    the output to a curated tool list via the 'distribution' targets.
    Afterwards extra tools are copied into the archive and
    wasm32-*-clang/clang++ entry points are created (symlinks on posix,
    file copies on Windows).
    """
    buildbot.Step('LLVM')
    Mkdir(build_dir)
    cc_env = BuildEnv(build_dir, bin_subdir=True)
    # The LLVM dylib is not built on Windows, and is skipped under LTO.
    build_dylib = 'ON' if not IsWindows() and not ShouldUseLTO() else 'OFF'
    command = CMakeCommandNative([
        GetLLVMSrcDir('llvm'),
        '-DCMAKE_CXX_FLAGS=-Wno-nonportable-include-path',
        '-DLLVM_ENABLE_LIBXML2=OFF',
        '-DLLVM_INCLUDE_EXAMPLES=OFF',
        '-DLLVM_BUILD_LLVM_DYLIB=%s' % build_dylib,
        '-DLLVM_LINK_LLVM_DYLIB=%s' % build_dylib,
        '-DCMAKE_BUILD_WITH_INSTALL_RPATH=ON',
        '-DLLVM_ENABLE_BINDINGS=OFF',
        # Our mac bot's toolchain's ld64 is too old for trunk libLTO.
        '-DLLVM_TOOL_LTO_BUILD=OFF',
        '-DLLVM_INSTALL_TOOLCHAIN_ONLY=ON',
        '-DLLVM_TARGETS_TO_BUILD=X86;WebAssembly',
        '-DLLVM_ENABLE_PROJECTS=lld;clang',
        # linking libtinfo dynamically causes problems on some linuxes,
        # https://github.com/emscripten-core/emsdk/issues/252
        '-DLLVM_ENABLE_TERMINFO=%d' % (not IsLinux()),
        '-DCLANG_ENABLE_ARCMT=OFF',
        '-DCLANG_ENABLE_STATIC_ANALYZER=OFF',
    ], build_dir)

    if not IsMac():
        # LLD isn't fully baked on mac yet.
        command.append('-DLLVM_ENABLE_LLD=ON')

    ninja_targets = ('all', 'install')
    if ShouldUseLTO():
        # Only build/install the tools listed here when doing an LTO build,
        # to keep build time and archive size down.
        targets = ['clang', 'lld', 'llvm-ar', 'llvm-addr2line', 'llvm-cxxfilt',
                   'llvm-dwarfdump', 'llvm-dwp', 'llvm-nm', 'llvm-objcopy',
                   'llvm-objdump', 'llvm-ranlib', 'llvm-readobj', 'llvm-size',
                   'llvm-strings', 'llvm-symbolizer', 'clang-resource-headers']
        ninja_targets = ('distribution', 'install-distribution')
        targets.extend(['llc', 'opt'])  # TODO: remove uses of these upstream
        command.extend(['-DLLVM_ENABLE_ASSERTIONS=OFF',
                        '-DLLVM_INCLUDE_TESTS=OFF',
                        '-DLLVM_TOOLCHAIN_TOOLS=' + ';'.join(targets),
                        '-DLLVM_DISTRIBUTION_COMPONENTS=' + ';'.join(targets),
                        '-DLLVM_ENABLE_LTO=Thin'])

    else:
        command.extend(['-DLLVM_ENABLE_ASSERTIONS=ON'])

    jobs = host_toolchains.NinjaJobs()

    # Configure, build, then install.
    proc.check_call(command, cwd=build_dir, env=cc_env)
    proc.check_call(['ninja', '-v', ninja_targets[0]] + jobs,
                    cwd=build_dir, env=cc_env)
    proc.check_call(['ninja', ninja_targets[1]] + jobs,
                    cwd=build_dir, env=cc_env)

    CopyLLVMTools(build_dir)
    install_bin = GetInstallDir('bin')
    # Create wasm32-clang / wasm32-wasi-clang (and clang++) entry points.
    for target in ('clang', 'clang++'):
        for link in 'wasm32-', 'wasm32-wasi-':
            link = os.path.join(install_bin, link + target)
            if not IsWindows():
                if not os.path.islink(Executable(link)):
                    os.symlink(Executable(target), Executable(link))
            else:
                # Windows has no symlinks (at least not from python). Also
                # clang won't work as a native compiler anyway, so just install
                # it as wasm32-wasi-clang
                shutil.copy2(Executable(os.path.join(install_bin, target)),
                             Executable(link))
-
-
def LLVMTestDepends():
    """Build the 'test-depends' ninja target in the LLVM build tree."""
    buildbot.Step('LLVM Test Dependencies')
    build_dir = os.path.join(work_dirs.GetBuild(), 'llvm-out')
    cc_env = BuildEnv(build_dir, bin_subdir=True)
    cmd = ['ninja', '-v', 'test-depends'] + host_toolchains.NinjaJobs()
    proc.check_call(cmd, cwd=build_dir, env=cc_env)
-
-
def TestLLVMRegression():
    """Run LLVM's check-all regression suite (warn-only on Windows)."""
    build_dir = os.path.join(work_dirs.GetBuild(), 'llvm-out')
    cc_env = BuildEnv(build_dir, bin_subdir=True)
    if not os.path.isdir(build_dir):
        print('LLVM Build dir %s does not exist' % build_dir)
        buildbot.Fail()
        return

    def RunWithUnixUtils(cmd, **kwargs):
        # The test harness needs unix tools; on Windows run under git bash.
        prefix = ['git', 'bash'] if IsWindows() else []
        return proc.check_call(prefix + cmd, **kwargs)

    try:
        buildbot.Step('LLVM regression tests')
        RunWithUnixUtils(['ninja', 'check-all'], cwd=build_dir, env=cc_env)
    except proc.CalledProcessError:
        buildbot.FailUnless(lambda: IsWindows())
-
-
def V8():
    """Build V8's d8 shell and unittests with GN/ninja and archive d8.

    Writes a GN args file (enabling goma when available), generates the
    build with V8's own GN binary, builds with ninja, then copies d8 and
    its support files into the archive.
    """
    buildbot.Step('V8')
    src_dir = work_dirs.GetV8()
    out_dir = os.path.join(src_dir, V8_BUILD_SUBDIR)
    vpython = 'vpython' + ('.bat' if IsWindows() else '')

    # Generate and write a GN args file.
    gn_args = 'is_debug = false\ntarget_cpu = "x64"\n'
    if host_toolchains.UsingGoma():
        gn_args += 'use_goma = true\n'
        gn_args += 'goma_dir = "%s"\n' % host_toolchains.GomaDir()
    Mkdir(out_dir)
    with open(os.path.join(out_dir, 'args.gn'), 'w') as f:
        f.write(gn_args)
    # Invoke GN to generate. We need to use vpython as the script interpreter
    # since GN's scripts seem to require python2. Hence we need to invoke GN
    # directly rather than using one of V8's GN wrapper scripts (e.g. mb.py).
    # But because V8 has a different directory layout from Chrome, we can't
    # just use the GN wrapper in depot_tools, we have to invoke the one in the
    # V8 buildtools dir directly.
    gn_platform = 'linux64' if IsLinux() else 'mac' if IsMac() else 'win'
    gn_exe = Executable(os.path.join(src_dir, 'buildtools', gn_platform, 'gn'))
    proc.check_call([gn_exe, 'gen', out_dir, '--script-executable=' + vpython],
                    cwd=src_dir)

    jobs = host_toolchains.NinjaJobs()
    proc.check_call(['ninja', '-v', '-C', out_dir, 'd8', 'unittests'] + jobs,
                    cwd=src_dir)
    # Copy the V8 snapshot as well as the ICU data file for timezone data.
    # icudtl.dat is the little-endian version, which goes with x64.
    to_archive = [Executable('d8'), 'snapshot_blob.bin', 'icudtl.dat']
    for a in to_archive:
        CopyBinaryToArchive(os.path.join(out_dir, a))
-
-
def Jsvu():
    """Install JS engines via the jsvu tool (best-effort; warns on failure)."""
    buildbot.Step('jsvu')
    jsvu_dir = os.path.join(work_dirs.GetBuild(), 'jsvu')
    Mkdir(jsvu_dir)

    # jsvu OS identifiers:
    # https://github.com/GoogleChromeLabs/jsvu#supported-engines
    if IsWindows():
        os_id, js_engines = 'windows64', 'chakra'
    elif IsMac():
        os_id, js_engines = 'mac64', 'javascriptcore,v8'
    else:
        os_id, js_engines = 'linux64', 'javascriptcore'

    try:
        # https://github.com/GoogleChromeLabs/jsvu#installation
        # ...except we install it locally instead of globally.
        proc.check_call(['npm', 'install', 'jsvu'], cwd=jsvu_dir)

        jsvu_bin = Executable(
            os.path.join(jsvu_dir, 'node_modules', 'jsvu', 'cli.js'))
        # https://github.com/GoogleChromeLabs/jsvu#integration-with-non-interactive-environments
        proc.check_call(
            [jsvu_bin,
             '--os=%s' % os_id,
             '--engines=%s' % js_engines])

        # $HOME/.jsvu/chakra is now available on Windows.
        # $HOME/.jsvu/javascriptcore is now available on Mac.

        # TODO: Install the JSC binary in the output package, and add the
        # version info to the repo info JSON file (currently in GetRepoInfo)
    except proc.CalledProcessError:
        buildbot.Warn()
-
-
def Wabt(build_dir):
    """Configure, build, and install WABT with ninja."""
    buildbot.Step('WABT')
    Mkdir(build_dir)
    cc_env = BuildEnv(build_dir)

    configure = CMakeCommandNative(
        [GetSrcDir('wabt'), '-DBUILD_TESTS=OFF', '-DBUILD_LIBWASM=OFF'],
        build_dir)
    proc.check_call(configure, cwd=build_dir, env=cc_env)

    build_cmd = ['ninja', '-v'] + host_toolchains.NinjaJobs()
    proc.check_call(build_cmd, cwd=build_dir, env=cc_env)
    proc.check_call(['ninja', 'install'], cwd=build_dir, env=cc_env)
-
-
def Binaryen(build_dir):
    """Configure, build, and install binaryen with ninja."""
    buildbot.Step('binaryen')
    Mkdir(build_dir)
    # Currently it's a bad idea to do a non-asserts build of Binaryen
    cc_env = BuildEnv(build_dir, bin_subdir=True, runtime='Debug')

    configure = CMakeCommandNative([GetSrcDir('binaryen')], build_dir)
    configure.append('-DBYN_INSTALL_TOOLS_ONLY=ON')
    if ShouldUseLTO():
        configure += ['-DBUILD_STATIC_LIB=ON', '-DBYN_ENABLE_LTO=ON']

    proc.check_call(configure, cwd=build_dir, env=cc_env)
    proc.check_call(['ninja', '-v'] + host_toolchains.NinjaJobs(),
                    cwd=build_dir, env=cc_env)
    proc.check_call(['ninja', 'install'], cwd=build_dir, env=cc_env)
-
-
def InstallEmscripten():
    """Install emscripten into the archive dir and fetch its node deps.

    Raises:
        RuntimeError: if the host platform is not Mac, Windows, or Linux
            (no native Closure Compiler package exists for it).
    """
    src_dir = GetSrcDir('emscripten')
    em_install_dir = GetInstallDir('emscripten')
    Remove(em_install_dir)
    print('Installing emscripten into %s' % em_install_dir)
    proc.check_call([os.path.join('tools', 'install.py'), em_install_dir],
                    cwd=src_dir)

    print('Running npm install ...')
    proc.check_call(['npm', 'ci', '--no-optional'], cwd=em_install_dir)

    # Manually install the appropriate native Closure Compiler package
    # This is currently needed because npm ci will install the packages
    # for Closure for all platforms, adding 180MB to the download size
    # There are two problems here:
    #   1. npm ci does not consider the platform of optional dependencies
    #      https://github.com/npm/cli/issues/558
    #   2. A bug with the native compiler has bloated the packages from
    #      30MB to almost 300MB
    #      https://github.com/google/closure-compiler-npm/issues/186
    # If either of these bugs are fixed we could consider removing this
    # hack.
    if IsMac():
        native = 'google-closure-compiler-osx'
    elif IsWindows():
        native = 'google-closure-compiler-windows'
    elif IsLinux():
        native = 'google-closure-compiler-linux'
    else:
        # BUG FIX: previously `native` stayed None on unknown platforms and
        # was passed into the npm argv, crashing inside subprocess with an
        # obscure TypeError. Fail loudly instead.
        raise RuntimeError('no native Closure Compiler for this platform')
    proc.check_call(['npm', 'install', '--no-optional', native],
                    cwd=em_install_dir)
-                    cwd=em_install_dir)
-
-
def Emscripten():
    """Install emscripten, write its config, and prebuild system libraries.

    Prebuilding uses embuilder and depends on binaryen already being
    installed into the archive dir. A prebuild failure is recorded but
    does not abort the overall build.
    """
    InstallEmscripten()

    def WriteEmscriptenConfig(infile, outfile):
        # Instantiate the config template: substitute the install dir and
        # the prebuilt node/java binaries (with Windows paths escaped).
        with open(infile) as config:
            text = config.read().replace('{{WASM_INSTALL}}',
                                         WindowsFSEscape(GetInstallDir()))
            text = text.replace('{{PREBUILT_NODE}}',
                                WindowsFSEscape(NodeBin()))
            text = text.replace('{{PREBUILT_JAVA}}',
                                WindowsFSEscape(JavaBin()))
        with open(outfile, 'w') as config:
            config.write(text)

    # Set up the emscripten config and compile the libraries
    buildbot.Step('emscripten')
    config = GetInstallDir(EMSCRIPTEN_CONFIG_UPSTREAM)
    print('Config file: ', config)
    src_config = os.path.join(SCRIPT_DIR, os.path.basename(config))
    WriteEmscriptenConfig(src_config, config)

    env = os.environ.copy()
    env['EM_CONFIG'] = config
    try:
        # Use emscripten's embuilder to prebuild the system libraries.
        # This depends on binaryen already being built and installed into the
        # archive/install dir.
        proc.check_call([
            sys.executable,
            os.path.join(GetInstallDir('emscripten'), 'embuilder.py'), 'build',
            'SYSTEM'
        ], env=env)

    except proc.CalledProcessError:
        # Note the failure but allow the build to continue.
        buildbot.Fail()

    # Remove the sanity file.  This means it will get generated on first
    # use without clearing the cache.
    sanity = GetInstallDir('emscripten', 'cache', 'sanity.txt')
    if os.path.exists(sanity):
        os.remove(sanity)
-
-
def CompilerRT():
    """Build compiler-rt builtins for wasm32-wasi and install them.

    Always starts from a clean build dir (see TODO below) and configures
    with the wasi CMake toolchain file.
    """
    # TODO(sbc): Figure out how to do this step as part of the llvm build.
    # I suspect that this can be done using the llvm/runtimes directory but
    # have yet to make it actually work this way.
    buildbot.Step('compiler-rt')

    build_dir = os.path.join(work_dirs.GetBuild(), 'compiler-rt-out')
    # TODO(sbc): Remove this.
    # The compiler-rt doesn't currently rebuild libraries when a new -DCMAKE_AR
    # value is specified.
    if os.path.isdir(build_dir):
        Remove(build_dir)

    Mkdir(build_dir)
    src_dir = GetLLVMSrcDir('compiler-rt')
    cc_env = BuildEnv(src_dir, bin_subdir=True)
    command = CMakeCommandWasi([
        os.path.join(src_dir, 'lib', 'builtins'),
        '-DCMAKE_C_COMPILER_WORKS=ON', '-DCOMPILER_RT_BAREMETAL_BUILD=On',
        '-DCOMPILER_RT_BUILD_XRAY=OFF', '-DCOMPILER_RT_INCLUDE_TESTS=OFF',
        '-DCOMPILER_RT_ENABLE_IOS=OFF', '-DCOMPILER_RT_DEFAULT_TARGET_ONLY=On',
        '-DLLVM_CONFIG_PATH=' + Executable(
            os.path.join(work_dirs.GetBuild(), 'llvm-out', 'bin',
                         'llvm-config')),
        '-DCMAKE_INSTALL_PREFIX=' + GetInstallDir('lib', 'clang', LLVM_VERSION)
    ])

    # Configure, build, install.
    proc.check_call(command, cwd=build_dir, env=cc_env)
    proc.check_call(['ninja', '-v'], cwd=build_dir, env=cc_env)
    proc.check_call(['ninja', 'install'], cwd=build_dir, env=cc_env)
-
-
def LibCXX():
    """Build libc++ for wasm32-wasi (static, no threads) and install it."""
    buildbot.Step('libcxx')
    build_dir = os.path.join(work_dirs.GetBuild(), 'libcxx-out')
    # Always build from scratch.
    if os.path.isdir(build_dir):
        Remove(build_dir)
    Mkdir(build_dir)
    src_dir = GetLLVMSrcDir('libcxx')
    cc_env = BuildEnv(src_dir, bin_subdir=True)
    command = CMakeCommandWasi([
        src_dir,
        '-DCMAKE_EXE_LINKER_FLAGS=-nostdlib++',
        '-DLIBCXX_ENABLE_THREADS=OFF',
        '-DLIBCXX_ENABLE_SHARED=OFF',
        '-DLIBCXX_ENABLE_FILESYSTEM=OFF',
        '-DLIBCXX_HAS_MUSL_LIBC=ON',
        '-DLIBCXX_CXX_ABI=libcxxabi',
        '-DLIBCXX_LIBDIR_SUFFIX=/wasm32-wasi',
        '-DLIBCXX_CXX_ABI_INCLUDE_PATHS=' +
        GetLLVMSrcDir('libcxxabi', 'include'),
        '-DLLVM_PATH=' + GetLLVMSrcDir('llvm'),
    ])

    # Configure, build, install.
    proc.check_call(command, cwd=build_dir, env=cc_env)
    proc.check_call(['ninja', '-v'], cwd=build_dir, env=cc_env)
    proc.check_call(['ninja', 'install'], cwd=build_dir, env=cc_env)
-
-
def LibCXXABI():
    """Build libc++abi for wasm32-wasi and install it plus its imports file."""
    buildbot.Step('libcxxabi')
    build_dir = os.path.join(work_dirs.GetBuild(), 'libcxxabi-out')
    # Always build from scratch.
    if os.path.isdir(build_dir):
        Remove(build_dir)
    Mkdir(build_dir)
    src_dir = GetLLVMSrcDir('libcxxabi')
    cc_env = BuildEnv(src_dir, bin_subdir=True)
    command = CMakeCommandWasi([
        src_dir,
        '-DCMAKE_EXE_LINKER_FLAGS=-nostdlib++',
        '-DLIBCXXABI_ENABLE_PIC=OFF',
        '-DLIBCXXABI_ENABLE_SHARED=OFF',
        '-DLIBCXXABI_ENABLE_THREADS=OFF',
        '-DLIBCXXABI_LIBDIR_SUFFIX=/wasm32-wasi',
        '-DLIBCXXABI_LIBCXX_PATH=' + GetLLVMSrcDir('libcxx'),
        '-DLIBCXXABI_LIBCXX_INCLUDES=' +
        GetInstallDir('sysroot', 'include', 'c++', 'v1'),
        '-DLLVM_PATH=' + GetLLVMSrcDir('llvm'),
    ])

    # Configure, build, install; then add the wasm imports description.
    proc.check_call(command, cwd=build_dir, env=cc_env)
    proc.check_call(['ninja', '-v'], cwd=build_dir, env=cc_env)
    proc.check_call(['ninja', 'install'], cwd=build_dir, env=cc_env)
    CopyLibraryToSysroot(os.path.join(SCRIPT_DIR, 'libc++abi.imports'))
-
-
def WasiLibc():
    """Build wasi-libc with make and install the sysroot + cmake helpers."""
    buildbot.Step('Wasi')
    build_dir = os.path.join(work_dirs.GetBuild(), 'wasi-libc-out')
    if os.path.isdir(build_dir):
        Remove(build_dir)
    cc_env = BuildEnv(build_dir, use_gnuwin32=True)
    make_cmd = [
        proc.Which('make'),
        '-j%s' % NPROC,
        'SYSROOT=' + build_dir,
        'WASM_CC=' + GetInstallDir('bin', 'clang'),
    ]
    proc.check_call(make_cmd, env=cc_env, cwd=GetSrcDir('wasi-libc'))
    CopyTree(build_dir, GetInstallDir('sysroot'))

    # We add the cmake toolchain file and out JS polyfill script to make using
    # the wasi toolchain easier.
    shutil.copy2(os.path.join(SCRIPT_DIR, CMAKE_TOOLCHAIN_FILE),
                 GetInstallDir(CMAKE_TOOLCHAIN_FILE))
    Remove(GetInstallDir('cmake'))
    shutil.copytree(os.path.join(SCRIPT_DIR, 'cmake'), GetInstallDir('cmake'))
    shutil.copy2(os.path.join(SCRIPT_DIR, 'wasi.js'), GetInstallDir())
-
-
def ArchiveBinaries():
    """Tar up the install dir and, on uploading bots, push it to storage."""
    buildbot.Step('Archive binaries')
    archive = Archive(GetInstallDir(), print_content=buildbot.IsBot())
    if buildbot.IsUploadingBot():
        # All relevant binaries were copied to the LLVM directory.
        UploadArchive('binaries', archive)
-
-
def DebianPackage():
    """Build (and, on numbered bot builds, upload) the Debian package.

    Only runs on Linux bots. A packaging failure is recorded but does not
    abort the overall build.
    """
    if not (IsLinux() and buildbot.IsBot()):
        return

    buildbot.Step('Debian package')
    top_dir = os.path.dirname(SCRIPT_DIR)
    build_number = buildbot.BuildNumber()
    try:
        if build_number:
            version = '0.1.' + build_number
            message = ('Automatic build %s produced on http://wasm-stat.us' %
                       build_number)
            # Record this bot build in debian/changelog before packaging.
            proc.check_call(['dch', '-D', 'unstable', '-v', version, message],
                            cwd=top_dir)
        proc.check_call(['debuild', '--no-lintian', '-i', '-us', '-uc', '-b'],
                        cwd=top_dir)
        if build_number:
            # Revert the changelog edit so the checkout stays clean.
            proc.check_call(['git', 'checkout', 'debian/changelog'],
                            cwd=top_dir)
            debfile = os.path.join(os.path.dirname(top_dir),
                                   'wasm-toolchain_%s_amd64.deb' % version)
            UploadFile(debfile, os.path.basename(debfile))
    except proc.CalledProcessError:
        # Note the failure but allow the build to continue.
        buildbot.Fail()
-
-
def CompileLLVMTorture(outdir, opt):
    """Compile the GCC torture tests with wasi clang at opt level `opt`."""
    buildbot.Step('Compile LLVM Torture (%s)' % opt)
    install_bin = GetInstallDir('bin')
    cc = Executable(os.path.join(install_bin, 'wasm32-wasi-clang'))
    cxx = Executable(os.path.join(install_bin, 'wasm32-wasi-clang++'))
    # Start from a clean output directory.
    Remove(outdir)
    Mkdir(outdir)
    failures = compile_torture_tests.run(
        cc=cc,
        cxx=cxx,
        testsuite=GccTestDir(),
        sysroot_dir=GetInstallDir('sysroot'),
        fails=[
            GetLLVMSrcDir('llvm', 'lib', 'Target', 'WebAssembly', IT_IS_KNOWN)
        ],
        exclusions=LLVM_TORTURE_EXCLUSIONS,
        out=outdir,
        config='clang',
        opt=opt)
    if failures != 0:
        buildbot.Fail()
-
-
def CompileLLVMTortureEmscripten(name, em_config, outdir, fails, opt):
    """Compile the GCC torture tests via emscripten using `em_config`."""
    buildbot.Step('Compile LLVM Torture (%s, %s)' % (name, opt))
    cc = Executable(GetInstallDir('emscripten', 'emcc'), '.bat')
    cxx = Executable(GetInstallDir('emscripten', 'em++'), '.bat')
    # Start from a clean output directory.
    Remove(outdir)
    Mkdir(outdir)
    # emcc picks up its configuration from the environment.
    os.environ['EM_CONFIG'] = em_config
    failures = compile_torture_tests.run(
        cc=cc,
        cxx=cxx,
        testsuite=GccTestDir(),
        sysroot_dir=GetInstallDir('sysroot'),
        fails=fails,
        exclusions=LLVM_TORTURE_EXCLUSIONS,
        out=outdir,
        config='emscripten',
        opt=opt)

    if failures != 0:
        buildbot.Fail()
-
-
def LinkLLVMTorture(name, linker, fails, indir, outdir, extension,
                    opt, args=None):
    """Link previously-compiled torture objects from `indir` into `outdir`."""
    buildbot.Step('Link LLVM Torture (%s, %s)' % (name, opt))
    assert os.path.isfile(linker), 'Cannot find linker at %s' % linker
    # Start from a clean output directory.
    Remove(outdir)
    Mkdir(outdir)
    failures = link_assembly_files.run(
        linker=linker,
        files=os.path.join(indir, '*.' + extension),
        fails=fails,
        attributes=[opt],
        out=outdir,
        args=args)
    if failures != 0:
        buildbot.Fail()
-
-
def ExecuteLLVMTorture(name, runner, indir, fails, attributes, extension, opt,
                       outdir='', wasmjs='', extra_files=None,
                       warn_only=False):
    """Run linked torture binaries under `runner`; fail/warn on regressions."""
    if extra_files is None:
        extra_files = []

    buildbot.Step('Execute LLVM Torture (%s, %s)' % (name, opt))
    if not indir:
        print('Step skipped: no input')
        buildbot.Warn()
        return None
    assert os.path.isfile(runner), 'Cannot find runner at %s' % runner
    files = os.path.join(indir, '*.%s' % extension)
    if not glob.glob(files):
        print("No files found by", files)
        buildbot.Fail()
        return
    failures = execute_files.run(runner=runner,
                                 files=files,
                                 fails=fails,
                                 attributes=attributes + [opt],
                                 out=outdir,
                                 wasmjs=wasmjs,
                                 extra_files=extra_files)
    if failures != 0:
        buildbot.FailUnless(lambda: warn_only)
-
-
def ValidateLLVMTorture(indir, ext, opt):
    """Validate compiled torture modules with wasm-validate."""
    validator = Executable(os.path.join(GetInstallDir('bin'), 'wasm-validate'))
    # Object files contain a DataCount section, so enable bulk memory
    ExecuteLLVMTorture(name='validate',
                       runner=validator,
                       indir=indir,
                       fails=None,
                       attributes=[opt],
                       extension=ext,
                       opt=opt)
-
-
class Build(object):
    """A named build step with optional OS filtering and incremental dir.

    Extra positional/keyword arguments are forwarded to the runnable;
    when incremental_build_dir is set it is passed as build_dir.
    """

    def __init__(self, name_, runnable_, os_filter=None,
                 incremental_build_dir=None, *args, **kwargs):
        self.name = name_
        self.runnable = runnable_
        self.os_filter = os_filter
        self.incremental_build_dir = incremental_build_dir
        self.args = args
        self.kwargs = kwargs

        # Incremental builds receive their work dir as a keyword argument.
        if incremental_build_dir:
            self.kwargs['build_dir'] = incremental_build_dir

    def Run(self):
        """Run the step, clobbering the incremental dir around LTO builds
        and after any failure (on bots)."""
        if self.os_filter and not self.os_filter.Check(BuilderPlatformName()):
            print("Skipping %s: Doesn't work on %s" %
                  (self.runnable.__name__, BuilderPlatformName()))
            return

        # When using LTO we always want a clean build (the previous
        # build was non-LTO)
        if self.incremental_build_dir and ShouldUseLTO():
            RemoveIfBot(self.incremental_build_dir)
        try:
            self.runnable(*self.args, **self.kwargs)
        except Exception:
            # If the build fails (even non-LTO), a possible cause is a build
            # config change, so clobber the work dir for next time.
            if self.incremental_build_dir:
                RemoveIfBot(self.incremental_build_dir)
            raise
        finally:
            # When using LTO we want to always clean up afterward,
            # (the next build will be non-LTO).
            if self.incremental_build_dir and ShouldUseLTO():
                RemoveIfBot(self.incremental_build_dir)
-
-
def Summary():
    """Print the failed/warned step summary and publish build metadata.

    On uploading bots (except emscripten-releases, whose stages run
    separately) writes buildinfo.json, uploads it as latest.json, and —
    only if nothing failed — also as lkgr.json.
    """
    buildbot.Step('Summary')

    # Emscripten-releases bots run the stages separately so LKGR has no way of
    # knowing whether everything passed or not.
    should_upload = (buildbot.IsUploadingBot() and
                     not buildbot.IsEmscriptenReleasesBot())

    if should_upload:
        info = {'repositories': GetRepoInfo()}
        info['build'] = buildbot.BuildNumber()
        info['scheduler'] = buildbot.Scheduler()
        info_file = GetInstallDir('buildinfo.json')
        info_json = json.dumps(info, indent=2)
        print(info_json)

        with open(info_file, 'w+') as f:
            f.write(info_json)
            f.write('\n')

    print('Failed steps: %s.' % buildbot.Failed())
    for step in buildbot.FailedList():
        print('    %s' % step)
    print('Warned steps: %s.' % buildbot.Warned())
    for step in buildbot.WarnedList():
        print('    %s' % step)

    if should_upload:
        latest_file = '%s/%s' % (buildbot.BuilderName(), 'latest.json')
        buildbot.Link('latest.json', cloud.Upload(info_file, latest_file))

    if buildbot.Failed():
        buildbot.Fail()
    else:
        # Only a fully-green build advances the last-known-good revision.
        if should_upload:
            lkgr_file = '%s/%s' % (buildbot.BuilderName(), 'lkgr.json')
            buildbot.Link('lkgr.json', cloud.Upload(info_file, lkgr_file))
-
-
def AllBuilds():
    """Return every Build rule, in dependency order: host tools first,
    then target libraries, then archiving/packaging."""
    return [
        # Host tools
        Build('llvm', LLVM,
              incremental_build_dir=os.path.join(
                  work_dirs.GetBuild(), 'llvm-out')),
        Build('llvm-test-depends', LLVMTestDepends),
        Build('v8', V8, os_filter=Filter(exclude=['mac'])),
        Build('jsvu', Jsvu, os_filter=Filter(exclude=['windows'])),
        Build('wabt', Wabt,
              incremental_build_dir=os.path.join(
                  work_dirs.GetBuild(), 'wabt-out')),
        Build('binaryen', Binaryen,
              incremental_build_dir=os.path.join(
                  work_dirs.GetBuild(), 'binaryen-out')),
        Build('emscripten-upstream', Emscripten),
        # Target libs
        # TODO: re-enable wasi on windows, see #517
        Build('wasi-libc', WasiLibc, os_filter=Filter(exclude=['windows'])),
        Build('compiler-rt', CompilerRT,
              os_filter=Filter(exclude=['windows'])),
        Build('libcxx', LibCXX, os_filter=Filter(exclude=['windows'])),
        Build('libcxxabi', LibCXXABI, os_filter=Filter(exclude=['windows'])),
        # Archive
        Build('archive', ArchiveBinaries),
        Build('debian', DebianPackage),
    ]
-
-
# For now, just the builds used to test WASI and emscripten torture tests
# on wasm-stat.us. Each entry must match a Build name from AllBuilds().
DEFAULT_BUILDS = [
    'llvm', 'v8', 'jsvu', 'wabt', 'binaryen',
    'emscripten-upstream', 'wasi-libc', 'compiler-rt',
    'libcxx', 'libcxxabi', 'archive'
]
-
-
def BuildRepos(filter):
    """Run every build rule selected by `filter`.

    NOTE(review): the parameter shadows the `filter` builtin; kept for
    caller compatibility.
    """
    selected = filter.Apply(AllBuilds())
    for rule in selected:
        rule.Run()
-
-
class Test(object):
    """A named test step with an optional OS filter."""

    def __init__(self, name_, runnable_, os_filter=None):
        self.name = name_
        self.runnable = runnable_
        self.os_filter = os_filter

    def Test(self):
        """Run the test unless the OS filter excludes this builder."""
        if self.os_filter and not self.os_filter.Check(BuilderPlatformName()):
            print("Skipping %s: Doesn't work on %s" %
                  (self.name, BuilderPlatformName()))
            return
        self.runnable()
-
-
def GetTortureDir(name, opt):
    """Map a torture config name to its output directory for `opt`.

    'asm2wasm' and 'emwasm' have historical directory names; everything
    else gets a 'torture-<name>' directory.
    """
    special = {
        'asm2wasm': GetTestDir('asm2wasm-torture-out', opt),
        'emwasm': GetTestDir('emwasm-torture-out', opt),
    }
    if name in special:
        return special[name]
    return GetTestDir('torture-' + name, opt)
-
-
def TestBare():
    """Compile, validate, link, and execute the torture suite against the
    bare wasi toolchain, at every optimization level in BARE_TEST_OPT_FLAGS.

    Execution runs under d8 (non-Windows) and JSC (Mac, warn-only).
    """
    # Compile
    for opt in BARE_TEST_OPT_FLAGS:
        CompileLLVMTorture(GetTortureDir('o', opt), opt)
        ValidateLLVMTorture(GetTortureDir('o', opt), 'o', opt)

    # Link/Assemble
    for opt in BARE_TEST_OPT_FLAGS:
        LinkLLVMTorture(name='lld',
                        linker=Executable(
                            GetInstallDir('bin', 'wasm32-wasi-clang++')),
                        fails=LLD_KNOWN_TORTURE_FAILURES,
                        indir=GetTortureDir('o', opt),
                        outdir=GetTortureDir('lld', opt),
                        extension='o',
                        opt=opt)

    # Execute
    common_attrs = ['bare']
    common_attrs += ['win'] if IsWindows() else ['posix']

    # Avoid d8 execution on windows because of flakiness,
    # https://bugs.chromium.org/p/v8/issues/detail?id=8211
    if not IsWindows():
        for opt in BARE_TEST_OPT_FLAGS:
            ExecuteLLVMTorture(name='d8',
                               runner=D8Bin(),
                               indir=GetTortureDir('lld', opt),
                               fails=RUN_KNOWN_TORTURE_FAILURES,
                               attributes=common_attrs + ['d8', 'lld', opt],
                               extension='wasm',
                               opt=opt,
                               wasmjs=os.path.join(SCRIPT_DIR, 'wasi.js'))

    # Skip JSC when the jsvu install step itself failed or warned.
    if IsMac() and not buildbot.DidStepFailOrWarn('jsvu'):
        for opt in BARE_TEST_OPT_FLAGS:
            ExecuteLLVMTorture(name='jsc',
                               runner=os.path.join(JSVU_OUT_DIR, 'jsc'),
                               indir=GetTortureDir('lld', opt),
                               fails=RUN_KNOWN_TORTURE_FAILURES,
                               attributes=common_attrs + ['jsc', 'lld'],
                               extension='wasm',
                               opt=opt,
                               warn_only=True,
                               wasmjs=os.path.join(SCRIPT_DIR, 'wasi.js'))
-
-
def TestEmwasm():
    """Compile and (off Windows) execute the torture suite via emscripten."""
    for opt in EMSCRIPTEN_TEST_OPT_FLAGS:
        CompileLLVMTortureEmscripten('emwasm',
                                     GetInstallDir(EMSCRIPTEN_CONFIG_UPSTREAM),
                                     GetTortureDir('emwasm', opt),
                                     EMWASM_KNOWN_TORTURE_COMPILE_FAILURES,
                                     opt)

    # Avoid d8 execution on windows because of flakiness,
    # https://bugs.chromium.org/p/v8/issues/detail?id=8211
    if IsWindows():
        return
    for opt in EMSCRIPTEN_TEST_OPT_FLAGS:
        ExecuteLLVMTorture(name='emwasm',
                           runner=D8Bin(),
                           indir=GetTortureDir('emwasm', opt),
                           fails=RUN_KNOWN_TORTURE_FAILURES,
                           attributes=['emwasm', 'lld', 'd8'],
                           extension='c.js',
                           opt=opt,
                           outdir=GetTortureDir('emwasm', opt))
-
-
def ExecuteEmscriptenTestSuite(name, tests, config, outdir, warn_only=False):
    """Run the named emscripten test suites against `config` in `outdir`."""
    buildbot.Step('Execute emscripten testsuite (%s)' % name)
    Mkdir(outdir)

    # The emscripten install script (tools/install.py) excludes the
    # third-party test data, so copy it into the installed tree on first use.
    em_install_dir = GetInstallDir('emscripten')
    installed_tests = os.path.join(em_install_dir, 'tests', 'third_party')
    if not os.path.exists(installed_tests):
        src_dir = GetSrcDir('emscripten', 'tests', 'third_party')
        print('Copying directory %s to %s' % (src_dir, em_install_dir))
        shutil.copytree(src_dir, installed_tests)

    runner = GetInstallDir('emscripten', 'tests', 'runner.py')
    cmd = [runner, '--em-config', config] + tests
    test_env = os.environ.copy()
    if buildbot.IsBot() and IsWindows():
        # The Windows bots have no native clang for emscripten to use.
        test_env['EMTEST_LACKS_NATIVE_CLANG'] = '1'
    try:
        proc.check_call(cmd, cwd=outdir, env=test_env)
    except proc.CalledProcessError:
        buildbot.FailUnless(lambda: warn_only)
-
-
def TestEmtest():
    """Run the emscripten test suite (CLI test params override the default)."""
    tests = options.test_params or ['wasm2', 'other']
    ExecuteEmscriptenTestSuite('emwasm', tests,
                               GetInstallDir(EMSCRIPTEN_CONFIG_UPSTREAM),
                               os.path.join(work_dirs.GetTest(), 'emtest-out'))
-
-
-def TestLLVMTestSuite():
-    buildbot.Step('Execute LLVM TestSuite')
-
-    outdir = GetBuildDir('llvmtest-out')
-    # The compiler changes on every run, so incremental builds don't make
-    # sense.
-    Remove(outdir)
-    Mkdir(outdir)
-    # The C++ tests explicitly link libstdc++ for some reason, but we use
-    # libc++ and it's unnecessary to link it anyway. So create an empty
-    # libstdc++.a
-    proc.check_call([GetInstallDir('bin', 'llvm-ar'), 'rc', 'libstdc++.a'],
-                    cwd=outdir)
-    # This has to be in the environment and not TEST_SUITE_EXTRA_C_FLAGS
-    # because CMake doesn't append the flags to the try-compiles.
-    os.environ['EM_CONFIG'] = GetInstallDir(EMSCRIPTEN_CONFIG_UPSTREAM)
-    command = [GetInstallDir('emscripten', 'emcmake')] + CMakeCommandBase() + [
-        GetSrcDir('llvm-test-suite'), '-DCMAKE_C_COMPILER=' +
-        GetInstallDir('emscripten', 'emcc'), '-DCMAKE_CXX_COMPILER=' +
-        GetInstallDir('emscripten', 'em++'), '-DTEST_SUITE_RUN_UNDER=' +
-        NodeBin(), '-DTEST_SUITE_USER_MODE_EMULATION=ON',
-        '-DTEST_SUITE_SUBDIRS=SingleSource',
-        '-DTEST_SUITE_EXTRA_EXE_LINKER_FLAGS=' +
-        '-L %s -s TOTAL_MEMORY=1024MB' % outdir,
-        '-DTEST_SUITE_LLVM_SIZE=' + GetInstallDir('emscripten', 'emsize.py')
-    ]
-
-    proc.check_call(command, cwd=outdir)
-    proc.check_call(['ninja', '-v'], cwd=outdir)
-    results_file = 'results.json'
-    lit = GetBuildDir('llvm-out', 'bin', 'llvm-lit')
-    proc.call([lit, '-v', '-o', results_file, '.'], cwd=outdir)
-
-    with open(os.path.join(outdir, results_file)) as results_fd:
-        json_results = json.loads(results_fd.read())
-
-    def get_names(code):
-        # Strip the unneccessary spaces from the test name
-        return [
-            r['name'].replace('test-suite :: ', '')
-            for r in json_results['tests'] if r['code'] == code
-        ]
-
-    failures = get_names('FAIL')
-    successes = get_names('PASS')
-
-    expected_failures = testing.parse_exclude_files(
-        RUN_LLVM_TESTSUITE_FAILURES, [])
-    unexpected_failures = [f for f in failures if f not in expected_failures]
-    unexpected_successes = [f for f in successes if f in expected_failures]
-
-    if len(unexpected_failures) > 0:
-        print('Emscripten unexpected failures:')
-        for test in unexpected_failures:
-            print(test)
-    if len(unexpected_successes) > 0:
-        print('Emscripten unexpected successes:')
-        for test in unexpected_successes:
-            print(test)
-
-    if len(unexpected_failures) + len(unexpected_successes) > 0:
-        buildbot.Fail()
-
-
-ALL_TESTS = [
-    Test('llvm-regression', TestLLVMRegression),
-    # TODO: re-enable wasi on windows, see #517
-    Test('bare', TestBare, Filter(exclude=['windows'])),
-    Test('emwasm', TestEmwasm, Filter(exclude=['mac'])),
-    # These tests do have interesting differences on OSes (especially the
-    # 'other' tests) and eventually should run everywhere.
-    Test('emtest', TestEmtest),
-    Test('llvmtest', TestLLVMTestSuite, Filter(include=['linux'])),
-]
-
-# The default tests to run on wasm-stat.us (just WASI and emwasm torture)
-DEFAULT_TESTS = ['bare', 'emwasm', 'llvmtest']
-
-
-def TextWrapNameList(prefix, items):
-    width = 80  # TODO(binji): better guess?
-    names = sorted(item.name for item in items)
-    return '%s%s' % (prefix,
-                     textwrap.fill(' '.join(names),
-                                   width,
-                                   initial_indent='  ',
-                                   subsequent_indent='  '))
-
-
-def ParseArgs():
-    def SplitComma(arg):
-        if not arg:
-            return None
-        return arg.split(',')
-
-    epilog = '\n\n'.join([
-        TextWrapNameList('sync targets:\n', AllSources()),
-        TextWrapNameList('build targets:\n', AllBuilds()),
-        TextWrapNameList('test targets:\n', ALL_TESTS),
-    ])
-
-    parser = argparse.ArgumentParser(
-        description='Wasm waterfall top-level CI script',
-        formatter_class=argparse.RawDescriptionHelpFormatter,
-        epilog=epilog)
-
-    parser.add_argument(
-        '--sync-dir', dest='sync_dir', help='Directory for syncing sources')
-    parser.add_argument(
-        '--build-dir', dest='build_dir', help='Directory for build output')
-    parser.add_argument(
-        '--prebuilt-dir', dest='prebuilt_dir',
-        help='Directory for prebuilt output')
-    parser.add_argument(
-        '--v8-dir', dest='v8_dir',
-        help='Directory for V8 checkout/build')
-    parser.add_argument(
-        '--test-dir', dest='test_dir', help='Directory for test output')
-    parser.add_argument(
-        '--install-dir', dest='install_dir',
-        help='Directory for installed output')
-
-    sync_grp = parser.add_mutually_exclusive_group()
-    sync_grp.add_argument(
-        '--no-sync', dest='sync', default=True, action='store_false',
-        help='Skip fetching and checking out source repos')
-    sync_grp.add_argument(
-        '--sync-include', dest='sync_include', default='', type=SplitComma,
-        help='Include only the comma-separated list of sync targets')
-    sync_grp.add_argument(
-        '--sync-exclude', dest='sync_exclude', default='', type=SplitComma,
-        help='Exclude the comma-separated list of sync targets')
-
-    parser.add_argument(
-        '--sync-lkgr', dest='sync_lkgr', default=False, action='store_true',
-        help='When syncing, only sync up to the Last Known Good Revision '
-             'for each sync target')
-
-    build_grp = parser.add_mutually_exclusive_group()
-    build_grp.add_argument(
-        '--no-build', dest='build', default=True, action='store_false',
-        help='Skip building source repos (also skips V8 and LLVM unit tests)')
-    build_grp.add_argument(
-        '--build-include', dest='build_include', default='', type=SplitComma,
-        help='Include only the comma-separated list of build targets')
-    build_grp.add_argument(
-        '--build-exclude', dest='build_exclude', default='', type=SplitComma,
-        help='Exclude the comma-separated list of build targets')
-
-    test_grp = parser.add_mutually_exclusive_group()
-    test_grp.add_argument(
-        '--no-test', dest='test', default=True, action='store_false',
-        help='Skip running tests')
-    test_grp.add_argument(
-        '--test-include', dest='test_include', default='', type=SplitComma,
-        help='Include only the comma-separated list of test targets')
-    test_grp.add_argument(
-        '--test-exclude', dest='test_exclude', default='', type=SplitComma,
-        help='Exclude the comma-separated list of test targets')
-    parser.add_argument(
-        '--test-params', dest='test_params', default='', type=SplitComma,
-        help='Test selector to pass through to emscripten testsuite runner')
-
-    parser.add_argument(
-        '--no-threads', action='store_true',
-        help='Disable use of thread pool to building and testing')
-    parser.add_argument(
-        '--torture-filter',
-        help='Limit which torture tests are run by applying the given glob')
-    parser.add_argument(
-        '--git-status', dest='git_status', default=False, action='store_true',
-        help='Show git status for each sync target. '
-             "Doesn't sync, build, or test")
-    parser.add_argument(
-        '--no-host-clang', dest='host_clang', action='store_false',
-        help="Don't force chrome clang as the host compiler")
-    parser.add_argument(
-        '--no-sysroot', dest='use_sysroot', action='store_false',
-        help="Don't use the V8 sysroot to build on Linux")
-    parser.add_argument(
-        '--clobber', dest='clobber', default=False, action='store_true',
-        help="Delete working directories, forcing a clean build")
-    parser.add_argument(
-        '--use-lto', dest='use_lto', default=False, action='store',
-        choices=['true', 'false', 'auto'],
-        help='Use extra optimization for host binaries')
-
-    return parser.parse_args()
-
-
-def AddToPath(path):
-    print("adding to path: %s" % path)
-    os.environ['PATH'] = path + os.pathsep + os.environ['PATH']
-
-
-def run(sync_filter, build_filter, test_filter):
-    if options.git_status:
-        for s in AllSources():
-            s.PrintGitStatus()
-        return 0
-
-    Clobber()
-    Chdir(SCRIPT_DIR)
-    for work_dir in work_dirs.GetAll():
-        Mkdir(work_dir)
-    SyncRepos(sync_filter, options.sync_lkgr)
-    if build_filter.All():
-        Remove(GetInstallDir())
-        Mkdir(GetInstallDir())
-        Mkdir(GetInstallDir('bin'))
-        Mkdir(GetInstallDir('lib'))
-
-    # Add prebuilt cmake to PATH so any subprocesses use a consistent cmake.
-    AddToPath(os.path.dirname(PrebuiltCMakeBin()))
-
-    # `npm` uses whatever `node` is in `PATH`. To make sure it uses the
-    # Node.js version we want, we prepend the node bin dir to `PATH`.
-    AddToPath(NodeBinDir())
-
-    # TODO(dschuff): Figure out how to make these statically linked?
-    if IsWindows() and build_filter.Any():
-        host_toolchains.CopyDlls(GetInstallDir('bin'), 'Debug')
-
-    try:
-        BuildRepos(build_filter)
-    except Exception:
-        # If any exception reaches here, do not attempt to run the tests; just
-        # log the error for buildbot and exit
-        print("Exception thrown in build step.")
-        traceback.print_exc()
-        buildbot.Fail()
-        Summary()
-        return 1
-
-    # Override the default locale to use UTF-8 encoding for all files and stdio
-    # streams (see PEP540), since oure test files are encoded with UTF-8.
-    os.environ['PYTHONUTF8'] = '1'
-    for t in test_filter.Apply(ALL_TESTS):
-        t.Test()
-
-    # Keep the summary step last: it'll be marked as red if the return code is
-    # non-zero. Individual steps are marked as red with buildbot.Fail().
-    Summary()
-    return buildbot.Failed()
-
-
-def main():
-    global options
-    start = time.time()
-    options = ParseArgs()
-    print('Python version %s' % sys.version)
-
-    if options.no_threads:
-        testing.single_threaded = True
-    if options.torture_filter:
-        compile_torture_tests.test_filter = options.torture_filter
-
-    if options.sync_dir:
-        work_dirs.SetSync(options.sync_dir)
-    if options.build_dir:
-        work_dirs.SetBuild(options.build_dir)
-    if options.v8_dir:
-        work_dirs.SetV8(options.v8_dir)
-    if options.test_dir:
-        work_dirs.SetTest(options.test_dir)
-    if options.install_dir:
-        work_dirs.SetInstall(options.install_dir)
-    if options.prebuilt_dir:
-        work_dirs.SetPrebuilt(options.prebuilt_dir)
-    if not options.host_clang:
-        host_toolchains.SetForceHostClang(False)
-    if not options.use_sysroot:
-        host_toolchains.SetUseSysroot(False)
-
-    if ShouldUseLTO() and IsMac():
-        # The prebuilt clang on mac doesn't include libLTO, so use the SDK
-        host_toolchains.SetForceHostClang(False)
-
-    sync_include = options.sync_include if options.sync else []
-    sync_filter = Filter('sync', sync_include, options.sync_exclude)
-    build_include = [] if not options.build else (
-        options.build_include if options.build_include else DEFAULT_BUILDS)
-    build_filter = Filter('build', build_include, options.build_exclude)
-    test_include = [] if not options.test else (
-        options.test_include if options.test_include else DEFAULT_TESTS)
-    test_filter = Filter('test', test_include, options.test_exclude)
-
-    try:
-        ret = run(sync_filter, build_filter, test_filter)
-        print('Completed in {}s'.format(time.time() - start))
-        return ret
-    except:  # noqa
-        traceback.print_exc()
-        # If an except is raised during one of the steps we still need to
-        # print the @@@STEP_FAILURE@@@ annotation otherwise the annotator
-        # makes the failed stap as green:
-        # TODO(sbc): Remove this if the annotator is fixed:
-        # http://crbug.com/647357
-        if buildbot.current_step:
-            buildbot.Fail()
-        return 1
-
-
-if __name__ == '__main__':
-    sys.exit(main())
diff --git a/src/buildbot.py b/src/buildbot.py
deleted file mode 100644
index e6ee248..0000000
--- a/src/buildbot.py
+++ /dev/null
@@ -1,152 +0,0 @@
-#   Copyright 2016 WebAssembly Community Group participants
-#
-#   Licensed under the Apache License, Version 2.0 (the "License");
-#   you may not use this file except in compliance with the License.
-#   You may obtain a copy of the License at
-#
-#       http://www.apache.org/licenses/LICENSE-2.0
-#
-#   Unless required by applicable law or agreed to in writing, software
-#   distributed under the License is distributed on an "AS IS" BASIS,
-#   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-#   See the License for the specific language governing permissions and
-#   limitations under the License.
-
-import os
-import sys
-
-failed_steps = []
-warned_steps = []
-current_step = None
-
-# Schedulers which can kick off new builds, from:
-# https://chromium.googlesource.com/chromium/tools/build/+/master/masters/master.client.wasm.llvm/builders.pyl
-SCHEDULERS = {
-    None: 'forced',
-    'None': 'forced',
-    'llvm_commits': 'llvm',
-    'clang_commits': 'clang'
-}
-
-# Buildbot-provided environment.
-BUILDBOT_SCHEDULER = os.environ.get('BUILDBOT_SCHEDULER', None)
-SCHEDULER = SCHEDULERS[BUILDBOT_SCHEDULER]
-BUILDBOT_REVISION = os.environ.get('BUILDBOT_REVISION', None)
-BUILDBOT_BUILDNUMBER = os.environ.get('BUILDBOT_BUILDNUMBER', None)
-BUILDBOT_BUILDERNAME = os.environ.get('BUILDBOT_BUILDERNAME', None)
-BUILDBOT_MASTERNAME = os.environ.get('BUILDBOT_MASTERNAME', None)
-BUILDBOT_BUCKET = os.environ.get('BUILDBOT_BUCKET', None)
-
-# Possible masters include None (running locally), the waterfall integration
-# bot, or the emscripten-releases bot.
-WATERFALL_BOT = 'client.wasm.llvm'
-EMSCRIPTEN_RELEASES_BOT = 'emscripten-releases'
-
-assert BUILDBOT_MASTERNAME in [None, WATERFALL_BOT, EMSCRIPTEN_RELEASES_BOT], \
-    'unknown mastername: %s' % str(BUILDBOT_MASTERNAME)
-
-# Possible buckets include "ci" for normal builds, "try" for try builds, and
-# none if not on a bot at all.
-CI_BUCKET = 'ci'
-TRY_BUCKET = 'try'
-
-assert BUILDBOT_BUCKET in [None, CI_BUCKET, TRY_BUCKET], \
-    'unknown bucket: %s' % str(BUILDBOT_BUCKET)
-
-
-def IsBot():
-    """Return True if we are running on bot, False otherwise."""
-    return BUILDBOT_BUILDNUMBER is not None
-
-
-def IsEmscriptenReleasesBot():
-    """Return true if running on the emscripten-releases builders,
-     False otherwise."""
-    return BUILDBOT_MASTERNAME == EMSCRIPTEN_RELEASES_BOT
-
-
-def BuildNumber():
-    if IsEmscriptenReleasesBot():
-        return BUILDBOT_REVISION
-    return BUILDBOT_BUILDNUMBER
-
-
-def IsUploadingBot():
-    """Return True if this is a bot that should upload builds."""
-    if not IsBot():
-        return False
-    if not IsEmscriptenReleasesBot():
-        # We are on the waterfall bot. None of these upload.
-        return False
-    else:
-        # We are on emscripten-releases. CI bots upload, but not try.
-        return BUILDBOT_BUCKET == CI_BUCKET
-
-
-def ShouldClobber():
-    return os.environ.get('BUILDBOT_CLOBBER')
-
-
-def BuilderName():
-    return BUILDBOT_BUILDERNAME
-
-
-def Scheduler():
-    return BUILDBOT_SCHEDULER
-
-
-# Magic annotations:
-# https://chromium.googlesource.com/chromium/tools/build/+/master/scripts/common/annotator.py
-def Step(name):
-    global current_step
-    current_step = name
-    sys.stdout.flush()
-    sys.stdout.write('\n@@@BUILD_STEP %s@@@\n' % name)
-
-
-def Link(label, url):
-    sys.stdout.write('@@@STEP_LINK@%s@%s@@@\n' % (label, url))
-
-
-def Fail():
-    """Mark one step as failing, but keep going."""
-    sys.stdout.flush()
-    sys.stdout.write('\n@@@STEP_FAILURE@@@\n')
-    global failed_steps
-    failed_steps.append(current_step)
-
-
-def Failed():
-    return len(failed_steps)
-
-
-def FailedList():
-    return list(failed_steps)
-
-
-def Warn():
-    """We mark this step as failing, but this step is flaky so we don't care
-  enough about this to make the bot red."""
-    sys.stdout.flush()
-    sys.stdout.write('\n@@@STEP_WARNINGS@@@\n')
-    global warned_steps
-    warned_steps.append(current_step)
-
-
-def Warned():
-    return len(warned_steps)
-
-
-def WarnedList():
-    return list(warned_steps)
-
-
-def DidStepFailOrWarn(step):
-    return step in failed_steps or step in warned_steps
-
-
-def FailUnless(predicate):
-    if predicate():
-        Warn()
-    else:
-        Fail()
diff --git a/src/cloud.py b/src/cloud.py
deleted file mode 100644
index accb142..0000000
--- a/src/cloud.py
+++ /dev/null
@@ -1,42 +0,0 @@
-#   Copyright 2016 WebAssembly Community Group participants
-#
-#   Licensed under the Apache License, Version 2.0 (the "License");
-#   you may not use this file except in compliance with the License.
-#   You may obtain a copy of the License at
-#
-#       http://www.apache.org/licenses/LICENSE-2.0
-#
-#   Unless required by applicable law or agreed to in writing, software
-#   distributed under the License is distributed on an "AS IS" BASIS,
-#   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-#   See the License for the specific language governing permissions and
-#   limitations under the License.
-
-import proc
-from buildbot import IsEmscriptenReleasesBot, IsUploadingBot
-
-CLOUD_STORAGE_BASE_URL = 'https://storage.googleapis.com/'
-WATERFALL_CLOUD_STORAGE_PATH = 'wasm-llvm/builds/'
-EMSCRIPTEN_RELEASES_CLOUD_STORAGE_PATH = \
-    'webassembly/emscripten-releases-builds/'
-
-
-def GetCloudStoragePath():
-    if IsEmscriptenReleasesBot():
-        return EMSCRIPTEN_RELEASES_CLOUD_STORAGE_PATH
-    else:
-        return WATERFALL_CLOUD_STORAGE_PATH
-
-
-def Upload(local, remote):
-    """Upload file to Cloud Storage."""
-    if not IsUploadingBot():
-        return
-    remote = GetCloudStoragePath() + remote
-    proc.check_call(['gsutil.py', 'cp', local, 'gs://' + remote])
-    return CLOUD_STORAGE_BASE_URL + remote
-
-
-def Download(remote, local):
-    remote = GetCloudStoragePath() + remote
-    proc.check_call(['gsutil.py', 'cp', 'gs://' + remote, local])
diff --git a/src/cmake/Modules/Platform/Wasi.cmake b/src/cmake/Modules/Platform/Wasi.cmake
deleted file mode 100644
index 92e41b8..0000000
--- a/src/cmake/Modules/Platform/Wasi.cmake
+++ /dev/null
@@ -1,28 +0,0 @@
-# WASI (WebAssembly System Interface) is an experimental WebAssembly standalone
-# toolchain.
-
-# This is arbitrary, AFAIK, for now.
-cmake_minimum_required(VERSION 3.4.0)
-
-set(CMAKE_SYSTEM_VERSION 1)
-set(CMAKE_SYSTEM_PROCESSOR wasm32)
-set(triple wasm32-wasi)
-
-# Make HandleLLVMOptions.cmake happy.
-# TODO(sbc): We should probably fix llvm or libcxxabi instead.
-# See: https://reviews.llvm.org/D33753
-set(UNIX 1)
-
-set(CMAKE_C_COMPILER_TARGET ${triple})
-set(CMAKE_CXX_COMPILER_TARGET ${triple})
-
-
-set(CMAKE_SYSROOT ${WASM_SDKROOT}/sysroot)
-set(CMAKE_STAGING_PREFIX ${WASM_SDKROOT}/sysroot)
-
-# Don't look in the sysroot for executables to run during the build
-set(CMAKE_FIND_ROOT_PATH_MODE_PROGRAM NEVER)
-# Only look in the sysroot (not in the host paths) for the rest
-set(CMAKE_FIND_ROOT_PATH_MODE_LIBRARY ONLY)
-set(CMAKE_FIND_ROOT_PATH_MODE_INCLUDE ONLY)
-set(CMAKE_FIND_ROOT_PATH_MODE_PACKAGE ONLY)
diff --git a/src/compile_torture_tests.py b/src/compile_torture_tests.py
deleted file mode 100755
index d2a02b7..0000000
--- a/src/compile_torture_tests.py
+++ /dev/null
@@ -1,167 +0,0 @@
-#!/usr/bin/env python3
-
-#   Copyright 2015 WebAssembly Community Group participants
-#
-#   Licensed under the Apache License, Version 2.0 (the "License");
-#   you may not use this file except in compliance with the License.
-#   You may obtain a copy of the License at
-#
-#       http://www.apache.org/licenses/LICENSE-2.0
-#
-#   Unless required by applicable law or agreed to in writing, software
-#   distributed under the License is distributed on an "AS IS" BASIS,
-#   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-#   See the License for the specific language governing permissions and
-#   limitations under the License.
-
-import argparse
-import fnmatch
-import glob
-import os
-import os.path
-import sys
-
-import testing
-
-# For debugging purposes set this to a source file name to test just a single
-# file.
-test_filter = None
-
-
-def do_compile(infile, outfile, extras):
-    """Create the command-line for a C compiler invocation."""
-    # its not enough to check only for capital `.C` extension because
-    # windows.
-    if os.path.splitext(infile)[1] == '.C' or 'g++.dg' in infile:
-        return [extras['cxx'], infile, '-o', outfile] + extras['cxxflags']
-    else:
-        return [extras['cc'], infile, '-o', outfile] + extras['cflags']
-
-
-def create_outname(outdir, infile, extras):
-    if os.path.splitext(infile)[1] == '.C':
-        parts = infile.split(os.path.sep)
-        parts = parts[parts.index('testsuite') + 2:]
-        basename = '__'.join(parts)
-    else:
-        basename = os.path.basename(infile)
-    rtn = os.path.join(outdir, basename + extras['suffix'])
-    if os.path.exists(rtn):
-        raise Exception("already exists: " + rtn)
-    return rtn
-
-
-def find_runnable_tests(directory, pattern):
-    results = []
-    for root, dirs, files in os.walk(directory):
-        if os.path.basename(root) == 'ext':
-            continue
-        for filename in files:
-            if fnmatch.fnmatch(filename, pattern):
-                fullname = os.path.join(root, filename)
-                with open(fullname, 'rb') as f:
-                    header = f.read(1024)
-                    # Some of these files really do have non-utf8 in them.
-                    # TODO: open in text mode with the encoding kwarg when we
-                    # drop py2.
-                    header = header.decode('ISO8859-1')
-                if ('{ dg-do run }' in header and
-                        'dg-additional-sources' not in header):
-                    results.append(fullname)
-    return results
-
-
-def run(cc, cxx, testsuite, sysroot_dir, fails, exclusions, out, config, opt):
-    """Compile all torture tests."""
-    script_dir = os.path.dirname(os.path.abspath(__file__))
-    pre_js = os.path.join(script_dir, 'em_pre.js')
-
-    cflags_common = [
-        '-DSTACK_SIZE=524288',
-        '-D_WASI_EMULATED_MMAN',
-        '-D_WASI_EMULATED_SIGNAL',
-        '-w', '-Wno-implicit-function-declaration', '-' + opt
-    ]
-    cflags_c = ['--std=gnu89']
-    cflags_cxx = []
-    cflags_extra = {
-        'clang': ['-c', '--sysroot=%s' % sysroot_dir],
-        'emscripten': ['--pre-js', pre_js],
-    }
-    suffix = {
-        'clang': '.o',
-        'emscripten': '.js',
-    }[config]
-
-    assert os.path.isdir(out), 'Cannot find outdir %s' % out
-    assert os.path.isfile(cc), 'Cannot find C compiler at %s' % cc
-    assert os.path.isfile(cxx), 'Cannot find C++ compiler at %s' % cxx
-    assert os.path.isdir(testsuite), 'Cannot find testsuite at %s' % testsuite
-
-    # Currently we build the following parts of the gcc test suite:
-    #  - testsuite/gcc.c-torture/execute/*.c
-    #  - testsuite/g++.dg (all executable tests)
-    # TODO(sbc) Also more parts of the test suite
-    c_torture = os.path.join(testsuite, 'gcc.c-torture', 'execute')
-    assert os.path.isdir(c_torture), ('Cannot find C tests at %s' % c_torture)
-    test_files = glob.glob(os.path.join(c_torture, '*.c'))
-
-    if config == 'clang':
-        # Only build the C++ tests when linking with lld
-        cxx_test_dir = os.path.join(testsuite, 'g++.dg')
-        assert os.path.isdir(cxx_test_dir), ('Cannot find C++ tests at %s' %
-                                             cxx_test_dir)
-        test_files += find_runnable_tests(cxx_test_dir, '*.[Cc]')
-
-    cflags = cflags_common + cflags_c + cflags_extra[config]
-    cxxflags = cflags_common + cflags_cxx + cflags_extra[config]
-
-    if test_filter:
-        test_files = fnmatch.filter(test_files, test_filter)
-
-    result = testing.execute(tester=testing.Tester(command_ctor=do_compile,
-                                                   outname_ctor=create_outname,
-                                                   outdir=out,
-                                                   extras={
-                                                       'cc': cc,
-                                                       'cxx': cxx,
-                                                       'cflags': cflags,
-                                                       'cxxflags': cxxflags,
-                                                       'suffix': suffix
-                                                   }),
-                             inputs=test_files,
-                             fails=fails,
-                             exclusions=exclusions,
-                             attributes=[config, opt])
-
-    return result
-
-
-def main():
-    parser = argparse.ArgumentParser(description='Compile GCC torture tests.')
-    parser.add_argument('--cc', type=str, required=True,
-                        help='C compiler path')
-    parser.add_argument('--cxx', type=str, required=True,
-                        help='C++ compiler path')
-    parser.add_argument('--testsuite', type=str, required=True,
-                        help='GCC testsuite tests path')
-    parser.add_argument('--sysroot', type=str, required=True,
-                        help='Sysroot directory')
-    parser.add_argument('--fails', type=str, required=True,
-                        help='Expected failures')
-    parser.add_argument('--out', type=str, required=True,
-                        help='Output directory')
-    parser.add_argument('--config', type=str, required=True,
-                        help='configuration to use')
-    args = parser.parse_args()
-    return run(cc=args.cc,
-               cxx=args.cxx,
-               testsuite=args.testsuite,
-               sysroot_dir=args.sysroot,
-               fails=args.fails,
-               out=args.out,
-               config=args.config)
-
-
-if __name__ == '__main__':
-    sys.exit(main())
diff --git a/src/create_sysroot.sh b/src/create_sysroot.sh
deleted file mode 100755
index e652323..0000000
--- a/src/create_sysroot.sh
+++ /dev/null
@@ -1,35 +0,0 @@
-#!/bin/bash
-#
-# Create a linux sysroot image (tar archive) based on debian
-# This is than used in the build scripts as the --sysroot arguments
-# when compiling linux binaries.
-#
-# See https://xw.is/wiki/Create_Debian_sysroots
-#
-# Once created the sysroot should be uploaded to google storage.
-# e.g:
-#  gsutil cp sysroot_debian_stretch_amd64.tar.xz gs://wasm/
-
-set -o errexit
-
-SUITE=stretch
-TARGET_DIR=sysroot_debian_${SUITE}_amd64
-VERSION=2
-
-mkdir $TARGET_DIR
-
-# Perform minimal installation
-debootstrap $SUITE $TARGET_DIR http://deb.debian.org/debian
-
-# Install additional packages
-chroot $TARGET_DIR apt-get install -y -q libstdc++-6-dev zlib1g-dev
-
-# Convert absolute symlinks to relative
-find $TARGET_DIR -type l -lname '/*' -exec sh -c 'file="$0"; dir=$(dirname "$file"); target=$(readlink "$0"); prefix=$(dirname "$dir" | sed 's@[^/]*@\.\.@g'); newtarget="$prefix$target"; ln -snf $newtarget $file' {} \;
-
-# Remove parts that are not relevent to --sysroot
-for d in dev proc tmp home run var boot media sys srv mnt; do
-  rm -rf $TARGET_DIR/$d
-done
-
-tar cJf sysroot_debian_${SUITE}_amd64_v${VERSION}.tar.xz -C $TARGET_DIR .
diff --git a/src/em_pre.js b/src/em_pre.js
deleted file mode 100644
index 4846937..0000000
--- a/src/em_pre.js
+++ /dev/null
@@ -1,12 +0,0 @@
-var Module = {}
-
-Module['onAbort'] = function(reason) {
-  // JS shells do not exit with status 1 when a promise is rejected. Emscripten
-  // calls abort when a wasm module fails to initialize, which is implemented in
-  // JS as a function that terminates execution by throwing an exception, which
-  // causes the instantiate promise to be rejected, which causes the shell to
-  // falsely return 0.
-  // Emscripten's abort has an 'onAbort' hook, so we can use that to call d8's
-  // quit, which correctly returns an error code even from a promise.
-  quit(1);
-};
diff --git a/src/emscripten_config_upstream b/src/emscripten_config_upstream
deleted file mode 100644
index 4911ee5..0000000
--- a/src/emscripten_config_upstream
+++ /dev/null
@@ -1,17 +0,0 @@
-import os
-
-WASM_INSTALL = '{{WASM_INSTALL}}'
-# this helps projects using emscripten find it
-EMSCRIPTEN_ROOT = os.path.join(WASM_INSTALL, 'emscripten')
-LLVM_ROOT = os.path.join(WASM_INSTALL, 'bin')
-BINARYEN_ROOT = os.path.join(WASM_INSTALL)
-
-prebuilt_node = '{{PREBUILT_NODE}}'
-if not os.path.isfile(prebuilt_node):
-   prebuilt_node = None
-NODE_JS = os.path.expanduser(os.getenv('NODE') or prebuilt_node or '/usr/bin/nodejs')
-JAVA = '{{PREBUILT_JAVA}}'
-
-# For testing only
-V8_ENGINE = os.path.join(WASM_INSTALL, 'bin', 'd8')
-JS_ENGINES = [NODE_JS]
diff --git a/src/execute_files.py b/src/execute_files.py
deleted file mode 100755
index 92eaa61..0000000
--- a/src/execute_files.py
+++ /dev/null
@@ -1,114 +0,0 @@
-#!/usr/bin/env python3
-
-#   Copyright 2016 WebAssembly Community Group participants
-#
-#   Licensed under the Apache License, Version 2.0 (the "License");
-#   you may not use this file except in compliance with the License.
-#   You may obtain a copy of the License at
-#
-#       http://www.apache.org/licenses/LICENSE-2.0
-#
-#   Unless required by applicable law or agreed to in writing, software
-#   distributed under the License is distributed on an "AS IS" BASIS,
-#   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-#   See the License for the specific language governing permissions and
-#   limitations under the License.
-
-import argparse
-import glob
-import os
-import os.path
-import sys
-
-import testing
-
-
-def create_outname(outdir, infile, extras):
-    """Create the output file's name."""
-    basename = os.path.basename(infile)
-    outname = basename + '.out'
-    return os.path.join(outdir, outname)
-
-
-def execute(infile, outfile, extras):
-    """Create the command-line for an execution."""
-    runner = extras['runner']
-    basename = os.path.splitext(os.path.basename(runner))[0]
-    if basename == 'v8':
-        # The d8 installed by JSVU is named 'v8'
-        basename = 'd8'
-    out_opt = ['-o', outfile] if outfile else []
-    extra_files = extras['extra_files']
-    config = basename
-    wasmjs = [extras['wasmjs']] if extras['wasmjs'] else []
-    if basename == 'd8' or basename == 'jsc':
-        config = basename + ('-wasm' if wasmjs else '-asm2wasm')
-
-    # TODO(jgravelle): Remove --no-wasm-async-compilation by adding
-    # testRunner.waitUntilDone()/.notifyDone(), as used in V8's mjsunit.js:
-    # https://cs.chromium.org/chromium/src/v8/test/mjsunit/mjsunit.js
-    commands = {
-        'wasm-shell': [runner, '--entry=main', infile] + out_opt,
-        'd8-wasm': [runner, '--experimental-wasm-bigint',
-                    '--no-wasm-async-compilation'] + wasmjs + [
-                        '--', infile] + extra_files,
-        'd8-asm2wasm': [runner, '--experimental-wasm-bigint',
-                        '--no-wasm-async-compilation', infile],
-        'jsc-wasm': [runner, '--useWebAssembly=1'] + wasmjs + [
-            '--', infile] + extra_files,
-        'jsc-asm2wasm': [runner, '--useWebAssembly=1', infile],
-        'wasm': [runner, infile],
-        'node': [runner] + wasmjs + [infile] + extra_files,
-        'wasm-validate': [runner, '--enable-bulk-memory', infile],
-    }
-    return commands[config]
-
-
-def run(runner, files, fails, attributes, out, wasmjs='', extra_files=[]):
-    """Execute all files."""
-    assert os.path.isfile(runner), 'Cannot find runner at %s' % runner
-    if out:
-        assert os.path.isdir(out), 'Cannot find outdir %s' % out
-    if wasmjs:
-        assert os.path.isfile(wasmjs), 'Cannot find wasm.js %s' % wasmjs
-    executable_files = glob.glob(files)
-    if len(executable_files) == 0:
-        print('No files found by %s' % files)
-        return 1
-    tester = testing.Tester(command_ctor=execute,
-                            outname_ctor=create_outname,
-                            outdir=out,
-                            extras={
-                                'runner': runner,
-                                'wasmjs': wasmjs,
-                                'extra_files':
-                                extra_files if extra_files else []
-                            }),
-    return testing.execute(tester=tester,
-                           inputs=executable_files,
-                           fails=fails,
-                           attributes=attributes)
-
-
-def main():
-    parser = argparse.ArgumentParser(
-        description='Execute .wast or .wasm files.')
-    parser.add_argument('--runner', type=str, required=True,
-                        help='Runner path')
-    parser.add_argument('--files', type=str, required=True,
-                        help='Glob pattern for .wast / .wasm files')
-    parser.add_argument('--fails', type=str, required=True,
-                        help='Expected failures')
-    parser.add_argument('--out', type=str, required=False,
-                        help='Output directory')
-    parser.add_argument('--wasmjs', type=str, required=False,
-                        help='JavaScript support runtime for WebAssembly')
-    parser.add_argument('--extra', type=str, required=False, action='append',
-                        help='Extra files to pass to the runner')
-    args = parser.parse_args()
-    return run(args.runner, [args.files], args.fails, set(), args.out,
-               args.wasmjs, args.extra)
-
-
-if __name__ == '__main__':
-    sys.exit(main())
diff --git a/src/file_util.py b/src/file_util.py
deleted file mode 100644
index 86481a1..0000000
--- a/src/file_util.py
+++ /dev/null
@@ -1,93 +0,0 @@
-# -*- coding: utf-8 -*-
-
-#   Copyright 2015 WebAssembly Community Group participants
-#
-#   Licensed under the Apache License, Version 2.0 (the "License");
-#   you may not use this file except in compliance with the License.
-#   You may obtain a copy of the License at
-#
-#       http://www.apache.org/licenses/LICENSE-2.0
-#
-#   Unless required by applicable law or agreed to in writing, software
-#   distributed under the License is distributed on an "AS IS" BASIS,
-#   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-#   See the License for the specific language governing permissions and
-#   limitations under the License.
-
-# Shell utilities
-
-import errno
-import os
-import shutil
-import sys
-
-import proc
-
-
-def Chdir(path):
-    print('Change directory to: %s' % path)
-    os.chdir(path)
-
-
-def Mkdir(path):
-    """Create a directory at a specified path.
-
-    Creates all intermediate directories along the way.
-    e.g.: Mkdir('a/b/c') when 'a/' is an empty directory will
-          cause the creation of directories 'a/b/' and 'a/b/c/'.
-
-    If the path already exists (and is already a directory), this does nothing.
-    """
-    try:
-        os.makedirs(path)
-    except OSError as e:
-        if not os.path.isdir(path):
-            raise Exception('Path %s is not a directory!' % path)
-        if not e.errno == errno.EEXIST:
-            raise e
-
-
-def Remove(path):
-    """Remove file or directory if it exists, do nothing otherwise."""
-    if not os.path.exists(path):
-        return
-    print('Removing %s' % path)
-    if not os.path.isdir(path):
-        os.remove(path)
-        return
-    if sys.platform == 'win32':
-        # shutil.rmtree() may not work in Windows if a directory contains
-        # read-only files.
-        proc.check_call('rmdir /S /Q "%s"' % path, shell=True)
-    else:
-        shutil.rmtree(path)
-
-
-def CopyTree(src, dst):
-    """Recursively copy the items in the src directory to the dst directory.
-
-    Unlike shutil.copytree, the destination directory and any subdirectories
-    and files may exist. Existing directories are left untouched, and existing
-    files are removed and copied from the source using shutil.copy2. It is also
-    not symlink-aware.
-
-    Args:
-      src: Source. Must be an existing directory.
-      dst: Destination directory. If it exists, must be a directory. Otherwise
-           it will be created, along with parent directories.
-    """
-    print('Copying directory %s to %s' % (src, dst))
-    if not os.path.isdir(dst):
-        os.makedirs(dst)
-    for root, dirs, files in os.walk(src):
-        relroot = os.path.relpath(root, src)
-        dstroot = os.path.join(dst, relroot)
-        for d in dirs:
-            dstdir = os.path.join(dstroot, d)
-            if not os.path.isdir(dstdir):
-                os.mkdir(dstdir)
-        for f in files:
-            dstfile = os.path.join(dstroot, f)
-            if os.path.isfile(dstfile):
-                os.remove(dstfile)
-            shutil.copy2(os.path.join(root, f), dstfile)
diff --git a/src/host_toolchains.py b/src/host_toolchains.py
deleted file mode 100644
index fbaa1da..0000000
--- a/src/host_toolchains.py
+++ /dev/null
@@ -1,169 +0,0 @@
-# -*- coding: utf-8 -*-
-
-#   Copyright 2016 WebAssembly Community Group participants
-#
-#   Licensed under the Apache License, Version 2.0 (the "License");
-#   you may not use this file except in compliance with the License.
-#   You may obtain a copy of the License at
-#
-#       http://www.apache.org/licenses/LICENSE-2.0
-#
-#   Unless required by applicable law or agreed to in writing, software
-#   distributed under the License is distributed on an "AS IS" BASIS,
-#   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-#   See the License for the specific language governing permissions and
-#   limitations under the License.
-
-import glob
-import json
-import os
-import shutil
-
-import file_util
-import proc
-import work_dirs
-
-force_host_clang = True
-use_sysroot = True
-
-
-def SetupToolchain():
-    return [
-        'vpython.bat',
-        os.path.join(work_dirs.GetV8(), 'build', 'toolchain', 'win',
-                     'setup_toolchain.py')
-    ]
-
-
-def VSToolchainPy():
-    return [
-        'vpython.bat',
-        os.path.join(work_dirs.GetV8(), 'build', 'vs_toolchain.py')
-    ]
-
-
-def WinToolchainJson():
-    return os.path.join(work_dirs.GetV8(), 'build', 'win_toolchain.json')
-
-
-def SyncPrebuiltClang(src_dir):
-    """Update the prebuilt clang toolchain used by chromium bots"""
-    tools_clang = os.path.join(src_dir, 'tools', 'clang')
-    assert os.path.isdir(tools_clang)
-    proc.check_call([os.path.join(tools_clang, 'scripts', 'update.py')])
-
-
-def SyncWinToolchain():
-    """Update the VS toolchain used by Chromium bots"""
-    proc.check_call(VSToolchainPy() + ['update'])
-
-
-def GetVSEnv(dir):
-    """Return the configured VS build environment block as a python dict."""
-    # The format is a list of nul-terminated strings of the form var=val
-    # where 'var' is the environment variable name, and 'val' is its value
-    env = os.environ.copy()
-    with open(os.path.join(dir, 'environment.x64'), 'rb') as f:
-        entries = f.read().decode().split('\0')
-        for e in entries:
-            if not e:
-                continue
-            var, val = e.split('=', 1)
-            env[var] = val
-            print('ENV: %s = %s' % (var, val))
-
-    return env
-
-
-def GetRuntimeDir():
-    # Get the chromium-packaged toolchain directory info in a JSON file
-    proc.check_call(VSToolchainPy() + ['get_toolchain_dir'])
-    with open(WinToolchainJson()) as f:
-        paths = json.load(f)
-    # Extract the 64-bit runtime path
-    return [path for path in paths['runtime_dirs'] if path.endswith('64')][0]
-
-
-def SetUpVSEnv(outdir):
-    """Set up the VS build environment used by Chromium bots"""
-
-    # Get the chromium-packaged toolchain directory info in a JSON file
-    proc.check_call(VSToolchainPy() + ['get_toolchain_dir'])
-    with open(WinToolchainJson()) as f:
-        paths = json.load(f)
-
-    # Write path information (usable by a non-chromium build) into an
-    # environment block
-    runtime_dirs = os.pathsep.join(paths['runtime_dirs'])
-    proc.check_call(SetupToolchain() +
-                    [paths['path'], paths['win_sdk'], runtime_dirs, 'win',
-                     'x64', 'environment.x64'],
-                    cwd=outdir)
-    return GetVSEnv(outdir)
-
-
-def CopyDlls(dir, configuration):
-    """Copy MSVS Runtime dlls into a build directory"""
-    file_util.Mkdir(dir)
-    proc.check_call(VSToolchainPy() + ['copy_dlls', dir, configuration, 'x64'])
-    # LLD needs also concrt140.dll, which the Chromium copy_dlls doesn't
-    # include.
-    for dll in glob.glob(os.path.join(GetRuntimeDir(), 'concrt140*.dll')):
-        print('Copying %s to %s' % (dll, dir))
-        shutil.copy2(dll, dir)
-
-
-def UsingGoma():
-    return 'GOMA_DIR' in os.environ
-
-
-def GomaDir():
-    return os.environ['GOMA_DIR']
-
-
-def CMakeLauncherFlags():
-    flags = []
-    if UsingGoma():
-        compiler_launcher = os.path.join(GomaDir(), 'gomacc')
-    else:
-        try:
-            compiler_launcher = proc.Which('ccache')
-        except:  # noqa
-            return flags
-
-        if ShouldForceHostClang():
-            # This flag is only present in clang.
-            flags.extend([
-                '-DCMAKE_%s_FLAGS=-Qunused-arguments' % c
-                for c in ['C', 'CXX']
-            ])
-
-    flags.extend([
-        '-DCMAKE_%s_COMPILER_LAUNCHER=%s' % (c, compiler_launcher)
-        for c in ['C', 'CXX']
-    ])
-    return flags
-
-
-def NinjaJobs():
-    if UsingGoma() and force_host_clang:
-        return ['-j', '50']
-    return []
-
-
-def SetForceHostClang(force):
-    global force_host_clang
-    force_host_clang = force
-
-
-def ShouldForceHostClang():
-    return force_host_clang
-
-
-def SetUseSysroot(use):
-    global use_sysroot
-    use_sysroot = use
-
-
-def ShouldUseSysroot():
-    return use_sysroot
diff --git a/src/libc++abi.imports b/src/libc++abi.imports
deleted file mode 100644
index a5ba844..0000000
--- a/src/libc++abi.imports
+++ /dev/null
@@ -1,3 +0,0 @@
-_Unwind_RaiseException
-_Unwind_DeleteException
-_Unwind_ForcedUnwind
diff --git a/src/link_assembly_files.py b/src/link_assembly_files.py
deleted file mode 100755
index f9cb735..0000000
--- a/src/link_assembly_files.py
+++ /dev/null
@@ -1,83 +0,0 @@
-#!/usr/bin/env python3
-
-#   Copyright 2015 WebAssembly Community Group participants
-#
-#   Licensed under the Apache License, Version 2.0 (the "License");
-#   you may not use this file except in compliance with the License.
-#   You may obtain a copy of the License at
-#
-#       http://www.apache.org/licenses/LICENSE-2.0
-#
-#   Unless required by applicable law or agreed to in writing, software
-#   distributed under the License is distributed on an "AS IS" BASIS,
-#   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-#   See the License for the specific language governing permissions and
-#   limitations under the License.
-
-import argparse
-import glob
-import os
-import os.path
-import sys
-
-import testing
-
-
-def create_outname(outdir, infile, extras):
-    """Create the output file's name."""
-    basename = os.path.basename(infile)
-    outname = basename + '.wasm'
-    return os.path.join(outdir, outname)
-
-
-def link(infile, outfile, extras):
-    """Create the command-line for a linker invocation."""
-    linker = extras['linker']
-    install_root = os.path.dirname(os.path.dirname(linker))
-    sysroot_dir = os.path.join(install_root, 'sysroot')
-    command = [
-        linker,
-        '--sysroot=%s' % sysroot_dir, '-Wl,-zstack-size=1048576', '-o',
-        outfile, infile, '-lwasi-emulated-mman', '-lwasi-emulated-signal'
-    ]
-    return command + extras['args']
-
-
-def run(linker, files, fails, attributes, out, args):
-    """Link all files."""
-    assert os.path.isfile(linker), 'Cannot find linker at %s' % linker
-    assert os.path.isdir(out), 'Cannot find outdir %s' % out
-    input_files = glob.glob(files)
-    if len(input_files) == 0:
-        print('No files found by %s' % files)
-        return 1
-    if not args:
-        args = []
-    return testing.execute(tester=testing.Tester(command_ctor=link,
-                                                 outname_ctor=create_outname,
-                                                 outdir=out,
-                                                 extras={
-                                                     'linker': linker,
-                                                     'args': args
-                                                 }),
-                           inputs=input_files,
-                           fails=fails,
-                           attributes=attributes)
-
-
-def main():
-    parser = argparse.ArgumentParser(description='Link .o files into a .wasm.')
-    parser.add_argument('--linker', type=str, required=True,
-                        help='Linker path')
-    parser.add_argument('--files', type=str, required=True,
-                        help='Glob pattern for .s files')
-    parser.add_argument('--fails', type=str, required=True,
-                        help='Expected failures')
-    parser.add_argument('--out', type=str, required=True,
-                        help='Output directory')
-    args = parser.parse_args()
-    return run(args.linker, args.files, args.fails, args.out)
-
-
-if __name__ == '__main__':
-    sys.exit(main())
diff --git a/src/parallel_runner.py b/src/parallel_runner.py
deleted file mode 100644
index 13dbfde..0000000
--- a/src/parallel_runner.py
+++ /dev/null
@@ -1,83 +0,0 @@
-#   Copyright 2018 WebAssembly Community Group participants
-#
-#   Licensed under the Apache License, Version 2.0 (the "License");
-#   you may not use this file except in compliance with the License.
-#   You may obtain a copy of the License at
-#
-#       http://www.apache.org/licenses/LICENSE-2.0
-#
-#   Unless required by applicable law or agreed to in writing, software
-#   distributed under the License is distributed on an "AS IS" BASIS,
-#   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-#   See the License for the specific language governing permissions and
-#   limitations under the License.
-
-import multiprocessing
-import sys
-import queue
-
-
-def g_testing_thread(test_function, work_queue, result_queue):
-    for test in iter(lambda: get_from_queue(work_queue), None):
-        result = None
-        try:
-            result = test_function(test)
-        except Exception as e:
-            print("Something went wrong", e, file=sys.stderr)
-            raise
-        result_queue.put(result)
-
-
-class ParallelRunner(object):
-    def __init__(self):
-        self.processes = None
-        self.result_queue = None
-
-    def map(self, test_function, inputs):
-        test_queue = self.create_test_queue(inputs)
-        self.init_processes(test_function, test_queue)
-        results = self.collect_results()
-        return results
-
-    def create_test_queue(self, inputs):
-        test_queue = multiprocessing.Queue()
-        for test in inputs:
-            test_queue.put(test)
-        return test_queue
-
-    def init_processes(self, test_function, test_queue):
-        self.processes = []
-        self.result_queue = multiprocessing.Queue()
-        for x in range(multiprocessing.cpu_count()):
-            p = multiprocessing.Process(target=g_testing_thread,
-                                        args=(test_function, test_queue,
-                                              self.result_queue))
-            p.start()
-            self.processes.append(p)
-
-    def collect_results(self):
-        buffered_results = []
-        num = 0
-        while len(self.processes):
-            res = get_from_queue(self.result_queue)
-            if res is not None:
-                num += 1
-                # Print periodically to assure the bot monitor that we are
-                # still alive
-                if num % 10 == 0:
-                    print('Got test results:', num)
-                buffered_results.append(res)
-            else:
-                self.clear_finished_processes()
-        return buffered_results
-
-    def clear_finished_processes(self):
-        self.processes = [p for p in self.processes if p.is_alive()]
-
-
-def get_from_queue(q):
-    try:
-        return q.get(True, 0.1)
-    except queue.Empty:
-        pass
-    return None
diff --git a/src/proc.py b/src/proc.py
deleted file mode 100755
index 3d599ce..0000000
--- a/src/proc.py
+++ /dev/null
@@ -1,98 +0,0 @@
-#   Copyright 2016 WebAssembly Community Group participants
-#
-#   Licensed under the Apache License, Version 2.0 (the "License");
-#   you may not use this file except in compliance with the License.
-#   You may obtain a copy of the License at
-#
-#       http://www.apache.org/licenses/LICENSE-2.0
-#
-#   Unless required by applicable law or agreed to in writing, software
-#   distributed under the License is distributed on an "AS IS" BASIS,
-#   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-#   See the License for the specific language governing permissions and
-#   limitations under the License.
-
-# This module is intended to be a drop-in replacement for the standard
-# subprocess module, with the difference that it logs commands before it runs
-# them. Everything not overriden should pass through to the subprocess module
-# via the import trick below.
-
-# Imports subprocess in its own namespace so we can always refer directly to
-# its attributes.
-import subprocess
-import os
-import sys
-# Imports all of subprocess into the current namespace, effectively
-# re-exporting everything.
-from subprocess import *  # noqa
-
-
-def Which(filename, cwd=None, is_executable=True):
-    if os.path.isabs(filename):
-        return filename
-
-    to_search = os.environ.get('PATH', '').split(os.pathsep)
-    if cwd:
-        to_search.insert(0, cwd)
-    exe_suffixes = ['']
-    if sys.platform == 'win32' and is_executable:
-        exe_suffixes = ['.exe', '.bat', '.cmd'] + exe_suffixes
-    for path in to_search:
-        abs_path = os.path.abspath(os.path.join(path, filename))
-        for suffix in exe_suffixes:
-            full_path = abs_path + suffix
-            if (os.path.isfile(full_path) and
-                    (not is_executable or os.access(full_path, os.X_OK))):
-                return full_path
-    raise Exception('File "%s" not found. (cwd=`%s`, PATH=`%s`' %
-                    (filename, cwd, os.environ['PATH']))
-
-
-def MungeExe(cmd, cwd):
-    exe = cmd[0]
-    if exe.endswith('.py'):
-        script = Which(exe, cwd, is_executable=False)
-        return [sys.executable, script] + cmd[1:]
-    if exe in ('git', 'npm', 'gclient'):
-        return [Which(exe, cwd)] + cmd[1:]
-    return cmd
-
-
-def MungeKwargs(kwargs):
-    should_log = True
-    if 'should_log' in kwargs:
-        should_log = kwargs['should_log']
-        del kwargs['should_log']
-    return should_log, kwargs
-
-
-def LogCall(funcname, cmd, cwd):
-    if isinstance(cmd, str):
-        c = cmd
-    else:
-        c = ' '.join('"' + c + '"' if ' ' in c else c for c in cmd)
-    print('%s(`%s`, cwd=`%s`)' % (funcname, c, cwd))
-
-
-# Now we can override any parts of subprocess we want, while leaving the rest.
-def check_call(cmd, **kwargs):
-    cwd = kwargs.get('cwd', os.getcwd())
-    should_log, kwargs = MungeKwargs(kwargs)
-    cmd = MungeExe(cmd, cwd)
-    if should_log:
-        LogCall('subprocess.check_call', cmd, cwd)
-    sys.stdout.flush()
-    try:
-        subprocess.check_call(cmd, **kwargs)
-    finally:
-        sys.stdout.flush()
-
-
-def check_output(cmd, **kwargs):
-    cwd = kwargs.get('cwd', os.getcwd())
-    should_log, kwargs = MungeKwargs(kwargs)
-    cmd = MungeExe(cmd, cwd)
-    if should_log:
-        LogCall('subprocess.check_output', cmd, cwd)
-    sys.stdout.flush()
-    return subprocess.check_output(cmd, **kwargs)
diff --git a/src/test/asm2wasm_compile_known_gcc_test_failures.txt b/src/test/asm2wasm_compile_known_gcc_test_failures.txt
deleted file mode 100644
index 75b2a40..0000000
--- a/src/test/asm2wasm_compile_known_gcc_test_failures.txt
+++ /dev/null
@@ -1,98 +0,0 @@
-20010122-1.c # undefined symbol llvm_returnaddress
-20071018-1.c # undefined symbol __builtin_malloc
-20071120-1.c # undefined symbol __builtin_malloc
-builtin-bitops-1.c # undefined symbol __builtin_clrsb
-frame-address.c # undefined symbol llvm_frameaddress
-pr17377.c # undefined symbol llvm_returnaddress
-pr36765.c # undefined symbol __builtin_malloc
-pr39228.c # undefined symbol __builtin_isinff
-pr43008.c # undefined symbol __builtin_malloc
-pr47237.c # undefined symbol __builtin_apply
-va-arg-pack-1.c # undefined symbol bar
-
-# Fails with direct LLVM wasm backend (no emcc)
-	20000822-1.c
-	20010209-1.c
-	20010605-1.c
-	20020412-1.c
-	20030501-1.c
-	20040308-1.c
-	20040423-1.c
-	20040520-1.c
-	20041218-2.c
-	20061220-1.c
-	20070919-1.c
-	20090219-1.c
-	920302-1.c
-	920415-1.c
-	920428-2.c
-	920501-3.c
-	920501-7.c
-	920612-2.c
-	920721-4.c
-	920728-1.c
-	921017-1.c
-	921215-1.c
-	931002-1.c
-	990413-2.c
-	align-nest.c
-	built-in-setjmp.c
-	comp-goto-2.c
-	nest-align-1.c
-	nest-stdar-1.c
-	nestfunc-1.c
-	nestfunc-2.c
-	nestfunc-3.c
-	nestfunc-5.c
-	nestfunc-6.c
-	nestfunc-7.c
-	pr22061-3.c
-	pr22061-4.c
-	pr24135.c
-	pr28865.c
-	pr41935.c
-	pr51447.c
-	pr60003.c
-	scal-to-vec1.c
-	scal-to-vec2.c
-	scal-to-vec3.c O0
-	scal-to-vec3.c O2
-	pr60960.c
-
-
-# Works with direct LLVM wasm backend (but fails with emcc)
-	20020107-1.c
-	20021127-1.c
-	20030125-1.c
-	20050316-1.c O0
-	20050316-2.c O0
-	20050316-3.c O0
-	20090711-1.c
-	960909-1.c
-	980701-1.c
-	990130-1.c
-	cbrt.c
-	pr23135.c
-	pr43385.c
-	pr43560.c
-	pr45695.c
-	pr49390.c
-	pr52286.c
-	20050607-1.c O0
-	simd-4.c O0
-
-# Additional failures with BINARYEN=1
-	20030222-1.c
-	20050604-1.c
-	20060420-1.c
-	20071220-1.c
-	20071220-2.c
-	pr38533.c
-	pr41239.c
-	pr49279.c
-	pr53645-2.c
-	pr53645.c
-	simd-1.c
-	simd-2.c
-	simd-5.c
-	simd-6.c
diff --git a/src/test/emwasm_compile_known_gcc_test_failures.txt b/src/test/emwasm_compile_known_gcc_test_failures.txt
deleted file mode 100644
index 05bc0bd..0000000
--- a/src/test/emwasm_compile_known_gcc_test_failures.txt
+++ /dev/null
@@ -1,57 +0,0 @@
-# Fails with bare wasm clang, no emcc (some details in LLVM's
-# /lib/target/WebAssembly/known_gcc_test_failures.txt)
-	20000822-1.c
-	20010209-1.c
-	20010605-1.c
-	20020412-1.c
-	20030501-1.c
-	20040308-1.c
-	20040423-1.c
-	20040520-1.c
-	20041218-2.c
-	20061220-1.c
-	20070919-1.c
-	20071220-1.c O3
-	20071220-2.c
-	20090219-1.c
-	920302-1.c
-	920415-1.c
-	920428-2.c
-	920501-3.c
-	920501-7.c
-	920612-2.c
-	920721-4.c
-	920728-1.c
-	921017-1.c
-	921215-1.c
-	931002-1.c
-	990208-1.c
-	990413-2.c
-	align-nest.c
-	built-in-setjmp.c
-	comp-goto-2.c
-	nest-align-1.c
-	nest-stdar-1.c
-	nestfunc-1.c
-	nestfunc-2.c
-	nestfunc-3.c
-	nestfunc-5.c
-	nestfunc-6.c
-	nestfunc-7.c
-	pr22061-3.c
-	pr22061-4.c
-	pr24135.c
-	pr28865.c
-	pr41935.c
-	pr51447.c
-	pr60003.c
-
-
-20071018-1.c # undefined symbol __builtin_malloc
-20071120-1.c # undefined symbol __builtin_malloc
-medce-1.c O0 # link_error
-pr36765.c # undefined symbol __builtin_malloc
-pr39228.c # undefined symbol __builtin_isinff
-pr43008.c # undefined symbol __builtin_malloc
-pr47237.c # undefined symbol __builtin_apply
-va-arg-pack-1.c # undefined symbol bar
diff --git a/src/test/lld_known_gcc_test_failures.txt b/src/test/lld_known_gcc_test_failures.txt
deleted file mode 100644
index bb91cd7..0000000
--- a/src/test/lld_known_gcc_test_failures.txt
+++ /dev/null
@@ -1,30 +0,0 @@
-# Expected failures from using lld to link on clang-generated .o files generated
-# from the GCC torture tests, and linked with libraries from wasi-sysroot.
-
-# These builtins are not supported by clang
-va-arg-pack-1.c.o # __builtin_va_arg_pack
-pr39228.c.o # __builtin_isinff
-0071018-1.c.o # __builtin_malloc
-20071018-1.c.o # __builtin_malloc
-20071120-1.c.o # __builtin_malloc
-pr36765.c.o # __builtin_malloc
-pr43008.c.o # __builtin_malloc
-pr47237.c.o # __builtin_apply
-
-# Same behviour with x86 clang
-medce-1.c.o O0 # undefined symbol: link_error
-
-asan__interception-test-1.C.o  # Undefined symbol: __interceptor_strtol
-tree-ssa__pr20458.C.o  # Undefined symbol: std::locale::locale
-pr58419.c.o # Undefined symbol: getpid
-
-# These tests compile but need to actually create threads to run correctly.
-tls__thread_local3.C.o
-tls__thread_local3g.C.o
-tls__thread_local4.C.o
-tls__thread_local4g.C.o
-tls__thread_local5.C.o
-tls__thread_local5g.C.o
-
-# Untriaged
-warn__pr33738.C.o
diff --git a/src/test/llvm_torture_exclusions b/src/test/llvm_torture_exclusions
deleted file mode 100644
index 86fbaaf..0000000
--- a/src/test/llvm_torture_exclusions
+++ /dev/null
@@ -1,12 +0,0 @@
-# Clang optimizes loop into infinite loop for -O2
-930529-1.c
-
-# https://bugs.chromium.org/p/v8/issues/detail?id=8211
-# These have flaky behavior when run on windows, which makes it impossible
-# to keep the bots green. Putting them here means we don't even build them,
-# and ignore them on non-windows as well, so we should look into this when
-# we can.
-20040629-1.c
-20040705-1.c
-20040705-2.c
-pr53645-2.c
diff --git a/src/test/llvmtest_known_failures.txt b/src/test/llvmtest_known_failures.txt
deleted file mode 100644
index ab6b4fb..0000000
--- a/src/test/llvmtest_known_failures.txt
+++ /dev/null
@@ -1,53 +0,0 @@
-# Known failures for the LLVM test suite (SingleSource only, so far)
-
-# These all require exceptions. Adding -s DISABLE_EXCEPTION_CATCHING=0
-# (e.g. in SingleSource/Regression/C++/EH/CMakeLists.txt) makes them work.
-SingleSource/Regression/C++/EH/Regression-C++-class_hierarchy.js.test
-SingleSource/Regression/C++/EH/Regression-C++-ctor_dtor_count-2.js.test
-SingleSource/Regression/C++/EH/Regression-C++-ctor_dtor_count.js.test
-SingleSource/Regression/C++/EH/Regression-C++-function_try_block.js.test
-SingleSource/Regression/C++/EH/Regression-C++-inlined_cleanup.js.test
-SingleSource/Regression/C++/EH/Regression-C++-recursive-throw.js.test
-SingleSource/Regression/C++/EH/Regression-C++-simple_rethrow.js.test
-SingleSource/Regression/C++/EH/Regression-C++-simple_throw.js.test
-SingleSource/Regression/C++/EH/Regression-C++-throw_rethrow_test.js.test
-
-# All of these are currently untriaged.
-SingleSource/Benchmarks/Misc-C++-EH/spirit.js.test
-SingleSource/Benchmarks/Misc-C++/Large/ray.js.test
-SingleSource/Benchmarks/Polybench/datamining/correlation/correlation.js.test
-SingleSource/Benchmarks/Polybench/datamining/covariance/covariance.js.test
-SingleSource/Benchmarks/Polybench/linear-algebra/kernels/2mm/2mm.js.test
-SingleSource/Benchmarks/Polybench/linear-algebra/kernels/3mm/3mm.js.test
-SingleSource/Benchmarks/Polybench/linear-algebra/kernels/atax/atax.js.test
-SingleSource/Benchmarks/Polybench/linear-algebra/kernels/bicg/bicg.js.test
-SingleSource/Benchmarks/Polybench/linear-algebra/kernels/cholesky/cholesky.js.test
-SingleSource/Benchmarks/Polybench/linear-algebra/kernels/doitgen/doitgen.js.test
-SingleSource/Benchmarks/Polybench/linear-algebra/kernels/gemm/gemm.js.test
-SingleSource/Benchmarks/Polybench/linear-algebra/kernels/gemver/gemver.js.test
-SingleSource/Benchmarks/Polybench/linear-algebra/kernels/gesummv/gesummv.js.test
-SingleSource/Benchmarks/Polybench/linear-algebra/kernels/mvt/mvt.js.test
-SingleSource/Benchmarks/Polybench/linear-algebra/kernels/symm/symm.js.test
-SingleSource/Benchmarks/Polybench/linear-algebra/kernels/syr2k/syr2k.js.test
-SingleSource/Benchmarks/Polybench/linear-algebra/kernels/syrk/syrk.js.test
-SingleSource/Benchmarks/Polybench/linear-algebra/kernels/trisolv/trisolv.js.test
-SingleSource/Benchmarks/Polybench/linear-algebra/kernels/trmm/trmm.js.test
-SingleSource/Benchmarks/Polybench/linear-algebra/solvers/durbin/durbin.js.test
-SingleSource/Benchmarks/Polybench/linear-algebra/solvers/lu/lu.js.test
-SingleSource/Benchmarks/Polybench/medley/floyd-warshall/floyd-warshall.js.test
-SingleSource/Benchmarks/Polybench/stencils/adi/adi.js.test
-SingleSource/Benchmarks/Polybench/stencils/fdtd-2d/fdtd-2d.js.test
-SingleSource/Benchmarks/Polybench/stencils/jacobi-1d-imper/jacobi-1d-imper.js.test
-SingleSource/Benchmarks/Polybench/stencils/jacobi-2d-imper/jacobi-2d-imper.js.test
-SingleSource/Benchmarks/Polybench/stencils/seidel-2d/seidel-2d.js.test
-SingleSource/Benchmarks/Shootout-C++/EH/Shootout-C++-except.js.test
-SingleSource/Benchmarks/Shootout-C++/Shootout-C++-ackermann.js.test
-SingleSource/Regression/C++/EH/Regression-C++-exception_spec_test.js.test
-SingleSource/Regression/C++/Regression-C++-BuiltinTypeInfo.js.test
-SingleSource/Regression/C++/Regression-C++-global_ctor.js.test
-SingleSource/Regression/C/Regression-C-2003-05-23-TransparentUnion.js.test
-SingleSource/Regression/C/Regression-C-ConstructorDestructorAttributes.js.test
-SingleSource/UnitTests/2003-05-14-AtExit.js.test
-SingleSource/UnitTests/C++11/stdthreadbug.js.test
-SingleSource/UnitTests/Threads/2010-12-08-tls.js.test
-SingleSource/UnitTests/Threads/tls.js.test
diff --git a/src/test/run_known_gcc_test_failures.txt b/src/test/run_known_gcc_test_failures.txt
deleted file mode 100644
index 86cd46d..0000000
--- a/src/test/run_known_gcc_test_failures.txt
+++ /dev/null
@@ -1,275 +0,0 @@
-# Expected failures from running the assembled GCC torture tests.
-
-# Syntax: Each line has a single test to be marked as a 'known failure' (or
-# 'exclusion'. Known failures are expected to fail, and will cause an error if
-# they pass. (Known failures that do not run at all will not cause an
-# error). The format is
-# <name> <attributes> # comment
-# where <name> matches the file name (no directory) passed to the test runner,
-# and <attributes> is a comma-separated list of filter attributes. Each run of
-# the test runner is said to have " a set of "attributes" that represent the
-# configuration: e.g. which compiler, optimization level, JS engine etc is used
-# for that run. A test will be excluded if the configuration has all of the
-# attributes in the exclusion line. For example if a configuration has
-# ['asm2wasm', 'O2', 'd8'] then the exclusion will apply if it has the
-# attributes 'asm2wasm' or 'O2,d8' or no attributes, but not if it has 'bare' or
-# 'asm2wasm,d8'. A test can be listed multiple times with different attribute
-# sets, but not with the same (or a subset).
-
-# Files ending with .c.o.wasm are 'bare' tests run without emscripten; they
-# come with the 'bare' attributes. The failures are mostly the same
-# for those. These tests also are run with d8 and JSC ('d8' and 'jsc' attributes).
-#
-# Files with a .js extension are emscripten builds, which come in 'asm2wasm' or
-# 'emwasm' (aka LLVM upstream wasm backend) attributes.
-#
-# Files with a .o.wasm extension are built using clang and lld native backends.
-#
-# Near-term TODO: Add different optimization levels, with their own attributes.
-# A possible TODO: Allow wildcard matching to combine the corresponding bare and
-# emscripten entry into a single line. This may not be worth it, as there isn't
-# a huge amount of overlap.
-
-
-### Undefined behavior: native clang also fails
-
-
-# Results of signed integer overflow are undefined in C, so don't care.
-# 'clang -O2' runs -instcombine pass that does these transformations:
-# > add nsw x, INT_MIN -> or x, INT_MIN
-# > add nuw x, INT_MIN -> xor x, INT_MIN
-# which makes the tests below fail.
-20040409-1.c.o.wasm O2 # abort()
-20040409-2.c.o.wasm O2 # abort()
-20040409-3.c.o.wasm O2 # abort()
-
-### bare wasm.js tests: need some libc support.
-# Not yet implemented.
-# See wasm.js for the list of libc functions which are missing.
-# The right place to put libc functionality would really be libc anyways.
-20101011-1.c.js # signal
-builtin-bitops-1.c.js asm2wasm # __builtin_clrsb
-pr47237.c.js # __builtin_apply_args
-pr39228.c.js #  __builtin_isinff/isinfl
-
-# Trying to import function 'bar'. The test is likely wrong.
-va-arg-pack-1.c.js # No import error but missing __builtin_va_arg_pack
-
-# Additionally there are a bunch of unexpected failures when disabling IR
-# optimization, which this margin is too small to contain.
-# (a lot of them are unsupported features and missing libcalls which are
-# eliminated by IR optzns)
-
-
-# Don't care/won't fix:
-eeprof-1.c.js # tests -finstrument-functions
-pr23047.c.js O2,emwasm # tests -fwrapv
-pr23047.c.js O3,emwasm # tests -fwrapv
-
-# Low priority
-# Bitfield tests
-bitfld-3.c.o.wasm
-bitfld-3.c.js # abort()
-bitfld-5.c.o.wasm
-bitfld-5.c.js # memory access out of bounds
-pr32244-1.c.o.wasm
-pr32244-1.c.js # abort()
-pr34971.c.o.wasm
-pr34971.c.js # abort()
-
-# __builtin_return_address
-# This is supported on wasm backend, but due to old v8 versions specifying
-# offsets in a non-standard way, programs using __builtin_return_address
-# require -s USE_OFFSET_CONVERTER Emscripten setting.
-20010122-1.c.js
-pr17377.c.js
-
-### Failures specific to emscripten
-# no builtin frameaddress
-frame-address.c.js asm2wasm
-
-# inline assembly tricks
-20030222-1.c.js asm2wasm
-20071220-2.c.js
-pr38533.c.js asm2wasm
-pr41239.c.js asm2wasm
-pr49279.c.js asm2wasm
-
-# aborts in native clang
-20021127-1.c.o.wasm
-20021127-1.c.js emwasm
-20031003-1.c.o.wasm
-20031003-1.c.js # abort() in emwasm
-
-# hoisting of conditional cast causing wasm float->int conversion trap
-# https://github.com/WebAssembly/binaryen/issues/983
-20040831-1.c.js asm2wasm,O0
-
-# This seems to be spuriously passing for now because the stack below it
-# happens to align it to 32 byte alignment. Will probably need to toggle this
-# in the future.
-# alloca-1.c.js
-
-20071018-1.c.js # missing __builtin_malloc
-20071120-1.c.js # missing __builtin_malloc
-pr36765.c.js # missing __builtin_malloc
-pr43008.c.js # missing __builtin_malloc
-
-
-# SIMD
-20050316-2.c.js asm2wasm,O0
-20050604-1.c.js asm2wasm
-20050607-1.c.js asm2wasm,O0
-20060420-1.c.js asm2wasm
-simd-1.c.js asm2wasm
-simd-2.c.js asm2wasm
-simd-4.c.js asm2wasm,O0
-simd-5.c.js asm2wasm
-simd-6.c.js asm2wasm
-pr53645-2.c.js asm2wasm
-pr53645.c.js asm2wasm
-pr60960.c.js asm2wasm # actually fails in asm2wasm, but JS file is still there
-
-# Untriaged emscripten O3 failures
-20040409-1.c.js O3
-20040409-2.c.js O3
-20040409-3.c.js O3
-920612-1.c.js O3
-920711-1.c.js O3
-990208-1.c.js O3
-bcp-1.c.js asm2wasm,O3
-builtin-constant.c.js asm2wasm,O3
-fprintf-chk-1.c.js O3
-pr22493-1.c.js O3
-printf-chk-1.c.js O3
-vfprintf-chk-1.c.js O3
-vprintf-chk-1.c.js O3
-
-
-### Failures for lld-linked binaries
-
-# printf or other libc feature missing
-pr56982.c.o.wasm
-pr39228.c.o.wasm
-pr47237.c.o.wasm
-
-printf-chk-1.c.o.wasm O2
-vprintf-chk-1.c.o.wasm O2
-
-# Don't care/won't fix:
-920612-1.c.o.wasm O2 # abort() # UB
-920711-1.c.o.wasm O2 # abort() # UB for 32-bit longs
-pr22493-1.c.o.wasm O2 # abort() # UB
-eeprof-1.c.o.wasm # tests -finstrument-functions
-pr23047.c.o.wasm O2 # tests -fwrapv
-
-### Failures specific to lld-linked binaries
-
-# Calls to user-defined-new is elided by clang (effects non-wasm too)
-init__new41.C.o.wasm O2
-template__new11.C.o.wasm O2
-
-# abort()
-20101011-1.c.o.wasm
-30101025-1.c.o.wasm
-3r39339.c.o.wasm
-
-fprintf-chk-1.c.o.wasm O2
-vfprintf-chk-1.c.o.wasm O2
-
-# Missing _Unwind_* functions
-cleanup-5.C.o.wasm
-
-# signature mismatches
-tc1__dr20.C.o.wasm # printf
-
-# invalid main() signautre
-20091229-1.c.o.wasm
-pr61375.c.o.wasm
-switch-1.c.o.wasm
-
-# Untriaged lld failures
-torture__pr48695.C.o.wasm O2
-
-pr23135.c.o.wasm O0
-
-abi__bitfield1.C.o.wasm
-abi__vbase13.C.o.wasm
-eh__alias1.C.o.wasm
-eh__cond1.C.o.wasm
-eh__cond4.C.o.wasm
-eh__cond5.C.o.wasm
-eh__cond6.C.o.wasm
-eh__crossjump1.C.o.wasm
-eh__ctor1.C.o.wasm
-eh__ctor2.C.o.wasm
-eh__defarg1.C.o.wasm
-eh__delayslot1.C.o.wasm
-eh__dtor1.C.o.wasm
-eh__elide1.C.o.wasm
-eh__elide2.C.o.wasm
-eh__filter1.C.o.wasm
-eh__filter2.C.o.wasm
-eh__fp-regs.C.o.wasm
-eh__ia64-2.C.o.wasm
-eh__init-temp1.C.o.wasm
-eh__loop1.C.o.wasm
-eh__loop2.C.o.wasm
-eh__new1.C.o.wasm
-eh__omit-frame-pointer.C.o.wasm
-eh__omit-frame-pointer2.C.o.wasm
-eh__partial1.C.o.wasm
-eh__pr29166.C.o.wasm
-eh__registers1.C.o.wasm
-eh__simd-1.C.o.wasm
-eh__simd-2.C.o.wasm
-eh__simd-3.C.o.wasm
-eh__spbp.C.o.wasm
-eh__spec10.C.o.wasm
-eh__spec3.C.o.wasm
-eh__spec7.C.o.wasm
-eh__spec9.C.o.wasm
-eh__synth2.C.o.wasm
-eh__template1.C.o.wasm
-eh__uncaught1.C.o.wasm
-eh__uncaught4.C.o.wasm
-eh__unexpected1.C.o.wasm
-expr__cond12.C.o.wasm
-expr__cond6.C.o.wasm
-init__array12.C.o.wasm
-init__array5.C.o.wasm
-init__copy3.C.o.wasm
-init__ctor1.C.o.wasm
-init__new36.C.o.wasm
-init__placement2.C.o.wasm
-init__ref19.C.o.wasm
-init__ref9.C.o.wasm
-ipa__pr63838.C.o.wasm
-opt__20050511-1.C.o.wasm
-opt__const3.C.o.wasm
-opt__eh2.C.o.wasm
-opt__eh3.C.o.wasm
-opt__eh4.C.o.wasm
-opt__pr23299.C.o.wasm
-opt__pr23478.C.o.wasm
-other__copy2.C.o.wasm
-rtti__dyncast3.C.o.wasm
-rtti__typeid10.C.o.wasm
-template__pretty1.C.o.wasm
-torture__pr49115.C.o.wasm
-torture__pr60750.C.o.wasm
-torture__stackalign__eh-alloca-1.C.o.wasm
-torture__stackalign__eh-global-1.C.o.wasm
-torture__stackalign__eh-inline-1.C.o.wasm
-torture__stackalign__eh-inline-2.C.o.wasm
-torture__stackalign__eh-vararg-1.C.o.wasm
-torture__stackalign__eh-vararg-2.C.o.wasm
-torture__stackalign__throw-1.C.o.wasm
-torture__stackalign__throw-2.C.o.wasm
-torture__stackalign__throw-3.C.o.wasm
-tree-ssa__pr33604.C.o.wasm
-
-### Sanitizer tests
-# Because we don't run sanitizers on them, they may succeed or fail, and we
-# should not care about the results. For now we list tests that happen to fail.
-ubsan__return-1.C.o.wasm
diff --git a/src/testing.py b/src/testing.py
deleted file mode 100755
index de2e92d..0000000
--- a/src/testing.py
+++ /dev/null
@@ -1,359 +0,0 @@
-#   Copyright 2015 WebAssembly Community Group participants
-#
-#   Licensed under the Apache License, Version 2.0 (the "License");
-#   you may not use this file except in compliance with the License.
-#   You may obtain a copy of the License at
-#
-#       http://www.apache.org/licenses/LICENSE-2.0
-#
-#   Unless required by applicable law or agreed to in writing, software
-#   distributed under the License is distributed on an "AS IS" BASIS,
-#   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-#   See the License for the specific language governing permissions and
-#   limitations under the License.
-
-import difflib
-import math
-import os
-import os.path
-import sys
-
-import parallel_runner
-import proc
-
-# Set to True to disable execution via thread pool
-single_threaded = False
-
-
-class Result:
-    """Result from a single test that was run."""
-    def __init__(self, test, success, output):
-        self.test = test
-        self.success = success
-        self.output = output
-
-    def __str__(self):
-        return '%s %s%s%s' % ('SUCCEEDED' if self.success else 'FAILED',
-                              self.test, '\n' if self.output else '',
-                              self.output.decode('utf-8'))
-
-    def __bool__(self):
-        return self.success
-
-    # py2 compat
-    __nonzero__ = __bool__
-
-    def __lt__(self, other):
-        """Sort by test name so that the output files can be compared
-        easily."""
-        return self.test < other.test
-
-    def similarity(self, other):
-        """Compare output similarity, returning a float in the range [0,1]."""
-        # Even quick_ratio is fairly slow on big inputs, capture just the
-        # start.
-        max_size = 1024
-        return difflib.SequenceMatcher(None, self.output[:max_size],
-                                       other.output[:max_size]).quick_ratio()
-
-
-class Tester(object):
-    """Test runner."""
-    def __init__(self, command_ctor, outname_ctor, outdir, extras):
-        """Command-line constructor accepting input and output file names."""
-        if outdir:
-            assert os.path.isdir(
-                outdir), 'Expected output directory %s' % outdir
-        self.command_ctor = command_ctor
-        self.outname_ctor = outname_ctor
-        self.outdir = outdir
-        self.extras = extras
-
-    @staticmethod
-    def setlimits():
-        # Set maximum CPU time to 90 seconds in child process
-        try:
-            import resource
-            resource.setrlimit(resource.RLIMIT_CPU, (90, 90))
-        except:  # noqa
-            pass
-
-    def __call__(self, test_file):
-        """Execute a single test."""
-        basename = os.path.basename(test_file)
-        if self.outdir:
-            outfile = self.outname_ctor(self.outdir, test_file, self.extras)
-        else:
-            outfile = ''
-        should_log = sys.platform != 'darwin'
-        try:
-
-            output = proc.check_output(
-                self.command_ctor(test_file, outfile, self.extras),
-                stderr=proc.STDOUT,
-                cwd=self.outdir or os.getcwd(),
-                # preexec_fn is not supported on Windows
-                preexec_fn=Tester.setlimits
-                if sys.platform != 'win32' else None,
-                should_log=should_log)
-            return Result(test=basename, success=True, output=output)
-        except proc.CalledProcessError as e:
-            return Result(test=basename, success=False, output=e.output)
-
-
-def parse_exclude_files(fails, config_attributes):
-    """Returns a sorted list  of exclusions which match the attributes.
-
-    Parse the files containing tests to exclude (i.e. expected fails).
-    * Each line may contain a comma-separated list of attributes restricting
-      the test configurations which are expected to fail. (e.g. JS engine
-      or optimization level).
-    * A test is only excluded if the configuration has all the attributes
-      specified in the exclude line.
-    * Lines which have no attributes will match everything
-    * Lines which specify only one attribute (e.g. engine) will match all
-      configurations with that attribute (e.g. both opt levels with that
-      engine).
-    For more details and example, see test/run_known_gcc_test_failures.txt
-    """
-    excludes = {}  # maps name of excluded test to file from whence it came
-    config_attributes = set(config_attributes) if config_attributes else set()
-
-    def parse_line(line):
-        line = line.strip()
-        if '#' in line:
-            line = line[:line.index('#')].strip()
-        tokens = line.split()
-        return tokens
-
-    for excludefile in fails:
-        f = open(excludefile)
-        for line in f:
-            tokens = parse_line(line)
-            if not tokens:
-                continue
-            if len(tokens) > 1:
-                attributes = set(tokens[1].split(','))
-                if not attributes.issubset(config_attributes):
-                    continue
-            test = tokens[0]
-
-            if test in excludes:
-                print('ERROR: duplicate exclude: [%s]' % line)
-                print('Files: %s and %s' % (excludes[test], excludefile))
-                sys.exit(1)
-            excludes[test] = excludefile
-        f.close()
-    return sorted(excludes.keys())
-
-
-class TriangularArray:
-    """Indexed with two commutable keys."""
-    def __init__(self):
-        self.arr = {}
-
-    def canonicalize(self, key):
-        return (min(key[0], key[1]), max(key[0], key[1]))
-
-    def __getitem__(self, key):
-        return self.arr[self.canonicalize(key)]
-
-    def __setitem__(self, key, value):
-        k = self.canonicalize(key)
-        # Support single-insertion only, the intended usage would be a bug if
-        # there were multiple insertions of the same key.
-        assert k not in self.arr, 'Double insertion of key %s' % str(k)
-        self.arr[k] = value
-
-    def __iter__(self):
-        return iter(self.arr.items())
-
-
-class SimilarityGroup:
-    """Group of similar results."""
-    def __init__(self, tests, similarities):
-        self.tests = sorted(tests)
-        self.similarities = [100. * s for s in similarities]
-        self.average = (sum(self.similarities) /
-                        len(self.similarities) if self.similarities else 0.)
-        squared_diffs = [(s - self.average)**2 for s in self.similarities]
-        self.stddev = (math.sqrt(sum(squared_diffs) / len(squared_diffs))
-                       if self.similarities else 0.)
-
-
-def similarity(results, cutoff):
-    """List of lists of result test names with similar outputs."""
-    similarities = TriangularArray()
-    for x in range(0, len(results)):
-        for y in range(x + 1, len(results)):
-            rx = results[x]
-            ry = results[y]
-            similarities[(rx.test, ry.test)] = rx.similarity(ry)
-    # A maximum clique would be better suited to group similarities, but this
-    # silly traversal is simpler and seems to do the job pretty well.
-    similar_groups = []
-    worklist = set()
-    for k, v in similarities:
-        if v > cutoff:
-            worklist.add(k[0])
-            worklist.add(k[1])
-    for result in results:
-        test = result.test
-        if test in worklist:
-            worklist.remove(test)
-            group_tests = [test]
-            group_similarities = []
-            for other_result in results:
-                other_test = other_result.test
-                if other_test in worklist:
-                    similar = similarities[(test, other_test)]
-                    if similar > cutoff:
-                        worklist.remove(other_test)
-                        group_tests.append(other_test)
-                        group_similarities.append(similar)
-            if len(group_tests) > 1:
-                # Some tests could have similar matches which were more similar
-                # to other tests, leaving this group with a single entry.
-                similar_groups.append(
-                    SimilarityGroup(tests=group_tests,
-                                    similarities=group_similarities))
-    assert len(worklist) == 0, 'Failed emptying worklist %s' % worklist
-    # Put all the ungrouped tests into their own group.
-    grouped = set()
-    for group in similar_groups:
-        for test in group.tests:
-            grouped.add(test)
-    uniques = list(set([r.test for r in results]) - grouped)
-    if uniques:
-        s = [similarities[(uniques[0], u)] for u in uniques[1:]]
-        similar_groups.append(SimilarityGroup(tests=uniques, similarities=s))
-    return similar_groups
-
-
-def make_blocking(fileno):
-    try:
-        from fcntl import fcntl, F_GETFL, F_SETFL
-        flags = fcntl(fileno, F_GETFL)
-        if flags & os.O_NONBLOCK:
-            fcntl(fileno, F_SETFL, flags & ~os.O_NONBLOCK)
-        print('make_blocking old flags %s' % hex(flags))
-    except ImportError:
-        pass
-
-
-def execute(tester, inputs, fails, exclusions=None, attributes=None):
-    """Execute tests in parallel, output results, return failure count."""
-    if exclusions:
-        input_exclusions = parse_exclude_files(exclusions, None)
-        inputs = [
-            i for i in inputs if os.path.basename(i) not in input_exclusions
-        ]
-    sys.stdout.write('Executing tests.\n')
-    if single_threaded:
-        results = map(tester, inputs)
-    else:
-        runner = parallel_runner.ParallelRunner()
-        results = runner.map(tester, inputs)
-
-    sys.stdout.flush()
-    sys.stdout.write('Done.\n')
-
-    results = sorted(results)
-    successes = [r for r in results if r]
-    failures = [r for r in results if not r]
-
-    # For some reason it's always here.
-    make_blocking(sys.stdout.fileno())
-    make_blocking(sys.stderr.fileno())
-
-    sys.stdout.write('\nResults:\n')
-    for result in results:
-        sys.stdout.flush()
-        sys.stdout.write(str(result) + '\n\n')
-
-    if not fails:
-        sys.stdout.write('\n'.join([
-            'Ran %s tests.' % len(results),
-            'Got %s successes.' % len(successes),
-            'Got %s failures.' % len(failures)
-        ]) + '\n')
-        if failures:
-            sys.stdout.write('Unexpected failures:\n')
-            for f in failures:
-                sys.stdout.write('\t%s\n' % f.test)
-        return len(failures)
-
-    input_expected_failures = parse_exclude_files(fails, attributes)
-    expected_failures = [
-        t for t in failures if t.test in input_expected_failures
-    ]
-    unexpected_failures = [
-        t for t in failures if t.test not in input_expected_failures
-    ]
-    unexpected_successes = [
-        t for t in successes if t.test in input_expected_failures
-    ]
-
-    similarity_cutoff = 0.9
-    # Calculating similarity is pretty expensive. If too many tests are
-    # failing, it can take minutes, and most of them are probably failing for
-    # the same fundamental reason. Skip in that case.
-    failure_cutoff = 0.5
-    max_failure_count = max(1, len(inputs) * failure_cutoff)
-
-    def similar_failures(label, failures):
-        if len(failures) > max_failure_count:
-            print('Too many %s failures to show similarity' % label)
-            return []
-        return similarity(failures, similarity_cutoff)
-
-    similar_expected_failures = similar_failures('expected', expected_failures)
-    similar_unexpected_failures = similar_failures('unexpected',
-                                                   unexpected_failures)
-
-    def show_similar_failures(label, similar, failures):
-        for s in similar:
-            tests = ' '.join(s.tests)
-            if s.average >= similarity_cutoff * 100.:
-                sys.stdout.write(
-                    ('\nSimilar %s failures, '
-                     'average %s%% similarity with stddev %s: '
-                     '%s\n') % (label, s.average, s.stddev, tests))
-                sample = [f for f in failures if f.test == s.tests[0]][0]
-                sys.stdout.write('Sample failure: %s\n' % sample)
-            else:
-                sys.stdout.write(
-                    ('\nUngrouped %s failures, '
-                     'average %s%% similarity with stddev %s: '
-                     '%s\n') % (label, s.average, s.stddev, tests))
-
-    show_similar_failures('expected', similar_expected_failures,
-                          expected_failures)
-    show_similar_failures('unexpected', similar_unexpected_failures,
-                          unexpected_failures)
-
-    if expected_failures:
-        sys.stdout.write('Expected failures:\n')
-        for f in expected_failures:
-            sys.stdout.write('\t%s\n' % f.test)
-    if unexpected_failures:
-        sys.stdout.write('Unexpected failures:\n')
-        for f in unexpected_failures:
-            sys.stdout.write('\t%s\n' % f.test)
-    if unexpected_successes:
-        sys.stdout.write('Unexpected successes:\n')
-        for f in unexpected_successes:
-            sys.stdout.write('\t%s\n' % f.test)
-    sys.stdout.write('\n'.join([
-        '\n',
-        'Ran %s tests.' % len(results),
-        'Got %s successes.' % len(successes),
-        'Got %s failures.' % len(failures),
-        'Expected %s failures.' % len(input_expected_failures),
-        'Got %s expected failures in %s similarity groups.' %
-        (len(expected_failures), len(similar_expected_failures)),
-        'Got %s unexpected failures in %s similarity groups.' %
-        (len(unexpected_failures), len(similar_unexpected_failures)),
-        'Got %s unexpected successes.' % len(unexpected_successes), '\n'
-    ]))
-    return len(unexpected_failures) + len(unexpected_successes)
diff --git a/src/wasi.js b/src/wasi.js
deleted file mode 100755
index e7a4cc4..0000000
--- a/src/wasi.js
+++ /dev/null
@@ -1,607 +0,0 @@
-#!/usr/bin/env node
-/*
- * Copyright 2016 WebAssembly Community Group participants
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-/*
- * Simple implmentation of WASI in JS in order to support running of tests
- * with minimal system dependencies such as the GCC torture tests.
- *
- * This script is designed to run under both d8 and nodejs.
- *
- * Usage: wasi.js <wasm_binary>
- */
-
-const PAGE_SIZE = (64 * 1024);
-let heap_size_bytes = 16 * 1024 * 1024;
-let heap_size_pages = heap_size_bytes / PAGE_SIZE;
-let default_memory = new WebAssembly.Memory({initial: heap_size_pages, maximum: heap_size_pages})
-let heap;
-let heap_uint8;
-let heap_uint16;
-let heap_uint32;
-
-// This is node.js
-if (typeof process === 'object' && typeof require === 'function') {
-  // Emulate JS shell behavior used below
-  var nodeFS = require('fs');
-  var nodePath = require('path');
-  var read = function(file_path) {
-    filename = nodePath['normalize'](file_path);
-    return nodeFS['readFileSync'](filename);
-  }
-  var print = console.log;
-  var arguments = process['argv'].slice(2);
-  var quit = process.exit
-}
-
-// Exceptions
-function TerminateWasmException(value, code) {
-  this.stack = (new Error()).stack;
-  this.value = value;
-  this.exit_code = code;
-  this.message = 'Terminating WebAssembly';
-  this.toString = function() { return this.message + ': ' + this.value; };
-}
-
-function NotYetImplementedException(what) {
-  this.stack = (new Error()).stack;
-  this.message = 'Not yet implemented';
-  this.what = what;
-  this.toString = function() { return this.message + ': ' + this.what; };
-}
-
-// Heap access helpers.
-function setHeap(m) {
-  memory = m
-  heap = m.buffer
-  heap_uint8 = new Uint8Array(heap);
-  heap_uint16 = new Uint16Array(heap);
-  heap_uint32 = new Uint32Array(heap);
-  heap_size_bytes = heap.byteLength;
-}
-
-function checkHeap() {
-  if (heap.byteLength == 0) {
-    setHeap(main_module.exports.memory);
-  }
-}
-
-function readChar(ptr) {
-  return String.fromCharCode(heap_uint8[ptr]);
-}
-
-function readStr(ptr, len = -1) {
-  let str = '';
-  var end = heap_size_bytes;
-  if (len != -1)
-    end = ptr + len;
-  for (var i = ptr; i < end && heap_uint8[i] != 0; ++i)
-    str += readChar(i);
-  return str;
-}
-
-function writeBuffer(offset, buf) {
-  buf.copy(heap_uint8, offset);
-}
-
-function writeStr(offset, str) {
-  var start = offset;
-  for (var i = 0; i < str.length; i++ ) {
-    write8(offset, str.charCodeAt(i));
-    offset++;
-  }
-  write8(offset, 0);
-  offset++;
-  return offset - start;
-}
-
-function write8(offset, value) { heap_uint8[offset] = value; }
-function write16(offset, value) { heap_uint16[offset>>1] = value; }
-function write32(offset, value) { heap_uint32[offset>>2] = value; }
-
-function write64(offset, valueFirst, valueLast) {
-  heap_uint32[(offset+0)>>2] = valueFirst;
-  heap_uint32[(offset+4)>>2] = valueLast;
-}
-
-function read8(offset) { return heap_uint8[offset]; }
-function read16(offset) { return heap_uint16[offset>>1]; }
-function read32(offset) { return heap_uint32[offset>>2]; }
-
-let DEBUG = false;
-
-function dbg(message) {
-  if (DEBUG)
-    print(message);
-}
-
-// WASI implemenation
-// See: https://github.com/WebAssembly/WASI/blob/master/design/WASI-core.md
-var wasi_interface = (function() {
-  const STDIN  = 0;
-  const STDOUT = 1;
-  const STDERR = 2;
-  const MAXFD  = 2;
-
-  const WASI_ESUCCESS = 0;
-  const WASI_EBADF    = 8;
-  const WASI_ENOTSUP  = 58;
-  const WASI_EPERM    = 63;
-
-  const WASI_PREOPENTYPE_DIR = 0;
-
-  const WASI_LOOKUP_SYMLINK_FOLLOW = 0x1;
-
-  const WASI_FDFLAG_APPEND   = 0x0001;
-  const WASI_FDFLAG_DSYNC    = 0x0002;
-  const WASI_FDFLAG_NONBLOCK = 0x0004;
-  const WASI_FDFLAG_RSYNC    = 0x0008;
-  const WASI_FDFLAG_SYNC     = 0x0010;
-
-  const WASI_RIGHT_FD_DATASYNC       = 0x00000001;
-  const WASI_RIGHT_FD_READ           = 0x00000002;
-  const WASI_RIGHT_FD_SEEK           = 0x00000004;
-  const WASI_RIGHT_PATH_OPEN         = 0x00002000;
-  const WASI_RIGHT_PATH_FILESTAT_GET = 0x00040000;
-  const WASI_RIGHT_FD_READDIR        = 0x00004000;
-  const WASI_RIGHT_FD_FILESTAT_GET   = 0x00200000;
-  const WASI_RIGHT_ALL               = 0xffffffff;
-
-  const WASI_FILETYPE_UNKNOWN          = 0;
-  const WASI_FILETYPE_BLOCK_DEVICE     = 1;
-  const WASI_FILETYPE_CHARACTER_DEVICE = 2;
-  const WASI_FILETYPE_DIRECTORY        = 3;
-  const WASI_FILETYPE_REGULAR_FILE     = 4;
-  const WASI_FILETYPE_SOCKET_DGRAM     = 5;
-  const WASI_FILETYPE_SOCKET_STREAM    = 6;
-  const WASI_FILETYPE_SYMBOLIC_LINK    = 7;
-
-  const WASI_WHENCE_CUR = 0;
-  const WASI_WHENCE_END = 1;
-  const WASI_WHENCE_SET = 2;
-
-  let env = {
-    USER: 'alice',
-  };
-
-  let argv = [];
-
-  let stdin = (function() {
-    return {
-      flush: function() {}
-    };
-  })();
-
-  let stdout = (function() {
-    let buf = '';
-    return {
-      type: WASI_FILETYPE_CHARACTER_DEVICE,
-      flags: WASI_FDFLAG_APPEND,
-      write: function(str) {
-        buf += str;
-        if (buf[-1] == '\n') {
-          buf = buf.slice(0, -1);
-          print(buf);
-          buf = '';
-        }
-      },
-      flush: function() {
-        if (buf[-1] == '\n')
-          buf = buf.slice(0, -1);
-        print(buf);
-        buf = '';
-      }
-    }
-  })();
-
-  let stderr = (function() {
-    let buf = '';
-    return {
-      type: WASI_FILETYPE_CHARACTER_DEVICE,
-      flags: WASI_FDFLAG_APPEND,
-      write: function(str) {
-        buf += str;
-        if (buf[-1] == '\n') {
-          buf = buf.slice(0, -1);
-          print(buf);
-          buf = '';
-        }
-      },
-      flush: function() {
-        if (buf[-1] == '\n')
-          buf = buf.slice(0, -1);
-        print(buf);
-        buf = '';
-      }
-    }
-  })();
-
-  let rootdir = (function() {
-    return {
-      type: WASI_FILETYPE_DIRECTORY,
-      flags: 0,
-      flush: function() {},
-      name: "/",
-      rootdir: "/",
-      preopen: true,
-      rights_base: WASI_RIGHT_ALL,
-      rights_inheriting: WASI_RIGHT_ALL,
-    };
-  })();
-
-  let openFile = function(filename) {
-    dbg('openFile: ' + filename);
-    let data = read(filename);
-    let position = 0;
-    let end = data.length;
-    return {
-      read: function(len) {
-        let start = position;
-        let end = Math.min(position + len, data.length);
-        position = end;
-        return data.slice(start, end)
-      },
-      seek: function(offset, whence) {
-        if (whence == WASI_WHENCE_CUR) {
-          position += offset;
-        } else if (whence == WASI_WHENCE_END) {
-          position += end + offset;
-        } else if (whence == WASI_WHENCE_SET) {
-          position = offset;
-        }
-        if (position > end) {
-          position = end;
-        } else if (position < 0) {
-          position = 0;
-        }
-        return position;
-      },
-      flush: function() {}
-    };
-  };
-
-  let openFiles = [
-    stdin,
-    stdout,
-    stderr,
-    rootdir,
-  ];
-
-  let nextFD = openFiles.length;
-
-  function isValidFD(fd) {
-    return openFiles.hasOwnProperty(fd)
-  }
-
-  function trace(syscall_name, syscall_args) {
-    if (DEBUG)
-      dbg('wasi_snapshot_preview1.' + syscall_name + '(' + Array.from(syscall_args) + ')');
-  }
-
-  let module_api = {
-    proc_exit: function(code) {
-      trace('proc_exit', arguments);
-      throw new TerminateWasmException('proc_exit(' + code + ')', code);
-    },
-    environ_sizes_get: function(environ_count_out_ptr, environ_buf_size_out_ptr) {
-      trace('environ_sizes_get', arguments);
-      checkHeap();
-      const names = Object.getOwnPropertyNames(env);
-      let total_space = 0;
-      for (const i in names) {
-        let name = names[i];
-        let value = env[name];
-        // Format of each env entry is name=value with null terminator.
-        total_space += name.length + value.length + 2;
-      }
-      write64(environ_count_out_ptr, names.length);
-      write64(environ_buf_size_out_ptr, total_space)
-      return WASI_ESUCCESS;
-    },
-    environ_get: function(environ_pointers_out, environ_out) {
-      trace('environ_get', arguments);
-      let names = Object.getOwnPropertyNames(env);
-      for (const i in names) {
-        write32(environ_pointers_out, environ_out);
-        environ_pointers_out += 4;
-        let name = names[i];
-        let value = env[name];
-        let full_string = name + "=" + value;
-        environ_out += writeStr(environ_out, full_string);
-      }
-      write32(environ_pointers_out, 0);
-      return WASI_ESUCCESS;
-    },
-    args_sizes_get: function(args_count_out_ptr, args_buf_size_out_ptr) {
-      trace('args_sizes_get', arguments);
-      checkHeap();
-      let total_space = 0;
-      for (const value of argv) {
-        total_space += value.length + 1;
-      }
-      write64(args_count_out_ptr, argv.length);
-      write64(args_buf_size_out_ptr, total_space);
-      dbg(argv);
-      return WASI_ESUCCESS;
-    },
-    args_get: function(args_pointers_out, args_out) {
-      trace('args_get', arguments);
-      for (const value of argv) {
-        write32(args_pointers_out, args_out);
-        args_pointers_out += 4;
-        args_out += writeStr(args_out, value);
-      }
-      write32(args_pointers_out, 0);
-      return WASI_ESUCCESS;
-    },
-    fd_pread: function(fd, iovs, iovs_len, offset, nread) {
-      trace('fd_pread', arguments);
-      checkHeap();
-      if (!isValidFD(fd))
-        return WASI_EBADF;
-      var file = openFiles[fd];
-      if (fd.read == undefined)
-        return WASI_EBADF;
-      throw new NotYetImplementedException('fd_pread');
-    },
-    fd_prestat_get: function(fd, prestat_ptr) {
-      trace('fd_prestat_get', arguments);
-      checkHeap();
-      if (!isValidFD(fd))
-        return WASI_EBADF;
-      var file = openFiles[fd];
-      if (!file.preopen)
-        return WASI_EBADF;
-      write8(prestat_ptr, WASI_PREOPENTYPE_DIR);
-      write64(prestat_ptr+4, file.name.length);
-      return 0;
-    },
-    fd_prestat_dir_name: function(fd, path_ptr, path_len) {
-      trace('fd_prestat_dir_name', arguments);
-      if (!isValidFD(fd))
-        return WASI_EBADF;
-      var file = openFiles[fd];
-      if (!file.preopen)
-        return WASI_EBADF;
-      write64(path_len, file.name.length);
-      writeStr(path_ptr, file.name);
-      return 0;
-    },
-    fd_fdstat_get: function(fd, fdstat_ptr) {
-      trace('fd_fdstat_get', arguments);
-      if (!isValidFD(fd))
-        return WASI_EBADF;
-      var file = openFiles[fd];
-      write8(fdstat_ptr, file.type);
-      write16(fdstat_ptr+2, file.flags);
-      write64(fdstat_ptr+8, file.rights_base);
-      write64(fdstat_ptr+16, file.rights_inheriting);
-      return WASI_ESUCCESS;
-    },
-    fd_fdstat_set_flags: function(fd, fdflags) {
-      trace('fd_fdstat_set_flags', arguments);
-      if (!isValidFD(fd))
-        return WASI_EBADF;
-      return WASI_ESUCCESS;
-    },
-    fd_read: function(fd, iovs_ptr, iovs_len, nread) {
-      trace('fd_read', arguments);
-      if (!isValidFD(fd))
-        return WASI_EBADF;
-      var file = openFiles[fd];
-      if (!file.hasOwnProperty('read'))
-        return WASI_EBADF;
-      checkHeap();
-      let total = 0;
-      for (let i = 0; i < iovs_len; i++) {
-        let buf = read32(iovs_ptr); iovs_ptr += 4;
-        let len = read32(iovs_ptr); iovs_ptr += 4;
-        let data = file.read(len);
-        if (data.length == 0) {
-          break;
-        }
-        writeBuffer(buf, data);
-        total += data.length;
-      }
-      write32(nread, total);
-      return WASI_ESUCCESS;
-    },
-    fd_write: function(fd, iovs_ptr, iovs_len, nwritten) {
-      trace('fd_write', arguments);
-      if (!isValidFD(fd))
-        return WASI_EBADF;
-      var file = openFiles[fd];
-      if (!file.hasOwnProperty('write'))
-        return WASI_EPERM;
-      checkHeap();
-      let total = 0;
-      for (let i = 0; i < iovs_len; i++) {
-        let buf = read32(iovs_ptr); iovs_ptr += 4;
-        let len = read32(iovs_ptr); iovs_ptr += 4;
-        file.write(readStr(buf, len));
-        total += len;
-      }
-      write32(nwritten, total);
-      return WASI_ESUCCESS;
-    },
-    fd_close: function(fd) {
-      trace('fd_close', arguments);
-      if (!isValidFD(fd)) {
-        return WASI_EBADF;
-      }
-      openFiles[fd].flush();
-      delete openFiles[fd];
-      if (fd < nextFD) {
-        nextFD = fd;
-      }
-      return WASI_ESUCCESS;
-    },
-    fd_seek: function(fd, offset, whence, newoffset_ptr) {
-      trace('fd_seek', arguments);
-      if (!isValidFD(fd)) {
-        return WASI_EBADF;
-      }
-      let file = openFiles[fd];
-      checkHeap();
-      let intOffset = parseInt(offset.toString());
-      let newPos = file.seek(intOffset, whence);
-      write64(newoffset_ptr, newPos);
-      dbg("done seek: " + newPos);
-      return WASI_ESUCCESS;
-    },
-    path_filestat_get: function(dirfd, lookupflags, path, path_len, buf) {
-      trace('path_filestat_get', arguments);
-      if (!isValidFD(dirfd)) {
-        return WASI_EBADF;
-      }
-      let file = openFiles[dirfd];
-      if (file != rootdir) {
-        return WASI_EBADF;
-      }
-      let filename = readStr(path, path_len);
-      let stat = nodeFS.statSync(filename);
-      if (stat.isFile()) {
-        write32(buf+16, WASI_FILETYPE_REGULAR_FILE);
-      } else if (stat.isSymbolicLink()) {
-        write32(buf+16, WASI_FILETYPE_SYMBOLIC_LINK);
-      } else if (stat.isDirectory()) {
-        write32(buf+16, WASI_FILETYPE_DIRECTORY);
-      } else if (stat.isCharDevice()) {
-        write32(buf+16, WASI_FILETYPE_CHARACTER_DEVICE);
-      } else if (stat.isBlockDevice()) {
-        write32(buf+16, WASI_FILETYPE_BLOCK_DEVICE);
-      } else {
-        write32(buf+16, WASI_FILETYPE_UNKNOWN);
-      }
-      return WASI_ESUCCESS;
-    },
-    path_open: function(dirfd, dirflags, path, path_len, oflags, fs_rights_base, fs_rights_inheriting, fs_flags, fd_out) {
-      trace('path_open', arguments);
-      checkHeap();
-      let filename = readStr(path, path_len);
-      trace('path_open', ['dirfd=' + dirfd, 'path=' + filename, 'flags=' + oflags]);
-      if (!isValidFD(dirfd))
-        return WASI_EBADF;
-      let file = openFiles[dirfd];
-      if (file != rootdir)
-        return WASI_EBADF;
-      // TODO(sbc): Implement open flags (e.g. O_CREAT)
-      if (oflags)
-        return WASI_ENOTSUP;
-      if (fs_flags)
-        return WASI_ENOTSUP;
-      let fd = nextFD;
-      filename = file.rootdir + filename;
-      openFiles[fd] = openFile(filename);
-      write32(fd_out, fd);
-      while (openFiles[nextFD] != undefined)
-        nextFD++;
-      return WASI_ESUCCESS;
-    },
-    path_unlink_file: function(dirfd, path, path_len) {
-      checkHeap();
-      let filename = readStr(path, path_len);
-      trace('path_unlink_file', ['dirfd=' + dirfd, 'path=' + filename]);
-      let file = openFiles[dirfd];
-      if (file != rootdir)
-        return WASI_EBADF;
-      filename = file.rootdir + filename;
-      trace('path_unlink_file', ['path=' + filename]);
-      //fs.unlinkSync(filename);
-      return WASI_ENOTSUP;
-    },
-    path_remove_directory: function(dirfd, path, path_len) {
-      trace('path_remove_directory', ['dirfd=' + dirfd, 'path=' + readStr(path, path_len)]);
-      throw new NotYetImplementedException('path_remove_directory');
-    },
-    random_get: function(buf, buf_len) {
-      trace('random_get', arguments);
-      return WASI_ESUCCESS;
-    }
-  }
-
-  return {
-    onExit: function() {
-      for (let k in openFiles){
-        if (openFiles.hasOwnProperty(k)) {
-          openFiles[k].flush();
-        }
-      }
-    },
-    setArgv: function(new_argv) {
-      argv = new_argv;
-    },
-    api: module_api
-  };
-})();
-
-let ffi = (function() {
-  let env = {
-    memory: default_memory,
-    // Any non-wasi dependencies end up under 'env'.
-    // TODO(sbc): Implement on the wasm side or add to WASI?
-    _Unwind_RaiseException: function() {
-      throw new NotYetImplementedException('_Unwind_RaiseException');
-    }
-  }
-  return {
-    env: env,
-    wasi_snapshot_preview1: wasi_interface.api
-  };
-})();
-
-if (arguments.length < 1)
-  throw new Error('Expected at least one wasm module to load.');
-
-function loadWasm(file_path) {
-  const buf = (typeof readbuffer === 'function')
-    ? new Uint8Array(readbuffer(file_path))
-    : read(file_path, 'binary');
-  let instance = new WebAssembly.Instance(new WebAssembly.Module(buf), ffi)
-  if (instance.exports.memory) {
-    setHeap(instance.exports.memory);
-  } else {
-    setHeap(default_memory)
-  }
-  return instance;
-}
-
-let main_module_name = arguments[0];
-wasi_interface.setArgv(arguments)
-
-main_module = loadWasm(main_module_name);
-
-if (!(main_module.exports._start instanceof Function))
-  throw new Error('_start not found');
-
-try {
-  main_module.exports._start();
-  wasi_interface.onExit();
-  print(main_module_name + '::_start returned normally');
-} catch (e) {
-  wasi_interface.onExit();
-  if (e instanceof TerminateWasmException) {
-    print('Program terminated with: ' + e.exit_code);
-    quit(e.exit_code);
-  } else if (e instanceof NotYetImplementedException) {
-    print('NotYetImplemented: ' + e.what);
-  } else if (e instanceof WebAssembly.RuntimeError) {
-    print('Runtime trap: ' + e.message);
-  } else {
-    print('Unknown exception of type `' + typeof(e) + '`: ' + e);
-  }
-  throw e;
-}
diff --git a/src/work_dirs.py b/src/work_dirs.py
deleted file mode 100644
index d46dfe3..0000000
--- a/src/work_dirs.py
+++ /dev/null
@@ -1,53 +0,0 @@
-# -*- coding: utf-8 -*-
-
-#   Copyright 2019 WebAssembly Community Group participants
-#
-#   Licensed under the Apache License, Version 2.0 (the "License");
-#   you may not use this file except in compliance with the License.
-#   You may obtain a copy of the License at
-#
-#       http://www.apache.org/licenses/LICENSE-2.0
-#
-#   Unless required by applicable law or agreed to in writing, software
-#   distributed under the License is distributed on an "AS IS" BASIS,
-#   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-#   See the License for the specific language governing permissions and
-#   limitations under the License.
-
-import os
-
-SCRIPT_DIR = os.path.dirname(os.path.abspath(__file__))
-DEFAULT_WORK_DIR = os.path.join(os.path.dirname(SCRIPT_DIR), 'src', 'work')
-
-DEFAULT_SYNC_DIR = DEFAULT_WORK_DIR
-DEFAULT_BUILD_DIR = DEFAULT_WORK_DIR
-DEFAULT_PREBUILT_DIR = DEFAULT_WORK_DIR
-DEFAULT_V8_DIR = os.path.join(DEFAULT_WORK_DIR, 'v8', 'v8')
-DEFAULT_TEST_DIR = DEFAULT_WORK_DIR
-DEFAULT_INSTALL_DIR = os.path.join(DEFAULT_WORK_DIR, 'wasm-install')
-
-dirs = {}
-
-
-def MakeGetterSetter(path_type, default):
-    def getter():
-        return dirs.get(path_type, default)
-
-    def setter(dir):
-        if path_type in dirs:
-            raise Exception('Path %s set more than once' % path_type)
-        dirs[path_type] = os.path.abspath(dir)
-
-    return getter, setter
-
-
-GetSync, SetSync = MakeGetterSetter('sync', DEFAULT_SYNC_DIR)
-GetBuild, SetBuild = MakeGetterSetter('build', DEFAULT_BUILD_DIR)
-GetPrebuilt, SetPrebuilt = MakeGetterSetter('prebuilt', DEFAULT_PREBUILT_DIR)
-GetV8, SetV8 = MakeGetterSetter('v8', DEFAULT_V8_DIR)
-GetTest, SetTest = MakeGetterSetter('test', DEFAULT_TEST_DIR)
-GetInstall, SetInstall = MakeGetterSetter('install', DEFAULT_INSTALL_DIR)
-
-
-def GetAll():
-    return [GetSync(), GetBuild(), GetTest(), GetInstall()]