feat: add a historical wit-bindgen
1
__wasm/wit-bindgen-sample/.gitignore
vendored
@@ -1 +0,0 @@
|
||||
wit-bindgen
|
||||
@@ -0,0 +1,28 @@
|
||||
# See here for image contents: https://github.com/microsoft/vscode-dev-containers/tree/v0.234.0/containers/python-3/.devcontainer/base.Dockerfile
|
||||
|
||||
# [Choice] Python version (use -bullseye variants on local arm64/Apple Silicon): 3, 3.10, 3.9, 3.8, 3.7, 3.6, 3-bullseye, 3.10-bullseye, 3.9-bullseye, 3.8-bullseye, 3.7-bullseye, 3.6-bullseye, 3-buster, 3.10-buster, 3.9-buster, 3.8-buster, 3.7-buster, 3.6-buster
|
||||
ARG VARIANT="3.10-bullseye"
|
||||
FROM mcr.microsoft.com/vscode/devcontainers/python:0-${VARIANT}
|
||||
|
||||
# [Choice] Node.js version: none, lts/*, 16, 14, 12, 10
|
||||
ARG NODE_VERSION="none"
|
||||
RUN if [ "${NODE_VERSION}" != "none" ]; then su vscode -c "umask 0002 && . /usr/local/share/nvm/nvm.sh && nvm install ${NODE_VERSION} 2>&1"; fi
|
||||
|
||||
# [Optional] If your pip requirements rarely change, uncomment this section to add them to the image.
|
||||
# COPY requirements.txt /tmp/pip-tmp/
|
||||
# RUN pip3 --disable-pip-version-check --no-cache-dir install -r /tmp/pip-tmp/requirements.txt \
|
||||
# && rm -rf /tmp/pip-tmp
|
||||
|
||||
# [Optional] Uncomment this section to install additional OS packages.
|
||||
# RUN apt-get update && export DEBIAN_FRONTEND=noninteractive \
|
||||
# && apt-get -y install --no-install-recommends <your-package-list-here>
|
||||
|
||||
# [Optional] Uncomment this line to install global node packages.
|
||||
# RUN su vscode -c "source /usr/local/share/nvm/nvm.sh && npm install -g <your-package-here>" 2>&1
|
||||
|
||||
RUN cd /opt && curl -L https://github.com/WebAssembly/wasi-sdk/releases/download/wasi-sdk-16/wasi-sdk-16.0-linux.tar.gz \
|
||||
| tar -xz
|
||||
ENV WASI_SDK_PATH=/opt/wasi-sdk-16.0
|
||||
|
||||
RUN echo 'alias clang=/opt/wasi-sdk-16.0/bin/clang' >> /etc/bash.bashrc
|
||||
RUN echo 'alias clang++=/opt/wasi-sdk-16.0/bin/clang++' >> /etc/bash.bashrc
|
||||
@@ -0,0 +1,51 @@
|
||||
// For format details, see https://aka.ms/devcontainer.json. For config options, see the README at:
|
||||
// https://github.com/microsoft/vscode-dev-containers/tree/v0.234.0/containers/python-3
|
||||
{
|
||||
"name": "Dev",
|
||||
"build": {
|
||||
"dockerfile": "Dockerfile",
|
||||
"context": "..",
|
||||
"args": {
|
||||
// Update 'VARIANT' to pick a Python version: 3, 3.10, 3.9, 3.8, 3.7, 3.6
|
||||
// Append -bullseye or -buster to pin to an OS version.
|
||||
// Use -bullseye variants on local arm64/Apple Silicon.
|
||||
"VARIANT": "3.9",
|
||||
// Options
|
||||
"NODE_VERSION": "16"
|
||||
}
|
||||
},
|
||||
|
||||
// Set *default* container specific settings.json values on container create.
|
||||
"settings": {
|
||||
"python.defaultInterpreterPath": "/usr/local/bin/python",
|
||||
"python.linting.enabled": true,
|
||||
"python.linting.pylintEnabled": true,
|
||||
"python.formatting.autopep8Path": "/usr/local/py-utils/bin/autopep8",
|
||||
"python.formatting.blackPath": "/usr/local/py-utils/bin/black",
|
||||
"python.formatting.yapfPath": "/usr/local/py-utils/bin/yapf",
|
||||
"python.linting.banditPath": "/usr/local/py-utils/bin/bandit",
|
||||
"python.linting.flake8Path": "/usr/local/py-utils/bin/flake8",
|
||||
"python.linting.mypyPath": "/usr/local/py-utils/bin/mypy",
|
||||
"python.linting.pycodestylePath": "/usr/local/py-utils/bin/pycodestyle",
|
||||
"python.linting.pydocstylePath": "/usr/local/py-utils/bin/pydocstyle",
|
||||
"python.linting.pylintPath": "/usr/local/py-utils/bin/pylint"
|
||||
},
|
||||
|
||||
// Add the IDs of extensions you want installed when the container is created.
|
||||
"extensions": [
|
||||
"ms-python.python",
|
||||
"ms-python.vscode-pylance"
|
||||
],
|
||||
|
||||
// Use 'forwardPorts' to make a list of ports inside the container available locally.
|
||||
// "forwardPorts": [],
|
||||
|
||||
// Use 'postCreateCommand' to run commands after the container is created.
|
||||
"postCreateCommand": "bash .devcontainer/finalize.sh",
|
||||
|
||||
// Comment out to connect as root instead. More info: https://aka.ms/vscode-remote/containers/non-root.
|
||||
"remoteUser": "vscode",
|
||||
"features": {
|
||||
"rust": "latest"
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,16 @@
|
||||
printf "Running 'postCreateCommand' Script\n"
|
||||
|
||||
# Install Rust Targets
|
||||
printf "Installing Rust Targets\n"
|
||||
rustup update stable --no-self-update
|
||||
rustup default stable
|
||||
rustup target add wasm32-unknown-unknown
|
||||
rustup target add wasm32-wasi
|
||||
|
||||
# Install Python stuff
|
||||
printf "Installing Python Dependencies"
|
||||
pip install mypy wasmtime
|
||||
|
||||
# Install NPM dependencies
|
||||
printf "Installing NPM Dependencies"
|
||||
cd crates/gen-js && npm install
|
||||
91
__wasm/wit-bindgen-sample/wit-bindgen/.github/workflows/main.yml
vendored
Normal file
@@ -0,0 +1,91 @@
|
||||
name: CI
|
||||
|
||||
on:
|
||||
push:
|
||||
branches: [main]
|
||||
pull_request:
|
||||
branches: [main]
|
||||
defaults:
|
||||
run:
|
||||
shell: bash
|
||||
|
||||
# Cancel any in-flight jobs for the same PR/branch so there's only one active
|
||||
# at a time
|
||||
concurrency:
|
||||
group: ${{ github.workflow }}-${{ github.ref }}
|
||||
cancel-in-progress: true
|
||||
|
||||
jobs:
|
||||
test:
|
||||
name: Test
|
||||
strategy:
|
||||
fail-fast: false
|
||||
matrix:
|
||||
os: [ubuntu-latest, macos-latest, windows-latest]
|
||||
mode: [debug, release]
|
||||
runs-on: ${{ matrix.os }}
|
||||
steps:
|
||||
- uses: actions/checkout@v2
|
||||
- name: Install Rust
|
||||
run: rustup update stable --no-self-update && rustup default stable
|
||||
- name: Install wasm32-unknown-unknown target
|
||||
run: rustup target add wasm32-unknown-unknown
|
||||
- name: Install wasm32-wasi target
|
||||
run: rustup target add wasm32-wasi
|
||||
|
||||
- run: |
|
||||
curl https://github.com/WebAssembly/wasi-sdk/releases/download/wasi-sdk-16/wasi-sdk-16.0-linux.tar.gz -L | tar xzvf -
|
||||
echo "WASI_SDK_PATH=`pwd`/wasi-sdk-16.0" >> $GITHUB_ENV
|
||||
if: matrix.os == 'ubuntu-latest'
|
||||
- run: |
|
||||
curl https://github.com/WebAssembly/wasi-sdk/releases/download/wasi-sdk-16/wasi-sdk-16.0-macos.tar.gz -L | tar xzvf -
|
||||
echo "WASI_SDK_PATH=`pwd`/wasi-sdk-16.0" >> $GITHUB_ENV
|
||||
if: matrix.os == 'macos-latest'
|
||||
- run: |
|
||||
curl https://github.com/WebAssembly/wasi-sdk/releases/download/wasi-sdk-16/wasi-sdk-16.0-mingw.tar.gz -L | tar xzvf -
|
||||
echo "WASI_SDK_PATH=`pwd`/wasi-sdk-16.0" >> $GITHUB_ENV
|
||||
if: matrix.os == 'windows-latest'
|
||||
|
||||
- uses: actions/setup-node@v2
|
||||
with:
|
||||
node-version: '16'
|
||||
- name: Install NPM packages
|
||||
run: npm install
|
||||
working-directory: crates/gen-js
|
||||
- uses: actions/setup-python@v1
|
||||
with:
|
||||
python-version: 3.9
|
||||
- run: pip install mypy wasmtime
|
||||
- if: matrix.mode == 'release'
|
||||
name: Test release build
|
||||
run: cargo test --workspace --release
|
||||
- if: matrix.mode != 'release'
|
||||
name: Test debug build
|
||||
run: cargo test --workspace
|
||||
|
||||
rustfmt:
|
||||
name: Rustfmt
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- uses: actions/checkout@v2
|
||||
- name: Install Rust
|
||||
run: rustup update stable && rustup default stable && rustup component add rustfmt
|
||||
- name: Format source code
|
||||
run: cargo fmt -- --check
|
||||
|
||||
demo:
|
||||
name: Build wit-bindgen demo
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- uses: actions/checkout@v2
|
||||
- run: rustup update stable --no-self-update && rustup default stable
|
||||
- run: rustup target add wasm32-unknown-unknown
|
||||
- run: npm install
|
||||
working-directory: crates/wit-bindgen-demo
|
||||
- run: ./crates/wit-bindgen-demo/build.sh
|
||||
- uses: JamesIves/github-pages-deploy-action@4.1.4
|
||||
with:
|
||||
branch: gh-pages
|
||||
folder: static
|
||||
single-commit: true
|
||||
if: github.event_name == 'push' && github.ref == 'refs/heads/main'
|
||||
6
__wasm/wit-bindgen-sample/wit-bindgen/.gitignore
vendored
Normal file
@@ -0,0 +1,6 @@
|
||||
target/
|
||||
static
|
||||
package-lock.json
|
||||
node_modules
|
||||
ace
|
||||
*.wasm
|
||||
2259
__wasm/wit-bindgen-sample/wit-bindgen/Cargo.lock
generated
Normal file
File diff suppressed because it is too large
34
__wasm/wit-bindgen-sample/wit-bindgen/Cargo.toml
Normal file
@@ -0,0 +1,34 @@
|
||||
[package]
|
||||
name = "wit-bindgen-cli"
|
||||
version = "0.1.0"
|
||||
authors = ["Alex Crichton <alex@alexcrichton.com>"]
|
||||
edition = "2018"
|
||||
|
||||
[workspace]
|
||||
members = [
|
||||
"crates/test-rust-wasm",
|
||||
"crates/wit-bindgen-demo",
|
||||
"crates/wit-component",
|
||||
]
|
||||
resolver = "2"
|
||||
|
||||
[[bin]]
|
||||
name = "wit-bindgen"
|
||||
test = false
|
||||
|
||||
[dependencies]
|
||||
anyhow = "1.0"
|
||||
structopt = { version = "0.3", default-features = false }
|
||||
wit-bindgen-gen-core = { path = 'crates/gen-core' }
|
||||
wit-bindgen-gen-rust-wasm = { path = 'crates/gen-rust-wasm', features = ['structopt'] }
|
||||
wit-bindgen-gen-wasmtime = { path = 'crates/gen-wasmtime', features = ['structopt'] }
|
||||
wit-bindgen-gen-wasmtime-py = { path = 'crates/gen-wasmtime-py', features = ['structopt'] }
|
||||
wit-bindgen-gen-js = { path = 'crates/gen-js', features = ['structopt'] }
|
||||
wit-bindgen-gen-c = { path = 'crates/gen-c', features = ['structopt'] }
|
||||
wit-bindgen-gen-markdown = { path = 'crates/gen-markdown', features = ['structopt'] }
|
||||
wit-bindgen-gen-spidermonkey = { path = 'crates/gen-spidermonkey', features = ['structopt'] }
|
||||
|
||||
# Compiling `spidermonkey.wasm` takes way too long without this.
|
||||
[profile.dev.package.cranelift-codegen]
|
||||
debug-assertions = false
|
||||
opt-level = 2
|
||||
220
__wasm/wit-bindgen-sample/wit-bindgen/LICENSE
Normal file
@@ -0,0 +1,220 @@
|
||||
|
||||
Apache License
|
||||
Version 2.0, January 2004
|
||||
http://www.apache.org/licenses/
|
||||
|
||||
TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
|
||||
|
||||
1. Definitions.
|
||||
|
||||
"License" shall mean the terms and conditions for use, reproduction,
|
||||
and distribution as defined by Sections 1 through 9 of this document.
|
||||
|
||||
"Licensor" shall mean the copyright owner or entity authorized by
|
||||
the copyright owner that is granting the License.
|
||||
|
||||
"Legal Entity" shall mean the union of the acting entity and all
|
||||
other entities that control, are controlled by, or are under common
|
||||
control with that entity. For the purposes of this definition,
|
||||
"control" means (i) the power, direct or indirect, to cause the
|
||||
direction or management of such entity, whether by contract or
|
||||
otherwise, or (ii) ownership of fifty percent (50%) or more of the
|
||||
outstanding shares, or (iii) beneficial ownership of such entity.
|
||||
|
||||
"You" (or "Your") shall mean an individual or Legal Entity
|
||||
exercising permissions granted by this License.
|
||||
|
||||
"Source" form shall mean the preferred form for making modifications,
|
||||
including but not limited to software source code, documentation
|
||||
source, and configuration files.
|
||||
|
||||
"Object" form shall mean any form resulting from mechanical
|
||||
transformation or translation of a Source form, including but
|
||||
not limited to compiled object code, generated documentation,
|
||||
and conversions to other media types.
|
||||
|
||||
"Work" shall mean the work of authorship, whether in Source or
|
||||
Object form, made available under the License, as indicated by a
|
||||
copyright notice that is included in or attached to the work
|
||||
(an example is provided in the Appendix below).
|
||||
|
||||
"Derivative Works" shall mean any work, whether in Source or Object
|
||||
form, that is based on (or derived from) the Work and for which the
|
||||
editorial revisions, annotations, elaborations, or other modifications
|
||||
represent, as a whole, an original work of authorship. For the purposes
|
||||
of this License, Derivative Works shall not include works that remain
|
||||
separable from, or merely link (or bind by name) to the interfaces of,
|
||||
the Work and Derivative Works thereof.
|
||||
|
||||
"Contribution" shall mean any work of authorship, including
|
||||
the original version of the Work and any modifications or additions
|
||||
to that Work or Derivative Works thereof, that is intentionally
|
||||
submitted to Licensor for inclusion in the Work by the copyright owner
|
||||
or by an individual or Legal Entity authorized to submit on behalf of
|
||||
the copyright owner. For the purposes of this definition, "submitted"
|
||||
means any form of electronic, verbal, or written communication sent
|
||||
to the Licensor or its representatives, including but not limited to
|
||||
communication on electronic mailing lists, source code control systems,
|
||||
and issue tracking systems that are managed by, or on behalf of, the
|
||||
Licensor for the purpose of discussing and improving the Work, but
|
||||
excluding communication that is conspicuously marked or otherwise
|
||||
designated in writing by the copyright owner as "Not a Contribution."
|
||||
|
||||
"Contributor" shall mean Licensor and any individual or Legal Entity
|
||||
on behalf of whom a Contribution has been received by Licensor and
|
||||
subsequently incorporated within the Work.
|
||||
|
||||
2. Grant of Copyright License. Subject to the terms and conditions of
|
||||
this License, each Contributor hereby grants to You a perpetual,
|
||||
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
|
||||
copyright license to reproduce, prepare Derivative Works of,
|
||||
publicly display, publicly perform, sublicense, and distribute the
|
||||
Work and such Derivative Works in Source or Object form.
|
||||
|
||||
3. Grant of Patent License. Subject to the terms and conditions of
|
||||
this License, each Contributor hereby grants to You a perpetual,
|
||||
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
|
||||
(except as stated in this section) patent license to make, have made,
|
||||
use, offer to sell, sell, import, and otherwise transfer the Work,
|
||||
where such license applies only to those patent claims licensable
|
||||
by such Contributor that are necessarily infringed by their
|
||||
Contribution(s) alone or by combination of their Contribution(s)
|
||||
with the Work to which such Contribution(s) was submitted. If You
|
||||
institute patent litigation against any entity (including a
|
||||
cross-claim or counterclaim in a lawsuit) alleging that the Work
|
||||
or a Contribution incorporated within the Work constitutes direct
|
||||
or contributory patent infringement, then any patent licenses
|
||||
granted to You under this License for that Work shall terminate
|
||||
as of the date such litigation is filed.
|
||||
|
||||
4. Redistribution. You may reproduce and distribute copies of the
|
||||
Work or Derivative Works thereof in any medium, with or without
|
||||
modifications, and in Source or Object form, provided that You
|
||||
meet the following conditions:
|
||||
|
||||
(a) You must give any other recipients of the Work or
|
||||
Derivative Works a copy of this License; and
|
||||
|
||||
(b) You must cause any modified files to carry prominent notices
|
||||
stating that You changed the files; and
|
||||
|
||||
(c) You must retain, in the Source form of any Derivative Works
|
||||
that You distribute, all copyright, patent, trademark, and
|
||||
attribution notices from the Source form of the Work,
|
||||
excluding those notices that do not pertain to any part of
|
||||
the Derivative Works; and
|
||||
|
||||
(d) If the Work includes a "NOTICE" text file as part of its
|
||||
distribution, then any Derivative Works that You distribute must
|
||||
include a readable copy of the attribution notices contained
|
||||
within such NOTICE file, excluding those notices that do not
|
||||
pertain to any part of the Derivative Works, in at least one
|
||||
of the following places: within a NOTICE text file distributed
|
||||
as part of the Derivative Works; within the Source form or
|
||||
documentation, if provided along with the Derivative Works; or,
|
||||
within a display generated by the Derivative Works, if and
|
||||
wherever such third-party notices normally appear. The contents
|
||||
of the NOTICE file are for informational purposes only and
|
||||
do not modify the License. You may add Your own attribution
|
||||
notices within Derivative Works that You distribute, alongside
|
||||
or as an addendum to the NOTICE text from the Work, provided
|
||||
that such additional attribution notices cannot be construed
|
||||
as modifying the License.
|
||||
|
||||
You may add Your own copyright statement to Your modifications and
|
||||
may provide additional or different license terms and conditions
|
||||
for use, reproduction, or distribution of Your modifications, or
|
||||
for any such Derivative Works as a whole, provided Your use,
|
||||
reproduction, and distribution of the Work otherwise complies with
|
||||
the conditions stated in this License.
|
||||
|
||||
5. Submission of Contributions. Unless You explicitly state otherwise,
|
||||
any Contribution intentionally submitted for inclusion in the Work
|
||||
by You to the Licensor shall be under the terms and conditions of
|
||||
this License, without any additional terms or conditions.
|
||||
Notwithstanding the above, nothing herein shall supersede or modify
|
||||
the terms of any separate license agreement you may have executed
|
||||
with Licensor regarding such Contributions.
|
||||
|
||||
6. Trademarks. This License does not grant permission to use the trade
|
||||
names, trademarks, service marks, or product names of the Licensor,
|
||||
except as required for reasonable and customary use in describing the
|
||||
origin of the Work and reproducing the content of the NOTICE file.
|
||||
|
||||
7. Disclaimer of Warranty. Unless required by applicable law or
|
||||
agreed to in writing, Licensor provides the Work (and each
|
||||
Contributor provides its Contributions) on an "AS IS" BASIS,
|
||||
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
|
||||
implied, including, without limitation, any warranties or conditions
|
||||
of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
|
||||
PARTICULAR PURPOSE. You are solely responsible for determining the
|
||||
appropriateness of using or redistributing the Work and assume any
|
||||
risks associated with Your exercise of permissions under this License.
|
||||
|
||||
8. Limitation of Liability. In no event and under no legal theory,
|
||||
whether in tort (including negligence), contract, or otherwise,
|
||||
unless required by applicable law (such as deliberate and grossly
|
||||
negligent acts) or agreed to in writing, shall any Contributor be
|
||||
liable to You for damages, including any direct, indirect, special,
|
||||
incidental, or consequential damages of any character arising as a
|
||||
result of this License or out of the use or inability to use the
|
||||
Work (including but not limited to damages for loss of goodwill,
|
||||
work stoppage, computer failure or malfunction, or any and all
|
||||
other commercial damages or losses), even if such Contributor
|
||||
has been advised of the possibility of such damages.
|
||||
|
||||
9. Accepting Warranty or Additional Liability. While redistributing
|
||||
the Work or Derivative Works thereof, You may choose to offer,
|
||||
and charge a fee for, acceptance of support, warranty, indemnity,
|
||||
or other liability obligations and/or rights consistent with this
|
||||
License. However, in accepting such obligations, You may act only
|
||||
on Your own behalf and on Your sole responsibility, not on behalf
|
||||
of any other Contributor, and only if You agree to indemnify,
|
||||
defend, and hold each Contributor harmless for any liability
|
||||
incurred by, or claims asserted against, such Contributor by reason
|
||||
of your accepting any such warranty or additional liability.
|
||||
|
||||
END OF TERMS AND CONDITIONS
|
||||
|
||||
APPENDIX: How to apply the Apache License to your work.
|
||||
|
||||
To apply the Apache License to your work, attach the following
|
||||
boilerplate notice, with the fields enclosed by brackets "[]"
|
||||
replaced with your own identifying information. (Don't include
|
||||
the brackets!) The text should be enclosed in the appropriate
|
||||
comment syntax for the file format. We also recommend that a
|
||||
file or class name and description of purpose be included on the
|
||||
same "printed page" as the copyright notice for easier
|
||||
identification within third-party archives.
|
||||
|
||||
Copyright [yyyy] [name of copyright owner]
|
||||
|
||||
Licensed under the Apache License, Version 2.0 (the "License");
|
||||
you may not use this file except in compliance with the License.
|
||||
You may obtain a copy of the License at
|
||||
|
||||
http://www.apache.org/licenses/LICENSE-2.0
|
||||
|
||||
Unless required by applicable law or agreed to in writing, software
|
||||
distributed under the License is distributed on an "AS IS" BASIS,
|
||||
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
See the License for the specific language governing permissions and
|
||||
limitations under the License.
|
||||
|
||||
|
||||
--- LLVM Exceptions to the Apache 2.0 License ----
|
||||
|
||||
As an exception, if, as a result of your compiling your source code, portions
|
||||
of this Software are embedded into an Object form of such source code, you
|
||||
may redistribute such embedded portions in such Object form without complying
|
||||
with the conditions of Sections 4(a), 4(b) and 4(d) of the License.
|
||||
|
||||
In addition, if you combine or link compiled forms of this Software with
|
||||
software that is licensed under the GPLv2 ("Combined Software") and if a
|
||||
court of competent jurisdiction determines that the patent provision (Section
|
||||
3), the indemnity provision (Section 9) or other Section of the License
|
||||
conflicts with the conditions of the GPLv2, you may retroactively and
|
||||
prospectively choose to deem waived or otherwise exclude such Section(s) of
|
||||
the License, but only in their entirety and only with respect to the Combined
|
||||
Software.
|
||||
|
||||
177
__wasm/wit-bindgen-sample/wit-bindgen/README.md
Normal file
@@ -0,0 +1,177 @@
|
||||
<div align="center">
|
||||
<h1><code>wit-bindgen</code></h1>
|
||||
|
||||
<p>
|
||||
<strong>A language bindings generator for <code>wit</code></strong>
|
||||
</p>
|
||||
|
||||
<strong>A <a href="https://bytecodealliance.org/">Bytecode Alliance</a> project</strong>
|
||||
|
||||
<p>
|
||||
<a href="https://github.com/bytecodealliance/wit-bindgen/actions?query=workflow%3ACI"><img src="https://github.com/bytecodealliance/wit-bindgen/workflows/CI/badge.svg" alt="build status" /></a>
|
||||
<img src="https://img.shields.io/badge/rustc-stable+-green.svg" alt="supported rustc stable" />
|
||||
</p>
|
||||
</div>
|
||||
|
||||
## About
|
||||
|
||||
> **Note**: this project is still relatively young and under active development,
|
||||
> with large changes still happening. If you're considering depending on this at
|
||||
> this time it's recommended to reach out to the authors on [zulip] and get more
|
||||
> information first.
|
||||
|
||||
[zulip]: https://bytecodealliance.zulipchat.com/
|
||||
|
||||
This project is a bindings generator framework for WebAssembly programs and
|
||||
embeddings of WebAssembly. This works with `*.wit` files which describe the
|
||||
interface of a module, either imported or exported. For example this project can
|
||||
be used in cases such as:
|
||||
|
||||
* Your language (say, Rust) is compiled to WebAssembly and you'd like to import
|
||||
WASI. This project will generate Rust bindings to import WASI APIs that are
|
||||
described with `*.wit`.
|
||||
|
||||
* Your runtime (say, Wasmtime) wants to then provide WASI functionality to guest
|
||||
programs. This project will generate a Rust `trait` for you to implement for
|
||||
the WASI interface.
|
||||
|
||||
* You're consuming a WebAssembly module (say, in a browser) and you don't want
|
||||
to deal with funky ABI details. You'd use this project to generate JS bindings
|
||||
which give you a TypeScript interface dealing with native JS types for the
|
||||
WebAssembly module described by `*.wit`.
|
||||
|
||||
This project is based on the [interface types
|
||||
proposal](https://github.com/webassembly/interface-types) and the [canonical
|
||||
ABI](https://github.com/WebAssembly/interface-types/pull/132), both of which are
|
||||
at the time of this writing a work in progress. This repository will be
|
||||
following upstream changes. The purpose of `wit-bindgen` is to provide a
|
||||
forwards-compatible toolchain and story for interface types and a canonical ABI.
|
||||
Generated language bindings all use the canonical ABI for communication,
|
||||
enabling WebAssembly modules to be written in any language with support and for
|
||||
WebAssembly modules to be consumed in any environment with language support.
|
||||
|
||||
## Demo
|
||||
|
||||
[View generated bindings
|
||||
online!](https://bytecodealliance.github.io/wit-bindgen/)
|
||||
|
||||
If you're curious to poke around and see what generated bindings look like for a
|
||||
given input `*.wit`, you can explore the generated code online to get an idea
|
||||
of what's being generated and what the glue code looks like.
|
||||
|
||||
## Installation
|
||||
|
||||
At this time a CLI tool is provided mostly for debugging and exploratory
|
||||
purposes. It can be installed with:
|
||||
|
||||
```
|
||||
cargo install --git https://github.com/bytecodealliance/wit-bindgen wit-bindgen-cli
|
||||
```
|
||||
|
||||
This tool is not necessarily intended to be integrated into toolchains. For
|
||||
example, usage in Rust would more likely be done through procedural macros and
|
||||
Cargo dependencies. Usage in a Web application would probably use a version of
|
||||
`wit-bindgen` compiled to WebAssembly and published to NPM.
|
||||
|
||||
For now, though, you can explore what bindings look like in each language
|
||||
through the CLI. Again, if you'd like to depend on this,
|
||||
please reach out on [zulip] so we can figure out a better story than relying on
|
||||
the CLI tool for your use case.
|
||||
|
||||
## Supported Languages
|
||||
|
||||
First here's a list of supported languages for generating a WebAssembly binary
|
||||
which uses interface types. This means that these languages support
|
||||
`*.wit`-defined imports and exports.
|
||||
|
||||
* `rust-wasm` - this is for Rust compiled to WebAssembly, typically using either
|
||||
the `wasm32-wasi` or `wasm32-unknown-unknown` targets depending on your use
|
||||
case. In this mode you'd probably depend on the `wit-bindgen-rust` crate
|
||||
(located at `crates/rust-wasm`) and use the `import!` and `export!` macros to
|
||||
generate code (a short sketch follows this list).
|
||||
|
||||
* `c` - this is for C compiled to WebAssembly, using either of the targets above
|
||||
for Rust as well. With C the `wit-bindgen` CLI tool will emit a `*.h` and a
|
||||
`*.c` file to be compiled into the wasm module.
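
To make the guest-side story above concrete, here is a minimal, hedged sketch of
how the `import!` macro mentioned for `rust-wasm` might be used from a crate
compiled to `wasm32-unknown-unknown`. The crate path (`wit_bindgen_rust`), the
`browser.wit` interface, its `log` function, and the generated module name are
all assumptions for illustration, not taken from this commit.

```rust
// Hedged sketch -- all names here are assumptions, not taken from this commit.
// Generate Rust bindings for an interface this module wants to import and call.
wit_bindgen_rust::import!("browser.wit");

fn greet() {
    // The macro is described above as generating code for each function in the
    // interface; a hypothetical `log: func(msg: string)` in browser.wit would
    // then be callable roughly like this.
    browser::log("hello from wasm");
}

// Exporting works the other way around: `wit_bindgen_rust::export!(...)` is
// described above as generating code for functions this module defines, which
// the module must then implement.
```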
|
||||
|
||||
This repository also supports a number of host languages/runtimes which can be
|
||||
used to consume WebAssembly modules that use interface types. These modules need
|
||||
to follow the canonical ABI for their exports/imports:
|
||||
|
||||
* `wasmtime` - this is for Rust users using the `wasmtime` crate. This generator
|
||||
is used through the `wit-bindgen-wasmtime` crate (located at
|
||||
`crates/wasmtime`) and, like the compiled-to-wasm Rust support, has an
|
||||
`import!` and an `export!` macro for generating code (a short sketch follows this list).
|
||||
|
||||
* `js` - this is for JavaScript users executing WebAssembly modules. This could
|
||||
be in a browser, Node.js, or Deno. In theory this covers browser use cases
|
||||
like web workers and such as well. In this mode the `wit-bindgen` CLI tool
|
||||
will emit a `*.js` and a `*.d.ts` file describing the interface and providing
|
||||
necessary runtime support in JS to implement the canonical ABI. Note that the
|
||||
intended long-term integration of this language is to compile `wit-bindgen`
|
||||
itself to WebAssembly and publish NPM packages for popular JS build systems to
|
||||
integrate `wit-bindgen` into JS build processes.
|
||||
|
||||
* `wasmtime-py` - this is for Python users using the `wasmtime` PyPI package.
|
||||
This uses Wasmtime under the hood but you get to write Python when providing
|
||||
imports to WebAssembly modules or consuming modules using interface types. This
|
||||
generates a `*.py` file which is annotated with types for usage in `mypy` or
|
||||
other type-checkers.
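
On the host side, a minimal hedged sketch of the `wit-bindgen-wasmtime` macros
referenced in the `wasmtime` entry might look like the following. The macro
crate path and the `*.wit` file names are assumptions, and "import"/"export"
here follow the definitions given later in this README.

```rust
// Hedged sketch -- names are assumptions, not taken from this commit.

// "export": the host defines the functions in this interface for wasm guests
// to call. The README above describes this as generating a Rust trait for the
// host to implement.
wit_bindgen_wasmtime::export!("host-functions.wit");

// "import": the host wants to call functions implemented by a wasm module;
// this generates typed wrappers for calling into the instantiated module.
wit_bindgen_wasmtime::import!("my-interface.wit");
```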
|
||||
|
||||
All generators support the `--import` and `--export` flags in the `wit-bindgen`
|
||||
CLI tool:
|
||||
|
||||
```
|
||||
$ wit-bindgen js --import browser.wit
|
||||
$ wit-bindgen rust-wasm --export my-interface.wit
|
||||
$ wit-bindgen wasmtime --import host-functions.wit
|
||||
```
|
||||
|
||||
Here "import" means "I want to import and call the functions in this interface"
|
||||
and "export" means "I want to define the functions in this interface for others
|
||||
to call".
|
||||
|
||||
Finally in a sort of "miscellaneous" category the `wit-bindgen` CLI also
|
||||
supports:
|
||||
|
||||
* `markdown` - generates a `*.md` and a `*.html` file with readable
|
||||
documentation rendered from the comments in the source `*.wit` file.
|
||||
|
||||
Note that the list of supported languages here is a snapshot in time and is not
|
||||
final. The purpose of the interface-types proposal is to be language agnostic
|
||||
both in how WebAssembly modules are written as well as how they are consumed. If
|
||||
you have a runtime that isn't listed here or you're compiling to WebAssembly and
|
||||
your language isn't listed here, it doesn't mean that it will never be
|
||||
supported! A language binding generator is intended to be not the hardest thing
|
||||
in the world (but unfortunately also not the easiest) to write, and the crates
|
||||
and support in this repository mostly exist to make writing generators as easy
|
||||
as possible.
|
||||
|
||||
Some other languages and runtimes, for example, that don't have support in
|
||||
`wit-bindgen` today but are possible in the future (and may get written here
|
||||
too) are:
|
||||
|
||||
* `wasmtime-go` - same as for `wasmtime-py` but for Go. Basically for Go users
|
||||
using the [`wasmtime-go`
|
||||
package](https://github.com/bytecodealliance/wasmtime-go) who want to work
|
||||
with interface types rather than raw pointers/memories/etc.
|
||||
|
||||
* `wasmtime-cpp` - again the same as for `wasmtime-py`, but for users of the
|
||||
[`wasmtime-cpp` header file](https://github.com/alexcrichton/wasmtime-cpp) to
|
||||
use interface types from C++.
|
||||
|
||||
* JS - while host runtime support is provided for JS today it should also be
|
||||
supported for
|
||||
[JS-compiled-to-WebAssembly](https://bytecodealliance.org/articles/making-javascript-run-fast-on-webassembly).
|
||||
For example a `*.d.ts` file could be generated for what JS projects could
|
||||
import and then corresponding glue code for the engine-compiled-to-wasm would
|
||||
also be generated. This means that you could use both JS-in-wasm but also JS
|
||||
as a host (or more realistically another runtime like Wasmtime since if you're
|
||||
running in a JS environment you're probably best off running the JS there
|
||||
instead).
|
||||
|
||||
Note that this is not an exclusive list, only intended to give you an idea of
|
||||
what other bindings could look like. There's a plethora of runtimes and
|
||||
languages that compile to WebAssembly, and interface types should be able to
|
||||
work with all of them and it's theoretically just some work-hours away from
|
||||
having support in `wit-bindgen`.
|
||||
60
__wasm/wit-bindgen-sample/wit-bindgen/TODO.md
Normal file
@@ -0,0 +1,60 @@
|
||||
# All generators
|
||||
|
||||
* handle support in exports
|
||||
|
||||
* push/pull-buffer support in exports
|
||||
|
||||
# wasmtime
|
||||
|
||||
* buffer-in-buffer doesn't work. Doesn't work because we can't get a re-access
|
||||
of the transaction to add more buffers into it after-the-fact.
|
||||
|
||||
* Needs more testing on big-endian.
|
||||
|
||||
* Features from wiggle:
|
||||
* use `GuestError::InFunc` more liberally
|
||||
- stores/loads
|
||||
- `try_from` conversions
|
||||
* generate just the trait (??? what to do about `wasmtime` dep ???)
|
||||
|
||||
# JS
|
||||
|
||||
* Is there a better representation for general `variant` types? Currently it's
|
||||
`{ tag: string, val: T }` but that seems like it's probably sub-par. There's
|
||||
specializations for `option<T>` and `enum` variants, but that's it.
|
||||
|
||||
* Is there a better representation for flags than simply an integer?
|
||||
|
||||
* Should functions returning `expected<T, E>` get translated in JS to functions
|
||||
that return `T` and throw `E`?
|
||||
|
||||
* Adding imports to an import object is clunky because you need to also pass in
|
||||
a closure which extracts values from the raw instance. Unsure how to make this
|
||||
less clunky though.
|
||||
|
||||
* Needs more testing on big-endian. Specifically slice copies are probably not
|
||||
correct.
|
||||
|
||||
* Style with names needs to be respected, currently things are using
|
||||
`to_snake_case` but I think JS prefers camelCase?
|
||||
|
||||
* The `bigint` type is strict in that it does not accept plain `number` types to
|
||||
work with it. Should generated bindings be more flexible though and work with
|
||||
`number` in addition to `bigint`?
|
||||
|
||||
* Host-handle types are always ascribed as `any` but ideally we'd do better than
|
||||
that and assign them types. Maybe the type should be imported from somewhere
|
||||
else?
|
||||
|
||||
* Lifting/lowering of variants can almost surely use a more compressed technique
|
||||
which generates less code.
|
||||
|
||||
* Enums are handled in lowering as either strings or numbers, but should only
|
||||
numbers be handled here? Does anyone pass around strings as enum values?
|
||||
|
||||
* Exported handle types in JS aren't nominal. As of this writing they all only
|
||||
have a `drop` and a `clone` method so they're interchangeable from `tsc`'s
|
||||
perspective. Ideally these would be nominal separate types.
|
||||
|
||||
* Imported handle types show up as `any` in TS, unsure how to plumb through
|
||||
actual types to get that actually typed.
|
||||
550
__wasm/wit-bindgen-sample/wit-bindgen/WIT.md
Normal file
@@ -0,0 +1,550 @@
|
||||
# The `*.wit` format
|
||||
|
||||
This is intended to document the `*.wit` format as it exists today. The goal is
|
||||
to provide an overview to understand what features `wit` files give you and how
|
||||
they're structured. This isn't intended to be a formal grammar, although it's
|
||||
expected that one day we'll have a formal grammar for `*.wit` files.
|
||||
|
||||
If you're curious to give things a spin try out the [online
|
||||
demo](https://bytecodealliance.github.io/wit-bindgen/) of `wit-bindgen` where
|
||||
you can input `*.wit` on the left and see output of generated bindings for
|
||||
languages on the right. If you're looking to start you can try out the
|
||||
"markdown" output mode which generates documentation for the input document on
|
||||
the left.
|
||||
|
||||
## Lexical structure
|
||||
|
||||
The `wit` format is a curly-brace-based format where whitespace is optional (but
|
||||
recommended). It is intended to be easily human readable and supports features
|
||||
like comments, multi-line comments, and custom identifiers. A `wit` document
|
||||
is parsed as a unicode string, and when stored in a file is expected to be
|
||||
encoded as UTF-8.
|
||||
|
||||
Additionally, wit files must not contain any bidirectional override scalar values,
|
||||
control codes other than newline, carriage return, and horizontal tab, or
|
||||
codepoints that Unicode officially deprecates or strongly discourages.
|
||||
|
||||
The current structure of tokens are:
|
||||
|
||||
```wit
|
||||
token ::= whitespace
|
||||
| comment
|
||||
| operator
|
||||
| keyword
|
||||
| identifier
|
||||
```
|
||||
|
||||
Whitespace and comments are ignored when parsing structures defined elsewhere
|
||||
here.
|
||||
|
||||
### Whitespace
|
||||
|
||||
A `whitespace` token in `*.wit` is a space, a newline, a carriage return, or a
|
||||
tab character:
|
||||
|
||||
```wit
|
||||
whitespace ::= ' ' | '\n' | '\r' | '\t'
|
||||
```
|
||||
|
||||
### Comments
|
||||
|
||||
A `comment` token in `*.wit` is either a line comment preceded with `//` which
|
||||
ends at the next newline (`\n`) character or it's a block comment which starts
|
||||
with `/*` and ends with `*/`. Note that block comments are allowed to be nested
|
||||
and their delimiters must be balanced.
|
||||
|
||||
```wit
|
||||
comment ::= '//' character-that-isnt-a-newline*
|
||||
| '/*' any-unicode-character* '*/'
|
||||
```
|
||||
|
||||
There is a special type of comment called `documentation comment`. A
|
||||
`doc-comment` is either a line comment preceded with `///` which ends at the next
|
||||
newline (`\n`) character or it's a block comment which starts with `/**` and ends
|
||||
with `*/`. Note that block comments are allowed to be nested and their delimiters
|
||||
must be balanced.
|
||||
|
||||
```wit
|
||||
doc-comment ::= '///' character-that-isnt-a-newline*
|
||||
| '/**' any-unicode-character* '*/'
|
||||
```
|
||||
|
||||
### Operators
|
||||
|
||||
There are some common operators in the lexical structure of `wit` used for
|
||||
various constructs. Note that delimiters such as `{` and `(` must all be
|
||||
balanced.
|
||||
|
||||
```wit
|
||||
operator ::= '=' | ',' | ':' | ';' | '(' | ')' | '{' | '}' | '<' | '>' | '*' | '->'
|
||||
```
|
||||
|
||||
### Keywords
|
||||
|
||||
Certain identifiers are reserved for use in `wit` documents and cannot be used
|
||||
bare as an identifier. These are used to help parse the format, and the list of
|
||||
keywords is still in flux at this time but the current set is:
|
||||
|
||||
```wit
|
||||
keyword ::= 'use'
|
||||
| 'type'
|
||||
| 'resource'
|
||||
| 'func'
|
||||
| 'u8' | 'u16' | 'u32' | 'u64'
|
||||
| 's8' | 's16' | 's32' | 's64'
|
||||
| 'float32' | 'float64'
|
||||
| 'char'
|
||||
| 'handle'
|
||||
| 'record'
|
||||
| 'enum'
|
||||
| 'flags'
|
||||
| 'variant'
|
||||
| 'union'
|
||||
| 'bool'
|
||||
| 'string'
|
||||
| 'option'
|
||||
| 'list'
|
||||
| 'expected'
|
||||
| 'unit'
|
||||
| 'as'
|
||||
| 'from'
|
||||
| 'static'
|
||||
| 'interface'
|
||||
| 'tuple'
|
||||
| 'async'
|
||||
| 'future'
|
||||
| 'stream'
|
||||
```
|
||||
|
||||
## Top-level items
|
||||
|
||||
A `wit` document is a sequence of items specified at the top level. These items
|
||||
come one after another and it's recommended to separate them with newlines for
|
||||
readability but this isn't required.
|
||||
|
||||
## Item: `use`
|
||||
|
||||
A `use` statement enables importing type or resource definitions from other
|
||||
wit documents. The structure of a use statement is:
|
||||
|
||||
```wit
|
||||
use * from other-file
|
||||
use { a, list, of, names } from another-file
|
||||
use { name as other-name } from yet-another-file
|
||||
```
|
||||
|
||||
Specifically the structure of this is:
|
||||
|
||||
```wit
|
||||
use-item ::= 'use' use-names 'from' id
|
||||
|
||||
use-names ::= '*'
|
||||
| '{' use-names-list '}'
|
||||
|
||||
use-names-list ::= use-names-item
|
||||
| use-names-item ',' use-names-list?
|
||||
|
||||
use-names-item ::= id
|
||||
| id 'as' id
|
||||
```
|
||||
|
||||
Note: `use-names-list` always contains at least one `use-names-item`; the optional trailing `use-names-list?` allows a trailing comma after the last item.
|
||||
|
||||
## Items: type
|
||||
|
||||
There are a number of methods of defining types in a `wit` document, and all of
|
||||
the types that can be defined in `wit` are intended to map directly to types in
|
||||
the [interface types specification](https://github.com/WebAssembly/interface-types).
|
||||
|
||||
### Item: `type` (alias)
|
||||
|
||||
A `type` statement declares a new named type in the `wit` document. This name can
|
||||
be later referred to when defining items using this type. This construct is
|
||||
similar to a type alias in other languages
|
||||
|
||||
```wit
|
||||
type my-awesome-u32 = u32
|
||||
type my-complicated-tuple = tuple<u32, s32, string>
|
||||
```
|
||||
|
||||
Specifically the structure of this is:
|
||||
|
||||
```wit
|
||||
type-item ::= 'type' id '=' ty
|
||||
```
|
||||
|
||||
### Item: `record` (bag of named fields)
|
||||
|
||||
A `record` statement declares a new named structure with named fields. Records
|
||||
are similar to a `struct` in many languages. Instances of a `record` always have
|
||||
their fields defined.
|
||||
|
||||
```wit
|
||||
record pair {
|
||||
x: u32,
|
||||
y: u32,
|
||||
}
|
||||
|
||||
record person {
|
||||
name: string,
|
||||
age: u32,
|
||||
has-lego-action-figure: bool,
|
||||
}
|
||||
```
|
||||
|
||||
Specifically the structure of this is:
|
||||
|
||||
```wit
|
||||
record-item ::= 'record' id '{' record-fields '}'
|
||||
|
||||
record-fields ::= record-field
|
||||
| record-field ',' record-fields?
|
||||
|
||||
record-field ::= id ':' ty
|
||||
```
|
||||
|
||||
### Item: `flags` (bag-of-bools)
|
||||
|
||||
A `flags` statement defines a new `record`-like structure where all the fields
|
||||
are booleans. The `flags` type is distinct from `record` in that it typically is
|
||||
represented as a bit flags representation in the canonical ABI. For the purposes
|
||||
of type-checking, however, it's simply syntactic sugar for a record-of-booleans.
|
||||
|
||||
```wit
|
||||
flags properties {
|
||||
lego,
|
||||
marvel-superhero,
|
||||
supervillan,
|
||||
}
|
||||
|
||||
// type-wise equivalent to:
|
||||
//
|
||||
// record properties {
|
||||
// lego: bool,
|
||||
// marvel-superhero: bool,
|
||||
// supervillan: bool,
|
||||
// }
|
||||
```
|
||||
|
||||
Specifically the structure of this is:
|
||||
|
||||
```wit
|
||||
flags-items ::= 'flags' id '{' flags-fields '}'
|
||||
|
||||
flags-fields ::= id
|
||||
| id ',' flags-fields?
|
||||
```
|
||||
|
||||
### Item: `variant` (one of a set of types)
|
||||
|
||||
A `variant` statement defines a new type where instances of the type match
|
||||
exactly one of the variants listed for the type. This is similar to a "sum" type
|
||||
in algebraic datatypes (or an `enum` in Rust if you're familiar with it).
|
||||
Variants can be thought of as tagged unions as well.
|
||||
|
||||
Each case of a variant can have an optional type associated with it which is
|
||||
present when values have that particular case's tag.
|
||||
|
||||
All `variant` types must have at least one case specified.
|
||||
|
||||
```wit
|
||||
variant filter {
|
||||
all,
|
||||
none,
|
||||
some(list<string>),
|
||||
}
|
||||
```
|
||||
|
||||
Specifically the structure of this is:
|
||||
|
||||
```wit
|
||||
variant-items ::= 'variant' id '{' variant-cases '}'
|
||||
|
||||
variant-cases ::= variant-case
|
||||
| variant-case ',' variant-cases?
|
||||
|
||||
variant-case ::= id
|
||||
| id '(' ty ')'
|
||||
```
|
||||
|
||||
### Item: `enum` (variant but with no payload)
|
||||
|
||||
An `enum` statement defines a new type which is semantically equivalent to a
|
||||
`variant` where none of the cases have a payload type. This is special-cased,
|
||||
however, to possibly have a different representation in the language ABIs or
|
||||
have different bindings generated for languages.
|
||||
|
||||
```wit
|
||||
enum color {
|
||||
red,
|
||||
green,
|
||||
blue,
|
||||
yellow,
|
||||
other,
|
||||
}
|
||||
|
||||
// type-wise equivalent to:
|
||||
//
|
||||
// variant color {
|
||||
// red,
|
||||
// green,
|
||||
// blue,
|
||||
// yellow,
|
||||
// other,
|
||||
// }
|
||||
```
|
||||
|
||||
Specifically the structure of this is:
|
||||
|
||||
```wit
|
||||
enum-items ::= 'enum' id '{' enum-cases '}'
|
||||
|
||||
enum-cases ::= id
|
||||
| id ',' enum-cases?
|
||||
```
|
||||
|
||||
### Item: `union` (variant but with no case names)
|
||||
|
||||
A `union` statement defines a new type which is semantically equivalent to a
|
||||
`variant` where all of the cases have a payload type and the case names are
|
||||
numerical. This is special-cased, however, to possibly have a different
|
||||
representation in the language ABIs or have different bindings generated for
|
||||
languages.
|
||||
|
||||
```wit
|
||||
union configuration {
|
||||
string,
|
||||
list<string>,
|
||||
}
|
||||
|
||||
// type-wise equivalent to:
|
||||
//
|
||||
// variant configuration {
|
||||
// 0(string),
|
||||
// 1(list<string>),
|
||||
// }
|
||||
```
|
||||
|
||||
Specifically the structure of this is:
|
||||
|
||||
```wit
|
||||
union-items ::= 'union' id '{' union-cases '}'
|
||||
|
||||
union-cases ::= ty
|
||||
| ty ',' union-cases?
|
||||
```
|
||||
|
||||
## Item: `func`
|
||||
|
||||
Functions can also be defined in a `*.wit` document. Functions have a name,
|
||||
parameters, and results. Functions can optionally also be declared as `async`
|
||||
functions.
|
||||
|
||||
```wit
|
||||
thunk: func()
|
||||
fibonacci: func(n: u32) -> u32
|
||||
sleep: async func(ms: u64)
|
||||
```
|
||||
|
||||
Specifically functions have the structure:
|
||||
|
||||
```wit
|
||||
func-item ::= id ':' 'async'? 'func' '(' func-args ')' func-ret
|
||||
|
||||
func-args ::= func-arg
|
||||
| func-arg ',' func-args?
|
||||
|
||||
func-arg ::= id ':' ty
|
||||
|
||||
func-ret ::= nil
|
||||
| '->' ty
|
||||
```
|
||||
|
||||
## Item: `resource`
|
||||
|
||||
Resources represent a value that has a hidden representation not known to the
|
||||
outside world. This means that the resource is operated on through a "handle" (a
|
||||
pointer of sorts). Resources also have ownership associated with them and
|
||||
languages will have to manage the lifetime of resources manually (they're
|
||||
similar to file descriptors).
|
||||
|
||||
Resources can also optionally have functions defined within them. This adds an
|
||||
implicit "self" argument as the first argument to each function of the same type
|
||||
to each function, unless the function is flagged as `static`.
|
||||
|
||||
```wit
|
||||
resource file-descriptor
|
||||
|
||||
resource request {
|
||||
static new: func() -> request
|
||||
|
||||
body: async func() -> list<u8>
|
||||
headers: func() -> list<string>
|
||||
}
|
||||
```
|
||||
|
||||
Specifically resources have the structure:
|
||||
|
||||
```wit
|
||||
resource-item ::= 'resource' id resource-contents
|
||||
|
||||
resource-contents ::= nil
|
||||
| '{' resource-defs '}'
|
||||
|
||||
resource-defs ::= resource-def resource-defs?
|
||||
|
||||
resource-def ::= 'static'? func-item
|
||||
```
|
||||
|
||||
## Types
|
||||
|
||||
As mentioned previously the intention of `wit` is to allow defining types
|
||||
corresponding to the interface types specification. Many of the top-level items
|
||||
above are introducing new named types but "anonymous" types are also supported,
|
||||
such as built-ins. For example:
|
||||
|
||||
```wit
|
||||
type number = u32
|
||||
type fallible-function-result = expected<u32, string>
|
||||
type headers = list<string>
|
||||
```
|
||||
|
||||
Specifically the following types are available:
|
||||
|
||||
```wit
|
||||
ty ::= 'u8' | 'u16' | 'u32' | 'u64'
|
||||
| 's8' | 's16' | 's32' | 's64'
|
||||
| 'float32' | 'float64'
|
||||
| 'char'
|
||||
| 'bool'
|
||||
| 'string'
|
||||
| 'unit'
|
||||
| tuple
|
||||
| list
|
||||
| option
|
||||
| expected
|
||||
| future
|
||||
| stream
|
||||
| id
|
||||
|
||||
tuple ::= 'tuple' '<' tuple-list '>'
|
||||
tuple-list ::= ty
|
||||
| ty ',' tuple-list?
|
||||
|
||||
list ::= 'list' '<' ty '>'
|
||||
|
||||
option ::= 'option' '<' ty '>'
|
||||
|
||||
expected ::= 'expected' '<' ty ',' ty '>'
|
||||
|
||||
future ::= 'future' '<' ty '>'
|
||||
|
||||
stream ::= 'stream' '<' ty ',' ty '>'
|
||||
```
|
||||
|
||||
The `tuple` type is semantically equivalent to a `record` with numerical fields,
|
||||
but it frequently can have language-specific meaning so it's provided as a
|
||||
first-class type.
|
||||
|
||||
Similarly the `option` and `expected` types are semantically equivalent to the
|
||||
variants:
|
||||
|
||||
```wit
|
||||
variant option {
|
||||
none,
|
||||
some(ty),
|
||||
}
|
||||
|
||||
variant expected {
|
||||
ok(ok-ty),
|
||||
err(err-ty),
|
||||
}
|
||||
```
|
||||
|
||||
These types are used so frequently, and so often have language-specific
|
||||
meanings, that they're also provided as first-class types.
|
||||
|
||||
Finally the last case of a `ty` is simply an `id` which is intended to refer to
|
||||
another type or resource defined in the document. Note that definitions can come
|
||||
through a `use` statement or they can be defined locally.
|
||||
|
||||
## Identifiers
|
||||
|
||||
Identifiers in `wit` can be defined with two different forms. The first is a
|
||||
lower-case [stream-safe] [NFC] [kebab-case] identifier where each part delimited
|
||||
by '-'s starts with a `XID_Start` scalar value with a zero Canonical Combining
|
||||
Class:
|
||||
|
||||
```wit
|
||||
foo: func(bar: u32)
|
||||
|
||||
red-green-blue: func(r: u32, g: u32, b: u32)
|
||||
```
|
||||
|
||||
This form can't name identifiers which have the same name as wit keywords, so
|
||||
the second form is the same syntax with the same restrictions as the first, but
|
||||
prefixed with '%':
|
||||
|
||||
```wit
|
||||
%foo: func(%bar: u32)
|
||||
|
||||
%red-green-blue: func(%r: u32, %g: u32, %b: u32)
|
||||
|
||||
// This form also supports identifiers that would otherwise be keywords.
|
||||
%variant: func(%enum: s32)
|
||||
```
|
||||
|
||||
[kebab-case]: https://en.wikipedia.org/wiki/Letter_case#Kebab_case
|
||||
[Unicode identifier]: http://www.unicode.org/reports/tr31/
|
||||
[stream-safe]: https://unicode.org/reports/tr15/#Stream_Safe_Text_Format
|
||||
[NFC]: https://unicode.org/reports/tr15/#Norm_Forms
|
||||
|
||||
## Name resolution
|
||||
|
||||
A `wit` document is resolved after parsing to ensure that all names resolve
|
||||
correctly. For example this is not a valid `wit` document:
|
||||
|
||||
```wit
|
||||
type foo = bar // ERROR: name `bar` not defined
|
||||
```
|
||||
|
||||
Type references primarily happen through the `id` production of `ty`.
|
||||
|
||||
Additionally names in a `wit` document can only be defined once:
|
||||
|
||||
```wit
|
||||
type foo = u32
|
||||
type foo = u64 // ERROR: name `foo` already defined
|
||||
```
|
||||
|
||||
Names do not need to be defined before they're used (unlike in C or C++);
|
||||
it's ok to define a type after it's used:
|
||||
|
||||
```wit
|
||||
type foo = bar
|
||||
|
||||
record bar {
|
||||
age: u32,
|
||||
}
|
||||
```
|
||||
|
||||
Types, however, cannot be recursive:
|
||||
|
||||
```wit
|
||||
type foo = foo // ERROR: cannot refer to itself
|
||||
|
||||
record bar1 {
|
||||
a: bar2,
|
||||
}
|
||||
|
||||
record bar2 {
|
||||
a: bar1, // ERROR: record cannot refer to itself
|
||||
}
|
||||
```
|
||||
|
||||
The intention of `wit` is that it maps down to interface types, so the goal of
|
||||
name resolution is to effectively create the type section of a wasm module using
|
||||
interface types. The restrictions about self-referential types and such come
|
||||
from how types can be defined in the interface types section. Additionally
|
||||
definitions of named types such as `record foo { ... }` are intended to map
|
||||
roughly to declarations in the type section of new types.
|
||||
@@ -0,0 +1,17 @@
|
||||
[package]
|
||||
name = "wit-bindgen-gen-c"
|
||||
version = "0.1.0"
|
||||
authors = ["Alex Crichton <alex@alexcrichton.com>"]
|
||||
edition = "2018"
|
||||
|
||||
[lib]
|
||||
doctest = false
|
||||
test = false
|
||||
|
||||
[dependencies]
|
||||
wit-bindgen-gen-core = { path = '../gen-core', version = '0.1.0' }
|
||||
heck = "0.3"
|
||||
structopt = { version = "0.3", default-features = false, optional = true }
|
||||
|
||||
[dev-dependencies]
|
||||
test-helpers = { path = '../test-helpers', features = ['wit-bindgen-gen-c'] }
|
||||
@@ -0,0 +1,4 @@
|
||||
fn main() {
|
||||
println!("cargo:rerun-if-changed=build.rs");
|
||||
// this build script is currently only here so OUT_DIR is set for testing.
|
||||
}
|
||||
2260
__wasm/wit-bindgen-sample/wit-bindgen/crates/gen-c/src/lib.rs
Normal file
File diff suppressed because it is too large
@@ -0,0 +1,58 @@
|
||||
use std::env;
|
||||
use std::path::{Path, PathBuf};
|
||||
use std::process::Command;
|
||||
|
||||
mod imports {
|
||||
test_helpers::codegen_c_import!(
|
||||
// ...
|
||||
"*.wit"
|
||||
|
||||
// TODO: implement async support
|
||||
"!async-functions.wit"
|
||||
);
|
||||
}
|
||||
|
||||
mod exports {
|
||||
test_helpers::codegen_c_export!(
|
||||
"*.wit"
|
||||
|
||||
// TODO: implement async support
|
||||
"!async-functions.wit"
|
||||
|
||||
// TODO: these use push/pull buffer in exports which isn't implemented
|
||||
// yet
|
||||
"!wasi-next.wit"
|
||||
"!host.wit"
|
||||
);
|
||||
}
|
||||
|
||||
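// Compiles the generated C source (`<name>.c` in `dir`) with the WASI SDK's
// clang, treating warnings as errors, and panics -- failing the test -- if the
// compiler can't be spawned or compilation does not succeed.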
fn verify(dir: &str, name: &str) {
|
||||
let dir = Path::new(dir);
|
||||
let path = PathBuf::from(env::var_os("WASI_SDK_PATH").unwrap());
|
||||
let mut cmd = Command::new(path.join("bin/clang"));
|
||||
cmd.arg("--sysroot").arg(path.join("share/wasi-sysroot"));
|
||||
cmd.arg(dir.join(format!("{}.c", name)));
|
||||
cmd.arg("-I").arg(dir);
|
||||
cmd.arg("-Wall")
|
||||
.arg("-Wextra")
|
||||
.arg("-Werror")
|
||||
.arg("-Wno-unused-parameter");
|
||||
cmd.arg("-c");
|
||||
cmd.arg("-o").arg(dir.join("obj.o"));
|
||||
|
||||
println!("{:?}", cmd);
|
||||
let output = match cmd.output() {
|
||||
Ok(output) => output,
|
||||
Err(e) => panic!("failed to spawn compiler: {}", e),
|
||||
};
|
||||
|
||||
if output.status.success() {
|
||||
return;
|
||||
}
|
||||
println!("status: {}", output.status);
|
||||
println!("stdout: ------------------------------------------");
|
||||
println!("{}", String::from_utf8_lossy(&output.stdout));
|
||||
println!("stderr: ------------------------------------------");
|
||||
println!("{}", String::from_utf8_lossy(&output.stderr));
|
||||
panic!("failed to compile");
|
||||
}
|
||||
@@ -0,0 +1,12 @@
|
||||
[package]
|
||||
name = "wit-bindgen-gen-core"
|
||||
version = "0.1.0"
|
||||
authors = ["Alex Crichton <alex@alexcrichton.com>"]
|
||||
edition = "2018"
|
||||
|
||||
[lib]
|
||||
doctest = false
|
||||
|
||||
[dependencies]
|
||||
wit-parser = { path = '../parser' }
|
||||
anyhow = "1"
|
||||
505
__wasm/wit-bindgen-sample/wit-bindgen/crates/gen-core/src/lib.rs
Normal file
@@ -0,0 +1,505 @@
|
||||
use anyhow::Result;
|
||||
use std::collections::{btree_map::Entry, BTreeMap, HashMap};
|
||||
use std::fmt::{self, Write};
|
||||
use std::ops::Deref;
|
||||
use std::path::Path;
|
||||
use wit_parser::*;
|
||||
|
||||
pub use wit_parser;
|
||||
mod ns;
|
||||
|
||||
pub use ns::Ns;
|
||||
|
||||
/// This is the direction from the user's perspective. Are we importing
|
||||
/// functions to call, or defining functions and exporting them to be called?
|
||||
///
|
||||
/// This is only used outside of `Generator` implementations. Inside of
|
||||
/// `Generator` implementations, the `Direction` is translated to an
|
||||
/// `AbiVariant` instead. The ABI variant is usually the same as the
|
||||
/// `Direction`, but it's different in the case of the Wasmtime host bindings:
|
||||
///
|
||||
/// In a wasm-calling-wasm use case, one wasm module would use the `Import`
|
||||
/// ABI, the other would use the `Export` ABI, and there would be an adapter
|
||||
/// layer between the two that translates from one ABI to the other.
|
||||
///
|
||||
/// But with wasm-calling-host, we don't go through a separate adapter layer;
|
||||
/// the binding code we generate on the host side just does everything itself.
|
||||
/// So when the host is conceptually "exporting" a function to wasm, it uses
|
||||
/// the `Import` ABI so that wasm can also use the `Import` ABI and import it
|
||||
/// directly from the host.
|
||||
///
|
||||
/// These are all implementation details; from the user perspective, and
|
||||
/// from the perspective of everything outside of `Generator` implementations,
|
||||
/// `export` means I'm exporting functions to be called, and `import` means I'm
|
||||
/// importing functions that I'm going to call, in both wasm modules and host
|
||||
/// code. The enum here represents this user perspective.
|
||||
#[derive(Copy, Clone, Eq, PartialEq)]
|
||||
pub enum Direction {
|
||||
Import,
|
||||
Export,
|
||||
}
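To make the host-bindings exception above concrete, here is a minimal sketch of the rule the comment describes. Only `Direction` comes from this file; the `Side` and `AbiVariant` enums are stand-ins invented for the sketch (the crate's real ABI-variant type lives elsewhere), so this illustrates the stated rule rather than the actual API.

```rust
/// Which kind of bindings are being generated; invented for this sketch only.
enum Side {
    WasmModule,
    WasmtimeHost,
}

/// Stand-in for the ABI-variant type the comment refers to; defined locally so
/// the sketch stays self-contained.
enum AbiVariant {
    Import,
    Export,
}

/// Encodes just the rule spelled out above: the ABI normally follows the
/// user-facing direction, except that Wasmtime host bindings which
/// conceptually *export* a function still use the import ABI, so a wasm
/// module can import it directly from the host.
fn abi_variant(side: Side, dir: Direction) -> AbiVariant {
    match (side, dir) {
        (Side::WasmtimeHost, Direction::Export) => AbiVariant::Import,
        (_, Direction::Import) => AbiVariant::Import,
        (_, Direction::Export) => AbiVariant::Export,
    }
}
```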
|
||||
|
||||
pub trait Generator {
|
||||
fn preprocess_all(&mut self, imports: &[Interface], exports: &[Interface]) {
|
||||
drop((imports, exports));
|
||||
}
|
||||
|
||||
fn preprocess_one(&mut self, iface: &Interface, dir: Direction) {
|
||||
drop((iface, dir));
|
||||
}
|
||||
|
||||
fn type_record(
|
||||
&mut self,
|
||||
iface: &Interface,
|
||||
id: TypeId,
|
||||
name: &str,
|
||||
record: &Record,
|
||||
docs: &Docs,
|
||||
);
|
||||
fn type_flags(&mut self, iface: &Interface, id: TypeId, name: &str, flags: &Flags, docs: &Docs);
|
||||
    fn type_tuple(&mut self, iface: &Interface, id: TypeId, name: &str, tuple: &Tuple, docs: &Docs);
|
||||
fn type_variant(
|
||||
&mut self,
|
||||
iface: &Interface,
|
||||
id: TypeId,
|
||||
name: &str,
|
||||
variant: &Variant,
|
||||
docs: &Docs,
|
||||
);
|
||||
fn type_option(
|
||||
&mut self,
|
||||
iface: &Interface,
|
||||
id: TypeId,
|
||||
name: &str,
|
||||
payload: &Type,
|
||||
docs: &Docs,
|
||||
);
|
||||
fn type_expected(
|
||||
&mut self,
|
||||
iface: &Interface,
|
||||
id: TypeId,
|
||||
name: &str,
|
||||
expected: &Expected,
|
||||
docs: &Docs,
|
||||
);
|
||||
fn type_union(&mut self, iface: &Interface, id: TypeId, name: &str, union: &Union, docs: &Docs);
|
||||
fn type_enum(&mut self, iface: &Interface, id: TypeId, name: &str, enum_: &Enum, docs: &Docs);
|
||||
fn type_resource(&mut self, iface: &Interface, ty: ResourceId);
|
||||
fn type_alias(&mut self, iface: &Interface, id: TypeId, name: &str, ty: &Type, docs: &Docs);
|
||||
fn type_list(&mut self, iface: &Interface, id: TypeId, name: &str, ty: &Type, docs: &Docs);
|
||||
fn type_builtin(&mut self, iface: &Interface, id: TypeId, name: &str, ty: &Type, docs: &Docs);
|
||||
|
||||
fn preprocess_functions(&mut self, iface: &Interface, dir: Direction) {
|
||||
drop((iface, dir));
|
||||
}
|
||||
fn import(&mut self, iface: &Interface, func: &Function);
|
||||
fn export(&mut self, iface: &Interface, func: &Function);
|
||||
fn finish_functions(&mut self, iface: &Interface, dir: Direction) {
|
||||
drop((iface, dir));
|
||||
}
|
||||
|
||||
fn finish_one(&mut self, iface: &Interface, files: &mut Files);
|
||||
|
||||
fn finish_all(&mut self, files: &mut Files) {
|
||||
drop(files);
|
||||
}
|
||||
|
||||
fn generate_one(&mut self, iface: &Interface, dir: Direction, files: &mut Files) {
|
||||
self.preprocess_one(iface, dir);
|
||||
|
||||
for (id, ty) in iface.types.iter() {
|
||||
// assert!(ty.foreign_module.is_none()); // TODO
|
||||
let name = match &ty.name {
|
||||
Some(name) => name,
|
||||
None => continue,
|
||||
};
|
||||
match &ty.kind {
|
||||
TypeDefKind::Record(record) => self.type_record(iface, id, name, record, &ty.docs),
|
||||
TypeDefKind::Flags(flags) => self.type_flags(iface, id, name, flags, &ty.docs),
|
||||
TypeDefKind::Tuple(tuple) => self.type_tuple(iface, id, name, tuple, &ty.docs),
|
||||
TypeDefKind::Enum(enum_) => self.type_enum(iface, id, name, enum_, &ty.docs),
|
||||
TypeDefKind::Variant(variant) => {
|
||||
self.type_variant(iface, id, name, variant, &ty.docs)
|
||||
}
|
||||
TypeDefKind::Option(t) => self.type_option(iface, id, name, t, &ty.docs),
|
||||
TypeDefKind::Expected(e) => self.type_expected(iface, id, name, e, &ty.docs),
|
||||
TypeDefKind::Union(u) => self.type_union(iface, id, name, u, &ty.docs),
|
||||
TypeDefKind::List(t) => self.type_list(iface, id, name, t, &ty.docs),
|
||||
TypeDefKind::Type(t) => self.type_alias(iface, id, name, t, &ty.docs),
|
||||
TypeDefKind::Future(_) => todo!("generate for future"),
|
||||
TypeDefKind::Stream(_) => todo!("generate for stream"),
|
||||
}
|
||||
}
|
||||
|
||||
for (id, _resource) in iface.resources.iter() {
|
||||
self.type_resource(iface, id);
|
||||
}
|
||||
|
||||
self.preprocess_functions(iface, dir);
|
||||
|
||||
for f in iface.functions.iter() {
|
||||
match dir {
|
||||
Direction::Import => self.import(iface, &f),
|
||||
Direction::Export => self.export(iface, &f),
|
||||
}
|
||||
}
|
||||
|
||||
self.finish_functions(iface, dir);
|
||||
|
||||
self.finish_one(iface, files)
|
||||
}
|
||||
|
||||
fn generate_all(&mut self, imports: &[Interface], exports: &[Interface], files: &mut Files) {
|
||||
self.preprocess_all(imports, exports);
|
||||
|
||||
for imp in imports {
|
||||
self.generate_one(imp, Direction::Import, files);
|
||||
}
|
||||
|
||||
for exp in exports {
|
||||
self.generate_one(exp, Direction::Export, files);
|
||||
}
|
||||
|
||||
self.finish_all(files);
|
||||
}
|
||||
}
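As a rough usage sketch, a front end that drives one of these generators only needs the pieces in this module: parse the `*.wit` interfaces, hand them to `generate_all`, and write out whatever `Files` the generator produced. The driver function below is hypothetical and not part of this crate; `Generator`, `Direction`, `Files`, and `load` (defined later in this file) are the items it relies on.

```rust
use std::fs;
use wit_bindgen_gen_core::{load, Files, Generator};

/// Hypothetical driver: run a generator over one imported and one exported
/// interface and write the generated files into the current directory.
fn run(mut gen: impl Generator, import_wit: &str, export_wit: &str) -> anyhow::Result<()> {
    let imports = vec![load(import_wit)?];
    let exports = vec![load(export_wit)?];

    let mut files = Files::default();
    gen.generate_all(&imports, &exports, &mut files);

    for (name, contents) in files.iter() {
        fs::write(name, contents)?;
    }
    Ok(())
}
```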
|
||||
|
||||
#[derive(Default)]
|
||||
pub struct Types {
|
||||
type_info: HashMap<TypeId, TypeInfo>,
|
||||
}
|
||||
|
||||
#[derive(Default, Clone, Copy)]
|
||||
pub struct TypeInfo {
|
||||
/// Whether or not this type is ever used (transitively) within the
|
||||
/// parameter of a function.
|
||||
pub param: bool,
|
||||
|
||||
/// Whether or not this type is ever used (transitively) within the
|
||||
/// result of a function.
|
||||
pub result: bool,
|
||||
|
||||
/// Whether or not this type (transitively) has a list.
|
||||
pub has_list: bool,
|
||||
|
||||
/// Whether or not this type (transitively) has a handle.
|
||||
pub has_handle: bool,
|
||||
}
|
||||
|
||||
impl std::ops::BitOrAssign for TypeInfo {
|
||||
fn bitor_assign(&mut self, rhs: Self) {
|
||||
self.param |= rhs.param;
|
||||
self.result |= rhs.result;
|
||||
self.has_list |= rhs.has_list;
|
||||
self.has_handle |= rhs.has_handle;
|
||||
}
|
||||
}
|
||||
|
||||
impl Types {
|
||||
pub fn analyze(&mut self, iface: &Interface) {
|
||||
for (t, _) in iface.types.iter() {
|
||||
self.type_id_info(iface, t);
|
||||
}
|
||||
for f in iface.functions.iter() {
|
||||
for (_, ty) in f.params.iter() {
|
||||
self.set_param_result_ty(iface, ty, true, false);
|
||||
}
|
||||
self.set_param_result_ty(iface, &f.result, false, true);
|
||||
}
|
||||
}
|
||||
|
||||
pub fn get(&self, id: TypeId) -> TypeInfo {
|
||||
self.type_info[&id]
|
||||
}
|
||||
|
||||
pub fn type_id_info(&mut self, iface: &Interface, ty: TypeId) -> TypeInfo {
|
||||
if let Some(info) = self.type_info.get(&ty) {
|
||||
return *info;
|
||||
}
|
||||
let mut info = TypeInfo::default();
|
||||
match &iface.types[ty].kind {
|
||||
TypeDefKind::Record(r) => {
|
||||
for field in r.fields.iter() {
|
||||
info |= self.type_info(iface, &field.ty);
|
||||
}
|
||||
}
|
||||
TypeDefKind::Tuple(t) => {
|
||||
for ty in t.types.iter() {
|
||||
info |= self.type_info(iface, ty);
|
||||
}
|
||||
}
|
||||
TypeDefKind::Flags(_) => {}
|
||||
TypeDefKind::Enum(_) => {}
|
||||
TypeDefKind::Variant(v) => {
|
||||
for case in v.cases.iter() {
|
||||
info |= self.type_info(iface, &case.ty);
|
||||
}
|
||||
}
|
||||
TypeDefKind::List(ty) => {
|
||||
info = self.type_info(iface, ty);
|
||||
info.has_list = true;
|
||||
}
|
||||
TypeDefKind::Type(ty) => {
|
||||
info = self.type_info(iface, ty);
|
||||
}
|
||||
TypeDefKind::Option(ty) => {
|
||||
info = self.type_info(iface, ty);
|
||||
}
|
||||
TypeDefKind::Expected(e) => {
|
||||
info = self.type_info(iface, &e.ok);
|
||||
info |= self.type_info(iface, &e.err);
|
||||
}
|
||||
TypeDefKind::Union(u) => {
|
||||
for case in u.cases.iter() {
|
||||
info |= self.type_info(iface, &case.ty);
|
||||
}
|
||||
}
|
||||
TypeDefKind::Future(_) => todo!("type_id_info for future"),
|
||||
TypeDefKind::Stream(_) => todo!("type_id_info for stream"),
|
||||
}
|
||||
self.type_info.insert(ty, info);
|
||||
return info;
|
||||
}
|
||||
|
||||
pub fn type_info(&mut self, iface: &Interface, ty: &Type) -> TypeInfo {
|
||||
let mut info = TypeInfo::default();
|
||||
match ty {
|
||||
Type::Handle(_) => info.has_handle = true,
|
||||
Type::String => info.has_list = true,
|
||||
Type::Id(id) => return self.type_id_info(iface, *id),
|
||||
_ => {}
|
||||
}
|
||||
info
|
||||
}
|
||||
|
||||
fn set_param_result_id(&mut self, iface: &Interface, ty: TypeId, param: bool, result: bool) {
|
||||
match &iface.types[ty].kind {
|
||||
TypeDefKind::Record(r) => {
|
||||
for field in r.fields.iter() {
|
||||
self.set_param_result_ty(iface, &field.ty, param, result)
|
||||
}
|
||||
}
|
||||
TypeDefKind::Tuple(t) => {
|
||||
for ty in t.types.iter() {
|
||||
self.set_param_result_ty(iface, ty, param, result)
|
||||
}
|
||||
}
|
||||
TypeDefKind::Flags(_) => {}
|
||||
TypeDefKind::Enum(_) => {}
|
||||
TypeDefKind::Variant(v) => {
|
||||
for case in v.cases.iter() {
|
||||
self.set_param_result_ty(iface, &case.ty, param, result)
|
||||
}
|
||||
}
|
||||
TypeDefKind::List(ty) | TypeDefKind::Type(ty) | TypeDefKind::Option(ty) => {
|
||||
self.set_param_result_ty(iface, ty, param, result)
|
||||
}
|
||||
TypeDefKind::Expected(e) => {
|
||||
self.set_param_result_ty(iface, &e.ok, param, result);
|
||||
self.set_param_result_ty(iface, &e.err, param, result);
|
||||
}
|
||||
TypeDefKind::Union(u) => {
|
||||
for case in u.cases.iter() {
|
||||
self.set_param_result_ty(iface, &case.ty, param, result)
|
||||
}
|
||||
}
|
||||
TypeDefKind::Future(_) => todo!("set_param_result_id for future"),
|
||||
TypeDefKind::Stream(_) => todo!("set_param_result_id for stream"),
|
||||
}
|
||||
}
|
||||
|
||||
fn set_param_result_ty(&mut self, iface: &Interface, ty: &Type, param: bool, result: bool) {
|
||||
match ty {
|
||||
Type::Id(id) => {
|
||||
self.type_id_info(iface, *id);
|
||||
let info = self.type_info.get_mut(id).unwrap();
|
||||
if (param && !info.param) || (result && !info.result) {
|
||||
info.param = info.param || param;
|
||||
info.result = info.result || result;
|
||||
self.set_param_result_id(iface, *id, param, result);
|
||||
}
|
||||
}
|
||||
_ => {}
|
||||
}
|
||||
}
|
||||
}
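A small sketch of how a generator might consult this analysis, e.g. from `preprocess_one`: run `analyze` once per interface, then look up the cached `TypeInfo` flags while emitting each type. The `needs_borrowed_repr` helper and the policy it encodes are illustrative only; `Types`, `TypeInfo`, `Interface`, and `TypeId` are the types defined here and in `wit-parser`.

```rust
use wit_bindgen_gen_core::wit_parser::{Interface, TypeId};
use wit_bindgen_gen_core::Types;

/// Illustrative policy: a type that only ever appears in parameters and
/// (transitively) contains a list could borrow its contents rather than own
/// them. The usage flags come straight from `Types::analyze`.
fn needs_borrowed_repr(types: &mut Types, iface: &Interface, id: TypeId) -> bool {
    types.analyze(iface);
    let info = types.get(id);
    info.param && !info.result && info.has_list
}
```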
|
||||
|
||||
#[derive(Default)]
|
||||
pub struct Files {
|
||||
files: BTreeMap<String, Vec<u8>>,
|
||||
}
|
||||
|
||||
impl Files {
|
||||
pub fn push(&mut self, name: &str, contents: &[u8]) {
|
||||
match self.files.entry(name.to_owned()) {
|
||||
Entry::Vacant(entry) => {
|
||||
entry.insert(contents.to_owned());
|
||||
}
|
||||
Entry::Occupied(ref mut entry) => {
|
||||
entry.get_mut().extend_from_slice(contents);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
pub fn iter(&self) -> impl Iterator<Item = (&'_ str, &'_ [u8])> {
|
||||
self.files.iter().map(|p| (p.0.as_str(), p.1.as_slice()))
|
||||
}
|
||||
}
|
||||
|
||||
pub fn load(path: impl AsRef<Path>) -> Result<Interface> {
|
||||
Interface::parse_file(path)
|
||||
}
|
||||
|
||||
#[derive(Default)]
|
||||
pub struct Source {
|
||||
s: String,
|
||||
indent: usize,
|
||||
}
|
||||
|
||||
impl Source {
|
||||
pub fn push_str(&mut self, src: &str) {
|
||||
let lines = src.lines().collect::<Vec<_>>();
|
||||
for (i, line) in lines.iter().enumerate() {
|
||||
let trimmed = line.trim();
|
||||
if trimmed.starts_with("}") && self.s.ends_with(" ") {
|
||||
self.s.pop();
|
||||
self.s.pop();
|
||||
}
|
||||
self.s.push_str(if lines.len() == 1 {
|
||||
line
|
||||
} else {
|
||||
line.trim_start()
|
||||
});
|
||||
if trimmed.ends_with('{') {
|
||||
self.indent += 1;
|
||||
}
|
||||
if trimmed.starts_with('}') {
|
||||
self.indent -= 1;
|
||||
}
|
||||
if i != lines.len() - 1 || src.ends_with("\n") {
|
||||
self.newline();
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
pub fn indent(&mut self, amt: usize) {
|
||||
self.indent += amt;
|
||||
}
|
||||
|
||||
pub fn deindent(&mut self, amt: usize) {
|
||||
self.indent -= amt;
|
||||
}
|
||||
|
||||
fn newline(&mut self) {
|
||||
self.s.push_str("\n");
|
||||
for _ in 0..self.indent {
|
||||
self.s.push_str(" ");
|
||||
}
|
||||
}
|
||||
|
||||
pub fn as_mut_string(&mut self) -> &mut String {
|
||||
&mut self.s
|
||||
}
|
||||
}
|
||||
|
||||
impl Write for Source {
|
||||
fn write_str(&mut self, s: &str) -> fmt::Result {
|
||||
self.push_str(s);
|
||||
Ok(())
|
||||
}
|
||||
}
|
||||
|
||||
impl Deref for Source {
|
||||
type Target = str;
|
||||
fn deref(&self) -> &str {
|
||||
&self.s
|
||||
}
|
||||
}
|
||||
|
||||
impl From<Source> for String {
|
||||
fn from(s: Source) -> String {
|
||||
s.s
|
||||
}
|
||||
}
|
||||
|
||||
/// Calls [`write!`] with the passed arguments and unwraps the result.
|
||||
///
|
||||
/// Useful for writing to things with infallible `Write` implementations like
|
||||
/// `Source` and `String`.
|
||||
///
|
||||
/// [`write!`]: std::write
|
||||
#[macro_export]
|
||||
macro_rules! uwrite {
|
||||
($dst:expr, $($arg:tt)*) => {
|
||||
write!($dst, $($arg)*).unwrap()
|
||||
};
|
||||
}
|
||||
|
||||
/// Calls [`writeln!`] with the passed arguments and unwraps the result.
|
||||
///
|
||||
/// Useful for writing to things with infallible `Write` implementations like
|
||||
/// `Source` and `String`.
|
||||
///
|
||||
/// [`writeln!`]: std::writeln
|
||||
#[macro_export]
|
||||
macro_rules! uwriteln {
|
||||
($dst:expr, $($arg:tt)*) => {
|
||||
writeln!($dst, $($arg)*).unwrap()
|
||||
};
|
||||
}
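For example, assuming a generator holds a `Source`, the macros keep formatting call sites terse; `std::fmt::Write` still has to be in scope so that the underlying `write!`/`writeln!` calls resolve. A minimal sketch:

```rust
use std::fmt::Write;
use wit_bindgen_gen_core::{uwrite, uwriteln, Source};

/// Emit a stub function body into a fresh `Source`, relying on its
/// brace-driven auto-indentation.
fn emit_stub(name: &str) -> Source {
    let mut src = Source::default();
    uwriteln!(src, "fn {}() {{", name);
    uwrite!(src, "unimplemented!()\n");
    uwriteln!(src, "}}");
    src
}
```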
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use super::{Generator, Source};
|
||||
|
||||
#[test]
|
||||
fn simple_append() {
|
||||
let mut s = Source::default();
|
||||
s.push_str("x");
|
||||
assert_eq!(s.s, "x");
|
||||
s.push_str("y");
|
||||
assert_eq!(s.s, "xy");
|
||||
s.push_str("z ");
|
||||
assert_eq!(s.s, "xyz ");
|
||||
s.push_str(" a ");
|
||||
assert_eq!(s.s, "xyz a ");
|
||||
s.push_str("\na");
|
||||
assert_eq!(s.s, "xyz a \na");
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn newline_remap() {
|
||||
let mut s = Source::default();
|
||||
s.push_str("function() {\n");
|
||||
s.push_str("y\n");
|
||||
s.push_str("}\n");
|
||||
assert_eq!(s.s, "function() {\n y\n}\n");
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn if_else() {
|
||||
let mut s = Source::default();
|
||||
s.push_str("if() {\n");
|
||||
s.push_str("y\n");
|
||||
s.push_str("} else if () {\n");
|
||||
s.push_str("z\n");
|
||||
s.push_str("}\n");
|
||||
assert_eq!(s.s, "if() {\n y\n} else if () {\n z\n}\n");
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn trim_ws() {
|
||||
let mut s = Source::default();
|
||||
s.push_str(
|
||||
"function() {
|
||||
x
|
||||
}",
|
||||
);
|
||||
assert_eq!(s.s, "function() {\n x\n}");
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn generator_is_object_safe() {
|
||||
fn _assert(_: &dyn Generator) {}
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,27 @@
|
||||
use std::collections::HashSet;
|
||||
|
||||
#[derive(Default)]
|
||||
pub struct Ns {
|
||||
defined: HashSet<String>,
|
||||
tmp: usize,
|
||||
}
|
||||
|
||||
impl Ns {
|
||||
pub fn insert(&mut self, name: &str) -> Result<(), String> {
|
||||
if self.defined.insert(name.to_string()) {
|
||||
Ok(())
|
||||
} else {
|
||||
Err(format!("name `{}` already defined", name))
|
||||
}
|
||||
}
|
||||
|
||||
pub fn tmp(&mut self, name: &str) -> String {
|
||||
let mut ret = name.to_string();
|
||||
while self.defined.contains(&ret) {
|
||||
ret = format!("{}{}", name, self.tmp);
|
||||
self.tmp += 1;
|
||||
}
|
||||
self.defined.insert(ret.clone());
|
||||
return ret;
|
||||
}
|
||||
}
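A quick illustration of the intent, assuming a generator is picking local variable names: `insert` reserves names that must stay unique, and `tmp` derives a fresh name when the preferred one is already taken. The snippet is illustrative only.

```rust
use wit_bindgen_gen_core::Ns;

fn example() {
    let mut ns = Ns::default();
    ns.insert("len").unwrap();          // reserve a user-visible name
    assert!(ns.insert("len").is_err()); // duplicates are rejected

    let a = ns.tmp("ptr"); // "ptr" is free, so it is returned as-is
    let b = ns.tmp("ptr"); // "ptr" is taken now, so a numeric suffix is appended
    assert_eq!(a, "ptr");
    assert_eq!(b, "ptr0");
}
```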
|
||||
@@ -0,0 +1,18 @@
|
||||
module.exports = {
|
||||
"env": {
|
||||
"browser": true,
|
||||
"es2021": true,
|
||||
"node": true
|
||||
},
|
||||
"extends": "eslint:recommended",
|
||||
"parserOptions": {
|
||||
"ecmaVersion": 12,
|
||||
"sourceType": "module"
|
||||
},
|
||||
"rules": {
|
||||
// allow this since we generate `const {} = e;` for empty structs
|
||||
"no-empty-pattern": 0,
|
||||
// TODO: we generate some unused functions by accident, let's fix that later
|
||||
"no-unused-vars": 0,
|
||||
}
|
||||
};
|
||||
@@ -0,0 +1,17 @@
|
||||
[package]
|
||||
name = "wit-bindgen-gen-js"
|
||||
version = "0.1.0"
|
||||
authors = ["Alex Crichton <alex@alexcrichton.com>"]
|
||||
edition = "2018"
|
||||
|
||||
[lib]
|
||||
doctest = false
|
||||
test = false
|
||||
|
||||
[dependencies]
|
||||
wit-bindgen-gen-core = { path = '../gen-core', version = '0.1.0' }
|
||||
heck = "0.3"
|
||||
structopt = { version = "0.3", default-features = false, optional = true }
|
||||
|
||||
[dev-dependencies]
|
||||
test-helpers = { path = '../test-helpers', features = ['wit-bindgen-gen-js'] }
|
||||
@@ -0,0 +1,4 @@
|
||||
fn main() {
|
||||
println!("cargo:rerun-if-changed=build.rs");
|
||||
// this build script is currently only here so OUT_DIR is set for testing.
|
||||
}
|
||||
@@ -0,0 +1,9 @@
|
||||
{
|
||||
"devDependencies": {
|
||||
"@types/node": "^15.12.2",
|
||||
"@typescript-eslint/eslint-plugin": "^4.27.0",
|
||||
"@typescript-eslint/parser": "^4.27.0",
|
||||
"eslint": "^7.28.0",
|
||||
"typescript": "^4.3.2"
|
||||
}
|
||||
}
|
||||
2649  __wasm/wit-bindgen-sample/wit-bindgen/crates/gen-js/src/lib.rs  (new file; diff suppressed because it is too large)
@@ -0,0 +1,38 @@
|
||||
use std::path::Path;
|
||||
use std::process::Command;
|
||||
|
||||
mod exports {
|
||||
test_helpers::codegen_js_export!(
|
||||
// ...
|
||||
"*.wit"
|
||||
);
|
||||
}
|
||||
|
||||
mod imports {
|
||||
test_helpers::codegen_js_import!(
|
||||
"*.wit"
|
||||
|
||||
// This uses buffers, which we don't support in imports just yet
|
||||
// TODO: should support this
|
||||
"!wasi-next.wit"
|
||||
"!host.wit"
|
||||
);
|
||||
}
|
||||
|
||||
fn verify(dir: &str, name: &str) {
|
||||
let (cmd, args) = if cfg!(windows) {
|
||||
("cmd.exe", &["/c", "npx.cmd"] as &[&str])
|
||||
} else {
|
||||
("npx", &[] as &[&str])
|
||||
};
|
||||
|
||||
let status = Command::new(cmd)
|
||||
.args(args)
|
||||
.arg("eslint")
|
||||
.arg("-c")
|
||||
.arg(".eslintrc.js")
|
||||
.arg(Path::new(dir).join(&format!("{}.js", name)))
|
||||
.status()
|
||||
.unwrap();
|
||||
assert!(status.success());
|
||||
}
|
||||
7  __wasm/wit-bindgen-sample/wit-bindgen/crates/gen-js/tests/helpers.d.ts  (vendored, new file)
@@ -0,0 +1,7 @@
|
||||
export function getWasm(): Uint8Array;
|
||||
|
||||
export interface Wasi {
|
||||
start(instance: WebAssembly.Instance): void;
|
||||
}
|
||||
|
||||
export function addWasiToImports(importObj: any): Wasi;
|
||||
@@ -0,0 +1,26 @@
|
||||
import { readFileSync } from 'fs';
|
||||
import { WASI } from 'wasi';
|
||||
|
||||
export function getWasm() {
|
||||
return readFileSync(process.argv[2]);
|
||||
}
|
||||
|
||||
class MyWasi {
|
||||
constructor(wasi) {
|
||||
this.wasi = wasi;
|
||||
}
|
||||
|
||||
start(instance) {
|
||||
if ('_start' in instance.exports) {
|
||||
this.wasi.start(instance);
|
||||
} else {
|
||||
this.wasi.initialize(instance);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
export function addWasiToImports(importObj) {
|
||||
const wasi = new WASI();
|
||||
importObj.wasi_snapshot_preview1 = wasi.wasiImport;
|
||||
return new MyWasi(wasi);
|
||||
}
|
||||
@@ -0,0 +1,99 @@
|
||||
use std::env;
|
||||
use std::fs;
|
||||
use std::path::{Path, PathBuf};
|
||||
use std::process::Command;
|
||||
use wit_bindgen_gen_core::Generator;
|
||||
|
||||
test_helpers::runtime_tests!("ts");
|
||||
|
||||
fn execute(name: &str, wasm: &Path, ts: &Path, imports: &Path, exports: &Path) {
|
||||
let mut dir = PathBuf::from(env!("OUT_DIR"));
|
||||
dir.push(name);
|
||||
drop(fs::remove_dir_all(&dir));
|
||||
fs::create_dir_all(&dir).unwrap();
|
||||
|
||||
println!("OUT_DIR = {:?}", dir);
|
||||
println!("Generating bindings...");
|
||||
// We call `generate_all` with exports from the imports.wit file, and
|
||||
// imports from the exports.wit file. It's reversed because we're
|
||||
// implementing the host side of these APIs.
|
||||
let imports = wit_bindgen_gen_core::wit_parser::Interface::parse_file(imports).unwrap();
|
||||
let exports = wit_bindgen_gen_core::wit_parser::Interface::parse_file(exports).unwrap();
|
||||
let mut files = Default::default();
|
||||
wit_bindgen_gen_js::Opts::default()
|
||||
.build()
|
||||
.generate_all(&[exports], &[imports], &mut files);
|
||||
for (file, contents) in files.iter() {
|
||||
fs::write(dir.join(file), contents).unwrap();
|
||||
}
|
||||
|
||||
let (cmd, args) = if cfg!(windows) {
|
||||
("cmd.exe", &["/c", "npx.cmd"] as &[&str])
|
||||
} else {
|
||||
("npx", &[] as &[&str])
|
||||
};
|
||||
|
||||
fs::copy(ts, dir.join("host.ts")).unwrap();
|
||||
fs::copy("tests/helpers.d.ts", dir.join("helpers.d.ts")).unwrap();
|
||||
fs::copy("tests/helpers.js", dir.join("helpers.js")).unwrap();
|
||||
let config = dir.join("tsconfig.json");
|
||||
fs::write(
|
||||
&config,
|
||||
format!(
|
||||
r#"
|
||||
{{
|
||||
"files": ["host.ts"],
|
||||
"compilerOptions": {{
|
||||
"module": "esnext",
|
||||
"target": "es2020",
|
||||
"strict": true,
|
||||
"strictNullChecks": true,
|
||||
"baseUrl": {0:?},
|
||||
"outDir": {0:?}
|
||||
}}
|
||||
}}
|
||||
"#,
|
||||
dir,
|
||||
),
|
||||
)
|
||||
.unwrap();
|
||||
|
||||
run(Command::new(cmd)
|
||||
.args(args)
|
||||
.arg("tsc")
|
||||
.arg("--project")
|
||||
.arg(&config));
|
||||
|
||||
// Currently there are mysterious uvwasi errors when creating a `WASI` on Windows.
|
||||
// Unsure what's happening so let's ignore these tests for now since there's
|
||||
// not much Windows-specific here anyway.
|
||||
if cfg!(windows) {
|
||||
return;
|
||||
}
|
||||
|
||||
fs::write(dir.join("package.json"), "{\"type\":\"module\"}").unwrap();
|
||||
let mut path = Vec::new();
|
||||
path.push(env::current_dir().unwrap());
|
||||
path.push(dir.clone());
|
||||
println!("{:?}", std::env::join_paths(&path));
|
||||
run(Command::new("node")
|
||||
.arg("--experimental-wasi-unstable-preview1")
|
||||
.arg(dir.join("host.js"))
|
||||
.env("NODE_PATH", std::env::join_paths(&path).unwrap())
|
||||
.arg(wasm));
|
||||
}
|
||||
|
||||
fn run(cmd: &mut Command) {
|
||||
println!("running {:?}", cmd);
|
||||
let output = cmd.output().expect("failed to execute");
|
||||
println!("status: {}", output.status);
|
||||
println!(
|
||||
"stdout:\n {}",
|
||||
String::from_utf8_lossy(&output.stdout).replace("\n", "\n ")
|
||||
);
|
||||
println!(
|
||||
"stderr:\n {}",
|
||||
String::from_utf8_lossy(&output.stderr).replace("\n", "\n ")
|
||||
);
|
||||
assert!(output.status.success());
|
||||
}
|
||||
@@ -0,0 +1,14 @@
|
||||
[package]
|
||||
name = "wit-bindgen-gen-markdown"
|
||||
version = "0.1.0"
|
||||
edition = "2018"
|
||||
|
||||
[lib]
|
||||
doctest = false
|
||||
test = false
|
||||
|
||||
[dependencies]
|
||||
heck = "0.3"
|
||||
pulldown-cmark = { version = "0.8", default-features = false }
|
||||
structopt = { version = "0.3", default-features = false, optional = true }
|
||||
wit-bindgen-gen-core = { path = '../gen-core', version = '0.1.0' }
|
||||
@@ -0,0 +1,466 @@
|
||||
use heck::*;
|
||||
use pulldown_cmark::{html, Event, LinkType, Parser, Tag};
|
||||
use std::collections::HashMap;
|
||||
use wit_bindgen_gen_core::{wit_parser, Direction, Files, Generator, Source};
|
||||
use wit_parser::*;
|
||||
|
||||
#[derive(Default)]
|
||||
pub struct Markdown {
|
||||
src: Source,
|
||||
opts: Opts,
|
||||
sizes: SizeAlign,
|
||||
hrefs: HashMap<String, String>,
|
||||
funcs: usize,
|
||||
types: usize,
|
||||
}
|
||||
|
||||
#[derive(Default, Debug, Clone)]
|
||||
#[cfg_attr(feature = "structopt", derive(structopt::StructOpt))]
|
||||
pub struct Opts {
|
||||
// ...
|
||||
}
|
||||
|
||||
impl Opts {
|
||||
pub fn build(&self) -> Markdown {
|
||||
let mut r = Markdown::new();
|
||||
r.opts = self.clone();
|
||||
r
|
||||
}
|
||||
}
|
||||
|
||||
impl Markdown {
|
||||
pub fn new() -> Markdown {
|
||||
Markdown::default()
|
||||
}
|
||||
|
||||
fn print_ty(&mut self, iface: &Interface, ty: &Type, skip_name: bool) {
|
||||
match ty {
|
||||
Type::Unit => self.src.push_str("`unit`"),
|
||||
Type::Bool => self.src.push_str("`bool`"),
|
||||
Type::U8 => self.src.push_str("`u8`"),
|
||||
Type::S8 => self.src.push_str("`s8`"),
|
||||
Type::U16 => self.src.push_str("`u16`"),
|
||||
Type::S16 => self.src.push_str("`s16`"),
|
||||
Type::U32 => self.src.push_str("`u32`"),
|
||||
Type::S32 => self.src.push_str("`s32`"),
|
||||
Type::U64 => self.src.push_str("`u64`"),
|
||||
Type::S64 => self.src.push_str("`s64`"),
|
||||
Type::Float32 => self.src.push_str("`float32`"),
|
||||
Type::Float64 => self.src.push_str("`float64`"),
|
||||
Type::Char => self.src.push_str("`char`"),
|
||||
Type::String => self.src.push_str("`string`"),
|
||||
Type::Handle(id) => {
|
||||
self.src.push_str("handle<");
|
||||
self.src.push_str(&iface.resources[*id].name);
|
||||
self.src.push_str(">");
|
||||
}
|
||||
Type::Id(id) => {
|
||||
let ty = &iface.types[*id];
|
||||
if !skip_name {
|
||||
if let Some(name) = &ty.name {
|
||||
self.src.push_str("[`");
|
||||
self.src.push_str(name);
|
||||
self.src.push_str("`](#");
|
||||
self.src.push_str(&name.to_snake_case());
|
||||
self.src.push_str(")");
|
||||
return;
|
||||
}
|
||||
}
|
||||
match &ty.kind {
|
||||
TypeDefKind::Type(t) => self.print_ty(iface, t, false),
|
||||
TypeDefKind::Tuple(t) => {
|
||||
self.src.push_str("(");
|
||||
for (i, t) in t.types.iter().enumerate() {
|
||||
if i > 0 {
|
||||
self.src.push_str(", ");
|
||||
}
|
||||
self.print_ty(iface, t, false);
|
||||
}
|
||||
self.src.push_str(")");
|
||||
}
|
||||
TypeDefKind::Record(_)
|
||||
| TypeDefKind::Flags(_)
|
||||
| TypeDefKind::Enum(_)
|
||||
| TypeDefKind::Variant(_)
|
||||
| TypeDefKind::Union(_) => {
|
||||
unreachable!()
|
||||
}
|
||||
TypeDefKind::Option(t) => {
|
||||
self.src.push_str("option<");
|
||||
self.print_ty(iface, t, false);
|
||||
self.src.push_str(">");
|
||||
}
|
||||
TypeDefKind::Expected(e) => {
|
||||
self.src.push_str("expected<");
|
||||
self.print_ty(iface, &e.ok, false);
|
||||
self.src.push_str(", ");
|
||||
self.print_ty(iface, &e.err, false);
|
||||
self.src.push_str(">");
|
||||
}
|
||||
TypeDefKind::List(t) => {
|
||||
self.src.push_str("list<");
|
||||
self.print_ty(iface, t, false);
|
||||
self.src.push_str(">");
|
||||
}
|
||||
TypeDefKind::Future(t) => {
|
||||
self.src.push_str("future<");
|
||||
self.print_ty(iface, t, false);
|
||||
self.src.push_str(">");
|
||||
}
|
||||
TypeDefKind::Stream(s) => {
|
||||
self.src.push_str("stream<");
|
||||
self.print_ty(iface, &s.element, false);
|
||||
self.src.push_str(", ");
|
||||
self.print_ty(iface, &s.end, false);
|
||||
self.src.push_str(">");
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
fn docs(&mut self, docs: &Docs) {
|
||||
let docs = match &docs.contents {
|
||||
Some(docs) => docs,
|
||||
None => return,
|
||||
};
|
||||
for line in docs.lines() {
|
||||
self.src.push_str(line.trim());
|
||||
self.src.push_str("\n");
|
||||
}
|
||||
}
|
||||
|
||||
fn print_type_header(&mut self, name: &str) {
|
||||
if self.types == 0 {
|
||||
self.src.push_str("# Types\n\n");
|
||||
}
|
||||
self.types += 1;
|
||||
self.src.push_str(&format!(
|
||||
"## <a href=\"#{}\" name=\"{0}\"></a> `{}`: ",
|
||||
name.to_snake_case(),
|
||||
name,
|
||||
));
|
||||
self.hrefs
|
||||
.insert(name.to_string(), format!("#{}", name.to_snake_case()));
|
||||
}
|
||||
|
||||
fn print_type_info(&mut self, ty: TypeId, docs: &Docs) {
|
||||
self.docs(docs);
|
||||
self.src.push_str("\n");
|
||||
self.src
|
||||
.push_str(&format!("Size: {}, ", self.sizes.size(&Type::Id(ty))));
|
||||
self.src
|
||||
.push_str(&format!("Alignment: {}\n", self.sizes.align(&Type::Id(ty))));
|
||||
}
|
||||
}
|
||||
|
||||
impl Generator for Markdown {
|
||||
fn preprocess_one(&mut self, iface: &Interface, _dir: Direction) {
|
||||
self.sizes.fill(iface);
|
||||
}
|
||||
|
||||
fn type_record(
|
||||
&mut self,
|
||||
iface: &Interface,
|
||||
id: TypeId,
|
||||
name: &str,
|
||||
record: &Record,
|
||||
docs: &Docs,
|
||||
) {
|
||||
self.print_type_header(name);
|
||||
self.src.push_str("record\n\n");
|
||||
self.print_type_info(id, docs);
|
||||
self.src.push_str("\n### Record Fields\n\n");
|
||||
for field in record.fields.iter() {
|
||||
self.src.push_str(&format!(
|
||||
"- <a href=\"{r}.{f}\" name=\"{r}.{f}\"></a> [`{name}`](#{r}.{f}): ",
|
||||
r = name.to_snake_case(),
|
||||
f = field.name.to_snake_case(),
|
||||
name = field.name,
|
||||
));
|
||||
self.hrefs.insert(
|
||||
format!("{}::{}", name, field.name),
|
||||
format!("#{}.{}", name.to_snake_case(), field.name.to_snake_case()),
|
||||
);
|
||||
self.print_ty(iface, &field.ty, false);
|
||||
self.src.indent(1);
|
||||
self.src.push_str("\n\n");
|
||||
self.docs(&field.docs);
|
||||
self.src.deindent(1);
|
||||
self.src.push_str("\n");
|
||||
}
|
||||
}
|
||||
|
||||
fn type_tuple(
|
||||
&mut self,
|
||||
iface: &Interface,
|
||||
id: TypeId,
|
||||
name: &str,
|
||||
tuple: &Tuple,
|
||||
docs: &Docs,
|
||||
) {
|
||||
self.print_type_header(name);
|
||||
self.src.push_str("tuple\n\n");
|
||||
self.print_type_info(id, docs);
|
||||
self.src.push_str("\n### Tuple Fields\n\n");
|
||||
for (i, ty) in tuple.types.iter().enumerate() {
|
||||
self.src.push_str(&format!(
|
||||
"- <a href=\"{r}.{f}\" name=\"{r}.{f}\"></a> [`{name}`](#{r}.{f}): ",
|
||||
r = name.to_snake_case(),
|
||||
f = i,
|
||||
name = i,
|
||||
));
|
||||
self.hrefs.insert(
|
||||
format!("{}::{}", name, i),
|
||||
format!("#{}.{}", name.to_snake_case(), i),
|
||||
);
|
||||
self.print_ty(iface, ty, false);
|
||||
self.src.push_str("\n");
|
||||
}
|
||||
}
|
||||
|
||||
fn type_flags(
|
||||
&mut self,
|
||||
_iface: &Interface,
|
||||
id: TypeId,
|
||||
name: &str,
|
||||
flags: &Flags,
|
||||
docs: &Docs,
|
||||
) {
|
||||
self.print_type_header(name);
|
||||
self.src.push_str("record\n\n");
|
||||
self.print_type_info(id, docs);
|
||||
self.src.push_str("\n### Record Fields\n\n");
|
||||
for (i, flag) in flags.flags.iter().enumerate() {
|
||||
self.src.push_str(&format!(
|
||||
"- <a href=\"{r}.{f}\" name=\"{r}.{f}\"></a> [`{name}`](#{r}.{f}): ",
|
||||
r = name.to_snake_case(),
|
||||
f = flag.name.to_snake_case(),
|
||||
name = flag.name,
|
||||
));
|
||||
self.hrefs.insert(
|
||||
format!("{}::{}", name, flag.name),
|
||||
format!("#{}.{}", name.to_snake_case(), flag.name.to_snake_case()),
|
||||
);
|
||||
self.src.indent(1);
|
||||
self.src.push_str("\n\n");
|
||||
self.docs(&flag.docs);
|
||||
self.src.deindent(1);
|
||||
self.src.push_str(&format!("Bit: {}\n", i));
|
||||
self.src.push_str("\n");
|
||||
}
|
||||
}
|
||||
|
||||
fn type_variant(
|
||||
&mut self,
|
||||
iface: &Interface,
|
||||
id: TypeId,
|
||||
name: &str,
|
||||
variant: &Variant,
|
||||
docs: &Docs,
|
||||
) {
|
||||
self.print_type_header(name);
|
||||
self.src.push_str("variant\n\n");
|
||||
self.print_type_info(id, docs);
|
||||
self.src.push_str("\n### Variant Cases\n\n");
|
||||
for case in variant.cases.iter() {
|
||||
self.src.push_str(&format!(
|
||||
"- <a href=\"{v}.{c}\" name=\"{v}.{c}\"></a> [`{name}`](#{v}.{c})",
|
||||
v = name.to_snake_case(),
|
||||
c = case.name.to_snake_case(),
|
||||
name = case.name,
|
||||
));
|
||||
self.hrefs.insert(
|
||||
format!("{}::{}", name, case.name),
|
||||
format!("#{}.{}", name.to_snake_case(), case.name.to_snake_case()),
|
||||
);
|
||||
self.src.push_str(": ");
|
||||
self.print_ty(iface, &case.ty, false);
|
||||
self.src.indent(1);
|
||||
self.src.push_str("\n\n");
|
||||
self.docs(&case.docs);
|
||||
self.src.deindent(1);
|
||||
self.src.push_str("\n");
|
||||
}
|
||||
}
|
||||
|
||||
fn type_union(
|
||||
&mut self,
|
||||
iface: &Interface,
|
||||
id: TypeId,
|
||||
name: &str,
|
||||
union: &Union,
|
||||
docs: &Docs,
|
||||
) {
|
||||
self.print_type_header(name);
|
||||
self.src.push_str("union\n\n");
|
||||
self.print_type_info(id, docs);
|
||||
self.src.push_str("\n### Union Cases\n\n");
|
||||
let snake = name.to_snake_case();
|
||||
for (i, case) in union.cases.iter().enumerate() {
|
||||
self.src.push_str(&format!(
|
||||
"- <a href=\"{snake}.{i}\" name=\"{snake}.{i}\"></a> [`{i}`](#{snake}.{i})",
|
||||
));
|
||||
self.hrefs
|
||||
.insert(format!("{name}::{i}"), format!("#{snake}.{i}"));
|
||||
self.src.push_str(": ");
|
||||
self.print_ty(iface, &case.ty, false);
|
||||
self.src.indent(1);
|
||||
self.src.push_str("\n\n");
|
||||
self.docs(&case.docs);
|
||||
self.src.deindent(1);
|
||||
self.src.push_str("\n");
|
||||
}
|
||||
}
|
||||
|
||||
fn type_enum(&mut self, _iface: &Interface, id: TypeId, name: &str, enum_: &Enum, docs: &Docs) {
|
||||
self.print_type_header(name);
|
||||
self.src.push_str("enum\n\n");
|
||||
self.print_type_info(id, docs);
|
||||
self.src.push_str("\n### Enum Cases\n\n");
|
||||
for case in enum_.cases.iter() {
|
||||
self.src.push_str(&format!(
|
||||
"- <a href=\"{v}.{c}\" name=\"{v}.{c}\"></a> [`{name}`](#{v}.{c})",
|
||||
v = name.to_snake_case(),
|
||||
c = case.name.to_snake_case(),
|
||||
name = case.name,
|
||||
));
|
||||
self.hrefs.insert(
|
||||
format!("{}::{}", name, case.name),
|
||||
format!("#{}.{}", name.to_snake_case(), case.name.to_snake_case()),
|
||||
);
|
||||
self.src.indent(1);
|
||||
self.src.push_str("\n\n");
|
||||
self.docs(&case.docs);
|
||||
self.src.deindent(1);
|
||||
self.src.push_str("\n");
|
||||
}
|
||||
}
|
||||
|
||||
fn type_option(
|
||||
&mut self,
|
||||
iface: &Interface,
|
||||
id: TypeId,
|
||||
name: &str,
|
||||
payload: &Type,
|
||||
docs: &Docs,
|
||||
) {
|
||||
self.print_type_header(name);
|
||||
self.src.push_str("option<");
|
||||
self.print_ty(iface, payload, false);
|
||||
self.src.push_str(">");
|
||||
self.print_type_info(id, docs);
|
||||
}
|
||||
|
||||
fn type_expected(
|
||||
&mut self,
|
||||
iface: &Interface,
|
||||
id: TypeId,
|
||||
name: &str,
|
||||
expected: &Expected,
|
||||
docs: &Docs,
|
||||
) {
|
||||
self.print_type_header(name);
|
||||
self.src.push_str("expected<");
|
||||
self.print_ty(iface, &expected.ok, false);
|
||||
self.src.push_str(", ");
|
||||
self.print_ty(iface, &expected.err, false);
|
||||
self.src.push_str(">");
|
||||
self.print_type_info(id, docs);
|
||||
}
|
||||
|
||||
fn type_resource(&mut self, iface: &Interface, ty: ResourceId) {
|
||||
drop((iface, ty));
|
||||
}
|
||||
|
||||
fn type_alias(&mut self, iface: &Interface, id: TypeId, name: &str, ty: &Type, docs: &Docs) {
|
||||
self.print_type_header(name);
|
||||
self.print_ty(iface, ty, true);
|
||||
self.src.push_str("\n\n");
|
||||
self.print_type_info(id, docs);
|
||||
self.src.push_str("\n");
|
||||
}
|
||||
|
||||
fn type_list(&mut self, iface: &Interface, id: TypeId, name: &str, _ty: &Type, docs: &Docs) {
|
||||
self.type_alias(iface, id, name, &Type::Id(id), docs);
|
||||
}
|
||||
|
||||
fn type_builtin(&mut self, iface: &Interface, id: TypeId, name: &str, ty: &Type, docs: &Docs) {
|
||||
self.type_alias(iface, id, name, ty, docs)
|
||||
}
|
||||
|
||||
fn import(&mut self, iface: &Interface, func: &Function) {
|
||||
if self.funcs == 0 {
|
||||
self.src.push_str("# Functions\n\n");
|
||||
}
|
||||
self.funcs += 1;
|
||||
|
||||
self.src.push_str("----\n\n");
|
||||
self.src.push_str(&format!(
|
||||
"#### <a href=\"#{0}\" name=\"{0}\"></a> `",
|
||||
func.name.to_snake_case()
|
||||
));
|
||||
self.hrefs
|
||||
.insert(func.name.clone(), format!("#{}", func.name.to_snake_case()));
|
||||
self.src.push_str(&func.name);
|
||||
self.src.push_str("` ");
|
||||
self.src.push_str("\n\n");
|
||||
self.docs(&func.docs);
|
||||
|
||||
if func.params.len() > 0 {
|
||||
self.src.push_str("##### Params\n\n");
|
||||
for (name, ty) in func.params.iter() {
|
||||
self.src.push_str(&format!(
|
||||
"- <a href=\"#{f}.{p}\" name=\"{f}.{p}\"></a> `{}`: ",
|
||||
name,
|
||||
f = func.name.to_snake_case(),
|
||||
p = name.to_snake_case(),
|
||||
));
|
||||
self.print_ty(iface, ty, false);
|
||||
self.src.push_str("\n");
|
||||
}
|
||||
}
|
||||
match &func.result {
|
||||
Type::Unit => {}
|
||||
ty => {
|
||||
self.src.push_str("##### Results\n\n");
|
||||
self.src.push_str(&format!(
|
||||
"- <a href=\"#{f}.{p}\" name=\"{f}.{p}\"></a> `{}`: ",
|
||||
"result",
|
||||
f = func.name.to_snake_case(),
|
||||
p = "result",
|
||||
));
|
||||
self.print_ty(iface, ty, false);
|
||||
self.src.push_str("\n");
|
||||
}
|
||||
}
|
||||
|
||||
self.src.push_str("\n");
|
||||
}
|
||||
|
||||
fn export(&mut self, iface: &Interface, func: &Function) {
|
||||
self.import(iface, func);
|
||||
}
|
||||
|
||||
fn finish_one(&mut self, _iface: &Interface, files: &mut Files) {
|
||||
let parser = Parser::new(&self.src);
|
||||
let mut events = Vec::new();
|
||||
for event in parser {
|
||||
if let Event::Code(code) = &event {
|
||||
if let Some(dst) = self.hrefs.get(code.as_ref()) {
|
||||
let tag = Tag::Link(LinkType::Inline, dst.as_str().into(), "".into());
|
||||
events.push(Event::Start(tag.clone()));
|
||||
events.push(event.clone());
|
||||
events.push(Event::End(tag));
|
||||
continue;
|
||||
}
|
||||
}
|
||||
events.push(event);
|
||||
}
|
||||
let mut html_output = String::new();
|
||||
html::push_html(&mut html_output, events.into_iter());
|
||||
|
||||
files.push("bindings.md", self.src.as_bytes());
|
||||
files.push("bindings.html", html_output.as_bytes());
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,19 @@
|
||||
[package]
|
||||
name = "wit-bindgen-gen-rust-wasm"
|
||||
version = "0.1.0"
|
||||
authors = ["Alex Crichton <alex@alexcrichton.com>"]
|
||||
edition = "2018"
|
||||
|
||||
[lib]
|
||||
test = false
|
||||
doctest = false
|
||||
|
||||
[dependencies]
|
||||
wit-bindgen-gen-core = { path = '../gen-core', version = '0.1.0' }
|
||||
wit-bindgen-gen-rust = { path = '../gen-rust', version = '0.1.0' }
|
||||
heck = "0.3"
|
||||
structopt = { version = "0.3", default-features = false, optional = true }
|
||||
|
||||
[dev-dependencies]
|
||||
wit-bindgen-rust = { path = '../rust-wasm' }
|
||||
test-helpers = { path = '../test-helpers', features = ['wit-bindgen-gen-rust-wasm'] }
|
||||
@@ -0,0 +1,4 @@
|
||||
fn main() {
|
||||
println!("cargo:rerun-if-changed=build.rs");
|
||||
// this build script is currently only here so OUT_DIR is set for testing.
|
||||
}
|
||||
File diff suppressed because it is too large
@@ -0,0 +1,32 @@
|
||||
#![allow(dead_code, type_alias_bounds)]
|
||||
|
||||
#[test]
|
||||
fn ok() {}
|
||||
|
||||
#[rustfmt::skip]
|
||||
mod imports {
|
||||
test_helpers::codegen_rust_wasm_import!(
|
||||
"*.wit"
|
||||
|
||||
// If you want to exclude a specific test you can include it here with
|
||||
// gitignore glob syntax:
|
||||
//
|
||||
// "!wasm.wit"
|
||||
// "!host.wit"
|
||||
//
|
||||
//
|
||||
// Similarly you can also just remove the `*.wit` glob and list tests
|
||||
// individually if you're debugging.
|
||||
);
|
||||
}
|
||||
|
||||
mod exports {
|
||||
test_helpers::codegen_rust_wasm_export!(
|
||||
"*.wit"
|
||||
|
||||
// TODO: these use push/pull buffer which isn't implemented in the test
|
||||
// generator just yet
|
||||
"!wasi-next.wit"
|
||||
"!host.wit"
|
||||
);
|
||||
}
|
||||
@@ -0,0 +1,13 @@
|
||||
[package]
|
||||
name = "wit-bindgen-gen-rust"
|
||||
version = "0.1.0"
|
||||
authors = ["Alex Crichton <alex@alexcrichton.com>"]
|
||||
edition = "2018"
|
||||
|
||||
[lib]
|
||||
doctest = false
|
||||
test = false
|
||||
|
||||
[dependencies]
|
||||
wit-bindgen-gen-core = { path = '../gen-core' }
|
||||
heck = "0.3"
|
||||
1143  __wasm/wit-bindgen-sample/wit-bindgen/crates/gen-rust/src/lib.rs  (new file; diff suppressed because it is too large)
@@ -0,0 +1,20 @@
|
||||
[package]
|
||||
name = "wit-bindgen-gen-spidermonkey"
|
||||
version = "0.1.0"
|
||||
authors = ["Nick Fitzgerald <fitzgen@gmail.com>"]
|
||||
edition = "2018"
|
||||
|
||||
[lib]
|
||||
test = false
|
||||
doctest = false
|
||||
|
||||
[dependencies]
|
||||
lazy_static = "1.4.0"
|
||||
structopt = { version = "0.3", optional = true }
|
||||
wasm-encoder = "0.8.0"
|
||||
wit-bindgen-gen-core = { path = "../gen-core" }
|
||||
heck = "0.3"
|
||||
|
||||
[dev-dependencies]
|
||||
test-helpers = { path = '../test-helpers', features = ['wit-bindgen-gen-spidermonkey'] }
|
||||
wasmparser = "0.80"
|
||||
@@ -0,0 +1,4 @@
|
||||
fn main() {
|
||||
println!("cargo:rerun-if-changed=build.rs");
|
||||
// this build script is currently only here so OUT_DIR is set for testing.
|
||||
}
|
||||
7  __wasm/wit-bindgen-sample/wit-bindgen/crates/gen-spidermonkey/spidermonkey-wasm/.gitignore  (vendored, new file)
@@ -0,0 +1,7 @@
|
||||
spidermonkey-*
|
||||
lib/*
|
||||
*.o
|
||||
spidermonkey.initial.wasm
|
||||
wasi-sdk-12.0
|
||||
mozbuild
|
||||
.exports
|
||||
@@ -0,0 +1,142 @@
|
||||
SM_REPO := https://github.com/fitzgen/gecko-dev
|
||||
SM_COMMIT := dafd3165f45c55023ece4787a86444029e4f475e
|
||||
|
||||
# TODO: support building `spidermonkey.wasm` on other OSes. But for some reason
|
||||
# the resulting `.wasm` binary is slower when the host compiler is on macOS.
|
||||
WASI_SDK_URL := https://github.com/WebAssembly/wasi-sdk/releases/download/wasi-sdk-12/wasi-sdk-12.0-linux.tar.gz
|
||||
|
||||
CC := $(CURDIR)/wasi-sdk-12.0/bin/clang
|
||||
CXX := $(CURDIR)/wasi-sdk-12.0/bin/clang++
|
||||
|
||||
# Set this to `1` to enable logging via all the `SMW_LOG(...)` calls.
|
||||
LOGGING := 0
|
||||
|
||||
# Set this to `-DDEBUG` and uncomment the `--enable-debug` line in `mozconfig`
|
||||
# to enable debug builds of SpiderMonkey.
|
||||
DEBUG := ""
|
||||
|
||||
# Set this to `""` in debug mode for better debugging.
|
||||
OPT := "-O2"
|
||||
|
||||
CFLAGS := \
|
||||
--sysroot=$(CURDIR)/wasi-sdk-12.0/share/wasi-sysroot \
|
||||
-Wall \
|
||||
--target=wasm32-unknown-wasi \
|
||||
-Ispidermonkey-$(SM_COMMIT)/obj-wasm32-unknown-wasi/dist/include \
|
||||
-I$(CURDIR)/include \
|
||||
$(DEBUG) \
|
||||
$(OPT) \
|
||||
-DLOGGING=$(LOGGING)
|
||||
|
||||
CXXFLAGS := \
|
||||
$(CFLAGS) \
|
||||
-fno-exceptions \
|
||||
-std=c++17
|
||||
|
||||
# Local object files.
|
||||
LOCAL_OBJECTS := $(patsubst %.cpp,%.o,$(wildcard *.cpp))
|
||||
|
||||
# Object files needed within SpiderMonkey's obj dir.
|
||||
SM_OBJECTS := \
|
||||
js/src/build/libjs_static.a \
|
||||
memory/build/Unified_cpp_memory_build0.o \
|
||||
memory/mozalloc/mozalloc_abort.o \
|
||||
memory/mozalloc/Unified_cpp_memory_mozalloc0.o \
|
||||
mfbt/Unified_cpp_mfbt0.o \
|
||||
mfbt/Unified_cpp_mfbt1.o \
|
||||
mfbt/lz4.o \
|
||||
mfbt/lz4frame.o \
|
||||
mfbt/lz4hc.o \
|
||||
mfbt/xxhash.o \
|
||||
mozglue/misc/AutoProfilerLabel.o \
|
||||
mozglue/misc/ConditionVariable_noop.o \
|
||||
mozglue/misc/Decimal.o \
|
||||
mozglue/misc/MmapFaultHandler.o \
|
||||
mozglue/misc/Mutex_noop.o \
|
||||
mozglue/misc/Printf.o \
|
||||
mozglue/misc/StackWalk.o \
|
||||
mozglue/misc/TimeStamp.o \
|
||||
mozglue/misc/TimeStamp_posix.o \
|
||||
mozglue/misc/Uptime.o \
|
||||
modules/zlib/src/compress.o \
|
||||
modules/zlib/src/gzclose.o \
|
||||
modules/zlib/src/infback.o \
|
||||
modules/zlib/src/uncompr.o \
|
||||
wasm32-wasi/release/libjsrust.a
|
||||
|
||||
# The `./lib/*` copies of SpiderMonkey's object files that we check into the
|
||||
# repo.
|
||||
SM_LIB_OBJECTS := $(shell echo $(SM_OBJECTS) | xargs -d' ' -I{} basename {} | xargs -I{} echo lib/{})
|
||||
|
||||
.PHONY: all clean clean-all clean-spidermonkey clean-wasi-sdk
|
||||
|
||||
all: spidermonkey.wasm
|
||||
@echo "Done!"
|
||||
|
||||
spidermonkey.initial.wasm: $(SM_LIB_OBJECTS) $(LOCAL_OBJECTS)
|
||||
$(CXX) $(CXXFLAGS) \
|
||||
-mexec-model=reactor \
|
||||
$(LOCAL_OBJECTS) \
|
||||
$(SM_LIB_OBJECTS) \
|
||||
-o spidermonkey.initial.wasm \
|
||||
-Wl,--export-dynamic \
|
||||
-Wl,--growable-table \
|
||||
-Wl,--export-table \
|
||||
-Wl,--gc-sections
|
||||
|
||||
spidermonkey.wasm: spidermonkey.initial.wasm
|
||||
# Uncomment this `wasm-opt` invocation and comment the following one out to
|
||||
# enable better debugging.
|
||||
#
|
||||
# wasm-opt -g --duplicate-import-elimination spidermonkey.initial.wasm -o spidermonkey.wasm
|
||||
wasm-opt -O2 --strip-dwarf --duplicate-import-elimination spidermonkey.initial.wasm -o spidermonkey.wasm
|
||||
|
||||
|
||||
# Build all `*.cpp` files into `*.o` files.
|
||||
%.o: %.cpp $(SM_LIB_OBJECTS)
|
||||
$(CXX) $(CXXFLAGS) -c $< -o $@
|
||||
|
||||
# Actually build SpiderMonkey.
|
||||
$(SM_LIB_OBJECTS): spidermonkey-$(SM_COMMIT) mozbuild wasi-sdk-12.0 mozconfig
|
||||
cd spidermonkey-$(SM_COMMIT) \
|
||||
&& MOZBUILD_STATE_PATH=$(CURDIR)/mozbuild MOZCONFIG=$(CURDIR)/mozconfig ./mach build
|
||||
mkdir -p lib
|
||||
for x in $(SM_OBJECTS); do \
|
||||
cp spidermonkey-$(SM_COMMIT)/obj-wasm32-unknown-wasi/$$x lib/; \
|
||||
done
|
||||
|
||||
# Clone `mozilla-central` at the `SM_COMMIT` commit.
|
||||
spidermonkey-$(SM_COMMIT):
|
||||
-rm -rf spidermonkey-temp
|
||||
mkdir spidermonkey-temp
|
||||
cd spidermonkey-temp \
|
||||
&& git init \
|
||||
&& git remote add origin $(SM_REPO) \
|
||||
&& git fetch origin $(SM_COMMIT) \
|
||||
&& git checkout $(SM_COMMIT)
|
||||
mv spidermonkey-temp spidermonkey-$(SM_COMMIT)
|
||||
|
||||
mozbuild: spidermonkey-$(SM_COMMIT)
|
||||
-mkdir mozbuild
|
||||
cd spidermonkey-$(SM_COMMIT) \
|
||||
&& MOZBUILD_STATE_PATH=$(CURDIR)/mozbuild ./mach bootstrap --application-choice js --no-system-changes \
|
||||
|| rm -rf $(CURDIR)/mozbuild
|
||||
|
||||
wasi-sdk-12.0:
|
||||
curl -L $(WASI_SDK_URL) | tar -x -z
|
||||
|
||||
clean-all: clean clean-spidermonkey clean-wasi-sdk
|
||||
|
||||
clean-wasi-sdk:
|
||||
-rm -rf wasi-sdk-12.0
|
||||
|
||||
clean-spidermonkey:
|
||||
-rm -rf spidermonkey-$(SM_COMMIT)
|
||||
-rm -rf spidermonkey-$(SM_COMMIT)/obj-wasm32-unknown-wasi/
|
||||
-rm -rf mozbuild
|
||||
|
||||
clean:
|
||||
@echo 'Only cleaning our own artifacts, not upstream deps. Run `make clean-{all,spidermonkey,wasi-sdk}` to clean others.'
|
||||
-rm -rf spidermonkey-temp
|
||||
-rm -rf ./*.o
|
||||
-rm -rf spidermonkey.wasm
|
||||
@@ -0,0 +1,13 @@
|
||||
# `spidermonkey.wasm`
|
||||
|
||||
This directory contains the source code for `spidermonkey.wasm`, which is an
|
||||
embedding of the SpiderMonkey JavaScript engine for targeting `wasm32-wasi` and
|
||||
use with `wit-bindgen-gen-spidermonkey`. It exports a variety of helper
|
||||
functions that are used by `wit-bindgen-gen-spidermonkey`'s generated glue
|
||||
code. These helpers are typically named something like `SMW_whatever_function`.
|
||||
|
||||
## Building `spidermonkey.wasm`
|
||||
|
||||
```
|
||||
make
|
||||
```
|
||||
@@ -0,0 +1,46 @@
|
||||
#include <stdlib.h>
|
||||
|
||||
#pragma clang diagnostic push
|
||||
#pragma clang diagnostic ignored "-Winvalid-offsetof"
|
||||
#include "js/Exception.h"
|
||||
#pragma clang diagnostic pop
|
||||
|
||||
#include "smw/abort.h"
|
||||
#include "smw/cx.h"
|
||||
#include "smw/dump.h"
|
||||
|
||||
namespace smw {
|
||||
|
||||
void abort(const char* msg) {
|
||||
abort(get_js_context(), msg);
|
||||
}
|
||||
|
||||
void abort(JSContext *cx, const char* msg) {
|
||||
fprintf(stderr, "Error: %s", msg);
|
||||
|
||||
if (JS_IsExceptionPending(cx)) {
|
||||
fprintf(stderr, ":");
|
||||
JS::ExceptionStack exception(cx);
|
||||
if (!JS::GetPendingExceptionStack(cx, &exception)) {
|
||||
fprintf(stderr, " failed to get pending exception value and stack\n");
|
||||
} else {
|
||||
fprintf(stderr, "\n exception value: ");
|
||||
if (!dump_value(cx, exception.exception(), stderr)) {
|
||||
fprintf(stderr, "<failed to dump value>");
|
||||
}
|
||||
fprintf(stderr, "\n exception stack:\n");
|
||||
if (!dump_stack(cx, exception.stack(), stderr)) {
|
||||
fprintf(stderr, "<failed to dump stack>\n");
|
||||
}
|
||||
}
|
||||
} else {
|
||||
fprintf(stderr, "\n");
|
||||
}
|
||||
|
||||
// TODO: check for unhandled promise rejections.
|
||||
|
||||
fflush(stderr);
|
||||
::abort();
|
||||
}
|
||||
|
||||
} // namespace smw
|
||||
@@ -0,0 +1,452 @@
|
||||
/*!
|
||||
* This module implements the intrinsics used by code emitted in the
|
||||
* `wit_bindgen_gen_spidermonkey::Bindgen` trait implementation.
|
||||
*/
|
||||
|
||||
#include <assert.h>
|
||||
#include <cmath>
|
||||
#include <stdlib.h>
|
||||
|
||||
#include "smw/abort.h"
|
||||
#include "smw/cx.h"
|
||||
#include "smw/logging.h"
|
||||
#include "smw/wasm.h"
|
||||
|
||||
#include "mozilla/UniquePtr.h"
|
||||
#include "jsapi.h"
|
||||
#include "js/Array.h"
|
||||
#include "js/Conversions.h"
|
||||
#include "js/ForOfIterator.h"
|
||||
#include "js/Modules.h"
|
||||
|
||||
#ifdef LOGGING
|
||||
#include "js/friend/DumpFunctions.h"
|
||||
#endif
|
||||
|
||||
namespace smw {
|
||||
|
||||
using UniqueChars = mozilla::UniquePtr<char[]>;
|
||||
|
||||
using PersistentRootedValueVector = JS::PersistentRooted<JS::GCVector<JS::Value>>;
|
||||
|
||||
// Used for general Wasm<-->JS conversions.
|
||||
static PersistentRootedValueVector* OPERANDS;
|
||||
|
||||
// Used for holding arguments to JS calls.
|
||||
static PersistentRootedValueVector* ARGS;
|
||||
|
||||
// Used for holding returns from Wasm calls.
|
||||
static PersistentRootedValueVector* RETS;
|
||||
|
||||
void init_operands(JSContext* cx) {
|
||||
assert(!OPERANDS && "OPERANDS must only be initialized once");
|
||||
OPERANDS = new PersistentRootedValueVector(cx, cx);
|
||||
if (!OPERANDS) {
|
||||
abort(cx, "failed to allocate OPERANDS");
|
||||
}
|
||||
|
||||
assert(!ARGS && "ARGS must only be initialized once");
|
||||
ARGS = new PersistentRootedValueVector(cx, cx);
|
||||
if (!ARGS) {
|
||||
abort(cx, "failed to allocate ARGS");
|
||||
}
|
||||
|
||||
assert(!RETS && "RETS must only be initialized once");
|
||||
RETS = new PersistentRootedValueVector(cx, cx);
|
||||
if (!RETS) {
|
||||
abort(cx, "failed to allocate RETS");
|
||||
}
|
||||
}
|
||||
|
||||
PersistentRootedValueVector& operands() {
|
||||
assert(OPERANDS && OPERANDS->initialized() && "OPERANDS must be initialized");
|
||||
return *OPERANDS;
|
||||
}
|
||||
|
||||
void save_operand(size_t dest, JS::HandleValue val) {
|
||||
#if LOGGING==1
|
||||
SMW_LOG("operands[%zu] = ", dest);
|
||||
js::DumpValue(val, stderr);
|
||||
#endif // LOGGING==1
|
||||
|
||||
JSContext* cx = get_js_context();
|
||||
|
||||
if (operands().length() <= dest) {
|
||||
size_t needed_capacity = 1 + dest - operands().length();
|
||||
if (!operands().reserve(needed_capacity)) {
|
||||
abort("failed to reserve capacity for the OPERANDS vector");
|
||||
}
|
||||
if (dest == operands().length()) {
|
||||
bool ok = operands().append(val);
|
||||
assert(ok && "already reserved space");
|
||||
return;
|
||||
}
|
||||
JS::RootedValue placeholder(cx, JS::UndefinedValue());
|
||||
for (size_t i = 0; i < needed_capacity; i++) {
|
||||
bool ok = operands().append(placeholder);
|
||||
assert(ok && "already reserved space");
|
||||
}
|
||||
}
|
||||
|
||||
operands()[dest].set(val);
|
||||
}
|
||||
|
||||
PersistentRootedValueVector& args() {
|
||||
assert(ARGS && ARGS->initialized() && "ARGS must be initialized");
|
||||
return *ARGS;
|
||||
}
|
||||
|
||||
PersistentRootedValueVector& rets() {
|
||||
assert(RETS && RETS->initialized() && "RETS must be initialized");
|
||||
return *RETS;
|
||||
}
|
||||
|
||||
WASM_EXPORT
|
||||
void canonical_abi_free(void* ptr, size_t size, size_t align) {
|
||||
(void) size;
|
||||
(void) align;
|
||||
free(ptr);
|
||||
}
|
||||
|
||||
WASM_EXPORT
|
||||
void* canonical_abi_realloc(void* ptr, size_t old_size, size_t align, size_t new_size) {
|
||||
(void) old_size;
|
||||
(void) align;
|
||||
return realloc(ptr, new_size);
|
||||
}
|
||||
|
||||
WASM_EXPORT
|
||||
void SMW_fill_operands(unsigned argc, JS::Value* vp) {
|
||||
SMW_LOG("SMW_fill_operands(argc = %d, vp = %p)\n", argc, vp);
|
||||
|
||||
JS::CallArgs args = JS::CallArgsFromVp(argc, vp);
|
||||
|
||||
if (!operands().reserve(size_t(args.length()))) {
|
||||
abort(get_js_context(), "failed to reserve space in the operands vector");
|
||||
}
|
||||
for (unsigned i = 0; i < args.length(); i++) {
|
||||
#if LOGGING==1
|
||||
SMW_LOG("operands[%d] = ", i);
|
||||
js::DumpValue(args.get(i), stderr);
|
||||
#endif // LOGGING==1
|
||||
|
||||
bool ok = operands().append(args.get(i));
|
||||
assert(ok && "already reserved space");
|
||||
}
|
||||
}
|
||||
|
||||
WASM_EXPORT
|
||||
void SMW_clear_operands() {
|
||||
SMW_LOG("SMW_clear_operands\n");
|
||||
operands().clear();
|
||||
}
|
||||
|
||||
WASM_EXPORT
|
||||
void SMW_push_arg(size_t i) {
|
||||
SMW_LOG("SMW_push_arg(i = %zu)\n", i);
|
||||
if (!args().append(operands()[i])) {
|
||||
abort("failed to push arg");
|
||||
}
|
||||
}
|
||||
|
||||
WASM_EXPORT
|
||||
void SMW_call(char *funcName, size_t funcNameLen, size_t numResults, size_t dest) {
|
||||
#ifdef LOGGING
|
||||
SMW_LOG("SMW_call(funcName = %p \"", funcName);
|
||||
for (size_t i = 0; i < funcNameLen; i++) {
|
||||
SMW_LOG("%c", funcName[i]);
|
||||
}
|
||||
SMW_LOG("\", funcNameLen = %zu, numResults = %zu, dest = %zu)\n",
|
||||
funcNameLen,
|
||||
numResults,
|
||||
dest);
|
||||
#endif
|
||||
|
||||
UniqueChars uniqFuncName(funcName);
|
||||
|
||||
JSContext *cx = get_js_context();
|
||||
|
||||
JS::RootedString funcNameAtom(cx, JS_AtomizeStringN(cx, uniqFuncName.get(), funcNameLen));
|
||||
if (!funcNameAtom) {
|
||||
abort(cx, "failed to atomize function name");
|
||||
}
|
||||
|
||||
JS::RootedObject module(cx, get_user_module());
|
||||
JS::RootedValue exportVal(cx);
|
||||
bool hasExport = false;
|
||||
if (!JS::GetModuleExport(cx, module, funcNameAtom, &exportVal, &hasExport)) {
|
||||
abort(cx, "failed to get module export");
|
||||
}
|
||||
if (!hasExport) {
|
||||
// TODO: include the export name in this message to help users debug
|
||||
// which export they're missing.
|
||||
abort(cx, "user module does not have the requested export");
|
||||
}
|
||||
|
||||
JS::RootedFunction exportFunc(cx, JS_ValueToFunction(cx, exportVal));
|
||||
if (!exportFunc) {
|
||||
// TODO: include the export name in this message.
|
||||
abort(cx, "exported value is not a function");
|
||||
}
|
||||
|
||||
// XXX: we have to copy ARGS into a `JS::RootedVector<JS::Value>` because
|
||||
// `JS::Call` takes a `JS::HandleValueArray` and you can't construct that
|
||||
// from a `JS::PersistentRooted<JS::GCVector<JS::Value>>`, only a
|
||||
// `JS::RootedVector<JS::Value>`. And we can't make `ARGS` a
|
||||
// `JS::RootedVector<JS::Value>` because it is a global, not an on-stack
|
||||
// RAII value as required by `JS::RootedVector<JS::Value>`. Gross!
|
||||
JS::RootedVector<JS::Value> argsVector(cx);
|
||||
if (!argsVector.reserve(args().length())) {
|
||||
abort(cx, "failed to reserve space for arguments vector");
|
||||
}
|
||||
for (size_t i = 0; i < args().length(); i++) {
|
||||
bool ok = argsVector.append(args()[i]);
|
||||
assert(ok && "already reserved space");
|
||||
}
|
||||
|
||||
JS::RootedObject thisObj(cx);
|
||||
JS::RootedValue result(cx);
|
||||
if (!JS::Call(cx, thisObj, exportFunc, argsVector, &result)) {
|
||||
// TODO: include the export name in this message.
|
||||
abort(cx, "calling export function failed");
|
||||
}
|
||||
|
||||
args().clear();
|
||||
|
||||
if (numResults == 0) {
|
||||
// Nothing to push onto the operands vector.
|
||||
} else if (numResults == 1) {
|
||||
save_operand(dest, result);
|
||||
} else {
|
||||
// Treat the "physical" return value as an iterator and unpack the
|
||||
// "logical" return values from within it. This allows JS to return
|
||||
// multiple WIT values as an array or any other iterable.
|
||||
JS::ForOfIterator iter(cx);
|
||||
if (!iter.init(result)) {
|
||||
// TODO: include the export name in this message.
|
||||
abort(cx, "failed to convert return value to iterable");
|
||||
}
|
||||
JS::RootedValue val(cx);
|
||||
bool done = false;
|
||||
for (size_t i = 0; i < numResults; i++) {
|
||||
if (done) {
|
||||
// TODO: include the export name in this message.
|
||||
abort(cx, "function's returned iterator did not yield enough return values");
|
||||
}
|
||||
if (!iter.next(&val, &done)) {
|
||||
// TODO: include the export name in this message.
|
||||
abort(cx, "failed to get the next value out of the return values iterator");
|
||||
}
|
||||
save_operand(dest + i, val);
|
||||
}
|
||||
}
|
||||
}
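// For illustration (hypothetical user code, not part of this file): an exported
// JS function with two WIT results can return them as any iterable, e.g.
//
//     export function getPair() {
//         return [1, "two"];
//     }
//
// The multi-result branch above walks that iterable with JS::ForOfIterator and
// saves each element into operands `dest` and `dest + 1`.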
|
||||
|
||||
WASM_EXPORT
|
||||
void SMW_push_return_value(size_t i) {
|
||||
SMW_LOG("SMW_push_return_value(i = %zu)\n", i);
|
||||
if (!rets().append(operands()[i])) {
|
||||
abort(get_js_context(), "failed to push return value");
|
||||
}
|
||||
}
|
||||
|
||||
WASM_EXPORT
|
||||
void SMW_finish_returns(unsigned argc, JS::Value* vp) {
|
||||
SMW_LOG("SMW_finish_returns(argc = %d, vp = %p)\n", argc, vp);
|
||||
|
||||
JS::CallArgs args = JS::CallArgsFromVp(argc, vp);
|
||||
switch (rets().length()) {
|
||||
case 0: {
|
||||
break;
|
||||
}
|
||||
case 1: {
|
||||
args.rval().set(rets().back());
|
||||
break;
|
||||
}
|
||||
default: {
|
||||
JSContext* cx = get_js_context();
|
||||
JS::RootedVector<JS::Value> elems(cx);
|
||||
if (!elems.reserve(rets().length())) {
|
||||
abort(cx, "failed to reserve space for results vector");
|
||||
}
|
||||
for (size_t i = 0; i < rets().length(); i++) {
|
||||
bool ok = elems.append(rets()[i]);
|
||||
assert(ok && "already reserved space");
|
||||
}
|
||||
JS::RootedObject arr(cx, JS::NewArrayObject(cx, elems));
|
||||
if (!arr) {
|
||||
abort(cx, "failed to allocate array for function's return values");
|
||||
}
|
||||
args.rval().setObject(*arr.get());
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
||||
rets().clear();
|
||||
}
|
||||
|
||||
WASM_EXPORT
|
||||
uint32_t SMW_i32_from_u32(size_t i) {
|
||||
SMW_LOG("SMW_i32_from_u32(i = %zu)\n", i);
|
||||
|
||||
JSContext* cx = get_js_context();
|
||||
JS::RootedValue val(cx, operands()[i]);
|
||||
double number = 0.0;
|
||||
if (!JS::ToNumber(cx, val, &number)) {
|
||||
abort(cx, "failed to convert value to number");
|
||||
}
|
||||
number = std::round(number);
|
||||
return uint32_t(number);
|
||||
}
|
||||
|
||||
WASM_EXPORT
|
||||
void SMW_u32_from_i32(uint32_t x, size_t dest) {
|
||||
SMW_LOG("SMW_u32_from_i32(x = %ull, dest = %zu)\n", x, dest);
|
||||
|
||||
JSContext* cx = get_js_context();
|
||||
JS::RootedValue val(cx, JS::NumberValue(x));
|
||||
save_operand(dest, val);
|
||||
}
|
||||
|
||||
WASM_EXPORT
|
||||
void SMW_string_canon_lower(uint32_t* ret_ptr, size_t i) {
|
||||
SMW_LOG("SMW_string_canon_lower(ret_ptr = %p, i = %zu)\n", ret_ptr, i);
|
||||
|
||||
JSContext* cx = get_js_context();
|
||||
JS::RootedValue strVal(cx, operands()[i]);
|
||||
if (!strVal.isString()) {
|
||||
abort(cx, "value is not a string");
|
||||
}
|
||||
JS::RootedString str(cx, strVal.toString());
|
||||
JS::Rooted<JSLinearString*> linearStr(cx, JS_EnsureLinearString(cx, str));
|
||||
if (!linearStr) {
|
||||
abort(cx, "failed to linearize JS string");
|
||||
}
|
||||
|
||||
size_t len = JS::GetDeflatedUTF8StringLength(linearStr);
|
||||
char* ptr = static_cast<char*>(malloc(len));
|
||||
if (!ptr) {
|
||||
abort(cx, "out of memory");
|
||||
}
|
||||
|
||||
size_t num_written = JS::DeflateStringToUTF8Buffer(linearStr, mozilla::Span(ptr, len));
|
||||
assert(num_written == len);
|
||||
|
||||
ret_ptr[0] = reinterpret_cast<uint32_t>(ptr);
|
||||
ret_ptr[1] = static_cast<uint32_t>(len);
|
||||
}
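// Sketch of the two-word return area written above (assuming a 32-bit wasm
// target where a pointer fits in a uint32_t):
//
//     ret_ptr[0] : address of the malloc'd UTF-8 buffer
//     ret_ptr[1] : length of that buffer in bytes
//
// The caller is assumed to take ownership of the buffer and free it.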
|
||||
|
||||
WASM_EXPORT
|
||||
void SMW_string_canon_lift(char* ptr, size_t len, size_t dest) {
|
||||
SMW_LOG("SMW_string_canon_lift(ptr = %p, len = %zu, dest = %zu)\n", ptr, len, dest);
|
||||
|
||||
JSContext* cx = get_js_context();
|
||||
JS::RootedString str(cx, JS_NewStringCopyUTF8N(cx, JS::UTF8Chars(ptr, len)));
|
||||
if (!str) {
|
||||
abort(cx, "failed to create JS string from UTF-8 buffer");
|
||||
}
|
||||
JS::RootedValue strVal(cx, JS::StringValue(str));
|
||||
save_operand(dest, strVal);
|
||||
}
|
||||
|
||||
WASM_EXPORT
|
||||
uint32_t SMW_spread_into_array(size_t i) {
|
||||
SMW_LOG("SMW_spread_into_array; i = %zu\n", i);
|
||||
|
||||
JSContext* cx = get_js_context();
|
||||
|
||||
JS::RootedValue iterable(cx, operands()[i]);
|
||||
bool is_array = false;
|
||||
if (!JS::IsArrayObject(cx, iterable, &is_array)) {
|
||||
abort(cx, "failed to check if object is an array");
|
||||
}
|
||||
|
||||
if (is_array) {
|
||||
JS::RootedObject arr(cx, &iterable.toObject());
|
||||
uint32_t length = 0;
|
||||
if (!JS::GetArrayLength(cx, arr, &length)) {
|
||||
abort(cx, "failed to get array length");
|
||||
}
|
||||
return length;
|
||||
}
|
||||
|
||||
JS::RootedVector<JS::Value> elems(cx);
|
||||
JS::ForOfIterator iter(cx);
|
||||
if (!iter.init(iterable)) {
|
||||
abort(cx, "failed to convert operand value to iterable");
|
||||
}
|
||||
JS::RootedValue val(cx);
|
||||
bool done = false;
|
||||
while (!done) {
|
||||
if (!iter.next(&val, &done)) {
|
||||
abort(cx, "failed to get the next value out of iterator");
|
||||
}
|
||||
if (done) {
|
||||
break;
|
||||
}
|
||||
if (!elems.append(val)) {
|
||||
abort(cx, "failed to append value to vector");
|
||||
}
|
||||
}
|
||||
|
||||
JS::RootedObject arr(cx, JS::NewArrayObject(cx, elems));
|
||||
if (!arr) {
|
||||
abort(cx, "failed to allocate JS array object");
|
||||
}
|
||||
operands()[i].setObject(*arr);
|
||||
|
||||
return elems.length();
|
||||
}
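// For illustration (hypothetical user code): a non-array iterable such as
//
//     export function* evens() { yield 0; yield 2; yield 4; }
//
// is materialized into a plain JS array in place, so the returned length is
// known up front and SMW_get_array_element below can index the elements
// directly.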
|
||||
|
||||
WASM_EXPORT
|
||||
void SMW_get_array_element(size_t array, size_t index, size_t dest) {
|
||||
SMW_LOG("SMW_get_array_element(array = %zu, index = %zu, dest = %zu)\n", array, index, dest);
|
||||
|
||||
JSContext* cx = get_js_context();
|
||||
|
||||
JS::RootedValue array_val(cx, operands()[array]);
|
||||
assert(array_val.isObject());
|
||||
JS::RootedObject array_obj(cx, &array_val.toObject());
|
||||
JS::RootedValue elem(cx);
|
||||
if (!JS_GetElement(cx, array_obj, index, &elem)) {
|
||||
abort(cx, "failed to get array element");
|
||||
}
|
||||
|
||||
save_operand(dest, elem);
|
||||
}
|
||||
|
||||
WASM_EXPORT
|
||||
void SMW_new_array(size_t dest) {
|
||||
SMW_LOG("SMW_new_array(dest = %zu)\n", dest);
|
||||
|
||||
JSContext* cx = get_js_context();
|
||||
JS::RootedObject arr(cx, JS::NewArrayObject(cx, 0));
|
||||
if (!arr) {
|
||||
abort(cx, "failed to allocate a new JS array object");
|
||||
}
|
||||
JS::RootedValue arr_val(cx, JS::ObjectValue(*arr));
|
||||
save_operand(dest, arr_val);
|
||||
}
|
||||
|
||||
WASM_EXPORT
|
||||
void SMW_array_push(size_t array, size_t elem) {
|
||||
SMW_LOG("SMW_array_push(array = %zu, elem = %zu)\n", array, elem);
|
||||
|
||||
JSContext* cx = get_js_context();
|
||||
|
||||
JS::RootedValue array_val(cx, operands()[array]);
|
||||
assert(array_val.isObject());
|
||||
JS::RootedObject array_obj(cx, &array_val.toObject());
|
||||
|
||||
uint32_t length = 0;
|
||||
if (!JS::GetArrayLength(cx, array_obj, &length)) {
|
||||
abort(cx, "failed to get JS array object length");
|
||||
}
|
||||
|
||||
JS::RootedValue elem_val(cx, operands()[elem]);
|
||||
if (!JS_SetElement(cx, array_obj, length, elem_val)) {
|
||||
abort(cx, "failed to set JS array element");
|
||||
}
|
||||
}
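// Illustrative call sequence for lifting a two-element list (roughly what the
// generated glue code is expected to drive; the operand indices are made up):
//
//     SMW_new_array(0);        // operands[0] = []
//     /* lift the first element into operands[1] */
//     SMW_array_push(0, 1);    // operands[0].push(operands[1])
//     /* lift the second element into operands[1] */
//     SMW_array_push(0, 1);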
|
||||
|
||||
} // namespace smw
|
||||
@@ -0,0 +1,33 @@
|
||||
#include <assert.h>
|
||||
|
||||
#include "smw/cx.h"
|
||||
|
||||
#include "jsapi.h"
|
||||
|
||||
namespace smw {
|
||||
|
||||
static JSContext* CONTEXT = nullptr;
|
||||
|
||||
void init_js_context(JSContext *cx) {
|
||||
assert(!CONTEXT && "CONTEXT should only be initialized once");
|
||||
CONTEXT = cx;
|
||||
}
|
||||
|
||||
JSContext *get_js_context() {
|
||||
assert(CONTEXT && "CONTEXT should be initialized");
|
||||
return CONTEXT;
|
||||
}
|
||||
|
||||
static JS::PersistentRooted<JSObject*> USER_MODULE;
|
||||
|
||||
void init_user_module(JSContext* cx, JSObject* user_module) {
|
||||
assert(!USER_MODULE && "USER_MODULE should only be initialized once");
|
||||
USER_MODULE.init(cx, user_module);
|
||||
}
|
||||
|
||||
JSObject* get_user_module() {
|
||||
assert(USER_MODULE && "USER_MODULE should be initialized");
|
||||
return USER_MODULE;
|
||||
}
|
||||
|
||||
} // namespace smw
|
||||
@@ -0,0 +1,48 @@
|
||||
#include "smw/dump.h"
|
||||
|
||||
#include <assert.h>
|
||||
#include "jsapi.h"
|
||||
#include "smw/wasm.h"
|
||||
|
||||
namespace smw {
|
||||
|
||||
static JS::UniqueChars stringify_value(JSContext *cx, JS::HandleValue val) {
|
||||
JS::RootedString str(cx, JS_ValueToSource(cx, val));
|
||||
if (!str) {
|
||||
return nullptr;
|
||||
}
|
||||
return JS_EncodeStringToUTF8(cx, str);
|
||||
}
|
||||
|
||||
bool dump_value(JSContext *cx, JS::HandleValue val, FILE* fp) {
|
||||
JS::UniqueChars str = stringify_value(cx, val);
|
||||
if (!str) {
|
||||
return false;
|
||||
}
|
||||
fprintf(fp, "%s\n", str.get());
|
||||
return true;
|
||||
}
|
||||
|
||||
bool dump_stack(JSContext *cx, JS::HandleObject stack, FILE* fp) {
|
||||
JS::RootedString str(cx);
|
||||
size_t indent = 4;
|
||||
if (!JS::BuildStackString(cx, nullptr, stack, &str, indent)) {
|
||||
return false;
|
||||
}
|
||||
|
||||
JS::UniqueChars utf8 = JS_EncodeStringToUTF8(cx, str);
|
||||
if (!utf8) {
|
||||
return false;
|
||||
}
|
||||
|
||||
fprintf(fp, "%s\n", utf8.get());
|
||||
return true;
|
||||
}
|
||||
|
||||
WASM_EXPORT
|
||||
int32_t dump_i32(int32_t x) {
|
||||
fprintf(stderr, "dump_i32: %d\n", x);
|
||||
return x;
|
||||
}
|
||||
|
||||
} // namespace smw
|
||||
@@ -0,0 +1,16 @@
|
||||
#ifndef _smw_abort_h
|
||||
#define _smw_abort_h
|
||||
|
||||
struct JSContext;
|
||||
|
||||
namespace smw {
|
||||
|
||||
/**
|
||||
* Print the given error message and abort.
|
||||
*/
|
||||
void abort(const char* msg);
|
||||
void abort(JSContext *cx, const char* msg);
|
||||
|
||||
}
|
||||
|
||||
#endif // _smw_abort_h
|
||||
@@ -0,0 +1,12 @@
|
||||
#ifndef _smw_bindgen_h
|
||||
#define _smw_bindgen_h
|
||||
|
||||
struct JSContext;
|
||||
|
||||
namespace smw {
|
||||
|
||||
void init_operands(JSContext* cx);
|
||||
|
||||
}
|
||||
|
||||
#endif // _smw_bindgen_h
|
||||
@@ -0,0 +1,17 @@
|
||||
#ifndef _smw_cx_h
|
||||
#define _smw_cx_h
|
||||
|
||||
struct JSContext;
|
||||
class JSObject;
|
||||
|
||||
namespace smw {
|
||||
|
||||
void init_js_context(JSContext* cx);
|
||||
JSContext* get_js_context();
|
||||
|
||||
void init_user_module(JSContext* cx, JSObject* user_module);
|
||||
JSObject* get_user_module();
|
||||
|
||||
}
|
||||
|
||||
#endif // _smw_cx_h
|
||||
@@ -0,0 +1,27 @@
|
||||
#ifndef _smw_dump_h
|
||||
#define _smw_dump_h
|
||||
|
||||
#pragma clang diagnostic push
|
||||
#pragma clang diagnostic ignored "-Winvalid-offsetof"
|
||||
|
||||
#include "js/TypeDecls.h"
|
||||
#include "js/Value.h"
|
||||
|
||||
#pragma clang diagnostic pop
|
||||
|
||||
namespace smw {
|
||||
|
||||
/**
|
||||
* Dump a human-readable representation of the given JS value to the given file.
|
||||
*/
|
||||
bool dump_value(JSContext *cx, JS::HandleValue val, FILE* fp);
|
||||
|
||||
/**
|
||||
* Dump a human-readable representation of the given JS exception stack to the
|
||||
* given file.
|
||||
*/
|
||||
bool dump_stack(JSContext *cx, JS::HandleObject stack, FILE* fp);
|
||||
|
||||
}
|
||||
|
||||
#endif // _smw_dump_h
|
||||
@@ -0,0 +1,15 @@
|
||||
#ifndef _smw_logging_h
|
||||
#define _smw_logging_h
|
||||
|
||||
#if LOGGING==1
|
||||
|
||||
#include <stdio.h>
|
||||
#define SMW_LOG(msg, ...) fprintf(stderr, msg, ##__VA_ARGS__)
|
||||
|
||||
#else // LOGGING==1
|
||||
|
||||
#define SMW_LOG(msg, ...) do { } while(false)
|
||||
|
||||
#endif // LOGGING==1
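// Example: `SMW_LOG("lifted %zu bytes\n", len);` expands to an fprintf to
// stderr when the translation unit is compiled with -DLOGGING=1, and to an
// empty statement otherwise.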
|
||||
|
||||
#endif // _smw_logging_h
|
||||
@@ -0,0 +1,18 @@
|
||||
#ifndef _smw_wasm_h
|
||||
#define _smw_wasm_h
|
||||
|
||||
/**
|
||||
* An attribute for making a function exported from the final Wasm binary.
|
||||
*
|
||||
* Example usage:
|
||||
*
|
||||
* WASM_EXPORT
|
||||
* int add(int a, int b) {
|
||||
* return a + b;
|
||||
* }
|
||||
*/
|
||||
#define WASM_EXPORT \
|
||||
__attribute__((visibility("default"))) \
|
||||
extern "C"
|
||||
|
||||
#endif // _smw_wasm_h
|
||||
@@ -0,0 +1,380 @@
|
||||
/*!
|
||||
* JS engine initialization and JS top-level evaluation.
|
||||
*
|
||||
* This file contains the code to start up the JS engine, define import-able
|
||||
* modules from VM functions, and evaluate the user JS.
|
||||
*/
|
||||
|
||||
#include <assert.h>
|
||||
#include <stdlib.h>
|
||||
|
||||
#include "smw/abort.h"
|
||||
#include "smw/bindgen.h"
|
||||
#include "smw/cx.h"
|
||||
#include "smw/wasm.h"
|
||||
|
||||
#include "js/AllocPolicy.h"
|
||||
#include "js/CompilationAndEvaluation.h"
|
||||
#include "js/GCAPI.h"
|
||||
#include "js/GCVector.h"
|
||||
#include "js/Initialization.h"
|
||||
#include "js/Modules.h"
|
||||
#include "js/Promise.h"
|
||||
#include "js/Realm.h"
|
||||
#include "js/SourceText.h"
|
||||
#include "js/TypeDecls.h"
|
||||
#include "js/Warnings.h"
|
||||
#include "jsapi.h"
|
||||
#include "jsfriendapi.h"
|
||||
|
||||
namespace smw {
|
||||
|
||||
using UniqueChars = mozilla::UniquePtr<char[]>;
|
||||
|
||||
bool INITIALIZED = false;
|
||||
JS::PersistentRootedObject GLOBAL;
|
||||
|
||||
static JSClass global_class = {
|
||||
"global",
|
||||
JSCLASS_GLOBAL_FLAGS,
|
||||
&JS::DefaultGlobalClassOps
|
||||
};
|
||||
|
||||
/**
|
||||
* Compile the given JS source as a module in the context of the given global.
|
||||
*
|
||||
* Takes ownership of `jsSource`.
|
||||
*
|
||||
* Does not take ownership of `jsFileName`.
|
||||
*
|
||||
* Sets `outModule` to the resulting source text module record object.
|
||||
*/
|
||||
bool compile_js_module(JSContext *cx,
|
||||
const char *jsFileName,
|
||||
char *jsSource,
|
||||
size_t jsSourceLen,
|
||||
JS::MutableHandleObject outModule) {
|
||||
JS::CompileOptions copts(cx);
|
||||
copts
|
||||
.setFileAndLine(jsFileName, 1)
|
||||
.setNoScriptRval(true)
|
||||
.setForceFullParse();
|
||||
|
||||
JS::SourceText<mozilla::Utf8Unit> srcBuf;
|
||||
if (!srcBuf.init(cx, jsSource, jsSourceLen, JS::SourceOwnership::TakeOwnership)) {
|
||||
return false;
|
||||
}
|
||||
|
||||
JS::RootedObject module(cx);
|
||||
|
||||
// Disabling generational GC during compilation seems to slightly reduce
|
||||
// the number of pages touched post-wizening. (Whereas disabling it
|
||||
// during execution meaningfully increases it, which is why this is
|
||||
// scoped to just compilation.)
|
||||
JS::AutoDisableGenerationalGC noGgc(cx);
|
||||
module = JS::CompileModule(cx, copts, srcBuf);
|
||||
if (!module) {
|
||||
return false;
|
||||
}
|
||||
|
||||
outModule.set(module);
|
||||
return true;
|
||||
}
|
||||
|
||||
/**
|
||||
* A synthesized module that exports `JSNative` functions.
|
||||
*/
|
||||
struct SynthesizedModule {
|
||||
JS::Heap<JSString*> moduleName;
|
||||
JS::Heap<JSObject*> moduleObject;
|
||||
|
||||
SynthesizedModule(JS::HandleString moduleName, JS::HandleObject moduleObject)
|
||||
: moduleName(moduleName)
|
||||
, moduleObject(moduleObject)
|
||||
{ }
|
||||
|
||||
void trace(JSTracer* tracer) {
|
||||
JS::TraceEdge(tracer, &moduleObject, "SynthesizedModule.moduleObject");
|
||||
}
|
||||
};
|
||||
|
||||
JS::PersistentRooted<JS::GCVector<SynthesizedModule, 0, js::SystemAllocPolicy>> MODULES;
|
||||
|
||||
JSObject* module_resolve_hook(JSContext *cx,
|
||||
JS::HandleValue referencing_private,
|
||||
JS::HandleObject module_request) {
|
||||
JS::RootedString specifier(cx, JS::GetModuleRequestSpecifier(cx, module_request));
|
||||
if (!specifier) {
|
||||
abort(cx, "failed to get module request specifier");
|
||||
}
|
||||
|
||||
size_t len = MODULES.length();
|
||||
for (size_t i = 0; i < len; i++) {
|
||||
JS::RootedObject it_module(cx, MODULES[i].get().moduleObject);
|
||||
JS::RootedString it_name(cx, MODULES[i].get().moduleName);
|
||||
int32_t result = 0;
|
||||
if (!JS_CompareStrings(cx, it_name, specifier, &result)) {
|
||||
abort(cx, "failed to compare module specifier to registered module name");
|
||||
}
|
||||
if (result == 0) {
|
||||
return it_module.get();
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
JS::UniqueChars utf8 = JS_EncodeStringToUTF8(cx, specifier);
|
||||
if (!utf8) {
|
||||
JS_ReportErrorASCII(cx, "failed to find module import");
|
||||
return nullptr;
|
||||
}
|
||||
JS_ReportErrorASCII(cx, "failed to find module import: `%s`", utf8.get());
|
||||
return nullptr;
|
||||
}
|
||||
|
||||
JS::RealmOptions make_realm_options() {
|
||||
JS::RealmOptions options;
|
||||
options
|
||||
.creationOptions()
|
||||
.setStreamsEnabled(true)
|
||||
.setReadableByteStreamsEnabled(true)
|
||||
.setBYOBStreamReadersEnabled(true)
|
||||
.setReadableStreamPipeToEnabled(true)
|
||||
.setWritableStreamsEnabled(true)
|
||||
.setIteratorHelpersEnabled(true)
|
||||
.setWeakRefsEnabled(JS::WeakRefSpecifier::EnabledWithoutCleanupSome);
|
||||
return options;
|
||||
}
|
||||
|
||||
bool init_js(JSContext *cx) {
|
||||
if (!js::UseInternalJobQueues(cx)) {
|
||||
return false;
|
||||
}
|
||||
|
||||
if (!JS::InitSelfHostedCode(cx)) {
|
||||
return false;
|
||||
}
|
||||
|
||||
JS::RealmOptions options = make_realm_options();
|
||||
|
||||
JS::DisableIncrementalGC(cx);
|
||||
|
||||
JS::RootedObject global(cx, JS_NewGlobalObject(cx, &global_class, nullptr,
|
||||
JS::FireOnNewGlobalHook, options));
|
||||
if (!global) {
|
||||
return false;
|
||||
}
|
||||
|
||||
JS::EnterRealm(cx, global);
|
||||
|
||||
if (!JS::InitRealmStandardClasses(cx)) {
|
||||
return false;
|
||||
}
|
||||
|
||||
// JS::SetPromiseRejectionTrackerCallback(cx, rejection_tracker);
|
||||
|
||||
JS::SetModuleResolveHook(JS_GetRuntime(cx), module_resolve_hook);
|
||||
|
||||
GLOBAL.init(cx, global);
|
||||
return true;
|
||||
}
|
||||
|
||||
// static void report_warning(JSContext *cx, JSErrorReport *report) {
|
||||
// JS::PrintError(stderr, report, true);
|
||||
// if (!report->isWarning()) {
|
||||
// ::abort();
|
||||
// }
|
||||
// }
|
||||
|
||||
/**
|
||||
* Initialize the JS engine: the context, global object and realm, the operand
* storage, and the synthesized-module registry.
|
||||
*/
|
||||
WASM_EXPORT
|
||||
void SMW_initialize_engine() {
|
||||
assert(!INITIALIZED);
|
||||
|
||||
bool ok = true;
|
||||
|
||||
ok = JS_Init();
|
||||
assert(ok && "JS_Init failed");
|
||||
|
||||
JSContext *cx = JS_NewContext(JS::DefaultHeapMaxBytes);
|
||||
assert(cx != nullptr && "JS_NewContext failed");
|
||||
init_js_context(cx);
|
||||
|
||||
// JS::SetWarningReporter(cx, report_warning);
|
||||
|
||||
if (!init_js(cx)) {
|
||||
abort(cx, "initializing the JavaScript engine failed");
|
||||
}
|
||||
|
||||
init_operands(cx);
|
||||
|
||||
MODULES.init(cx);
|
||||
INITIALIZED = true;
|
||||
}
|
||||
|
||||
class ModuleBuilder {
|
||||
JS::PersistentRootedString moduleName;
|
||||
JS::PersistentRooted<JS::IdValueVector> exports;
|
||||
|
||||
public:
|
||||
/**
|
||||
* Construct a new `ModuleBuilder` and take ownership of `moduleName`.
|
||||
*/
|
||||
ModuleBuilder(JSContext *cx, JS::HandleString moduleName)
|
||||
: moduleName(cx, moduleName)
|
||||
, exports(cx, cx)
|
||||
{
|
||||
assert(moduleName && "moduleName must not be nullptr");
|
||||
}
|
||||
|
||||
/**
|
||||
* Add an exported function to this module and take ownership of `funcName`.
|
||||
*/
|
||||
void add_export(const char *funcName, size_t funcNameLen, JSNative func, unsigned numArgs) {
|
||||
assert(funcName && "function name must not be nullptr");
|
||||
assert(funcNameLen > 0 && "function name length must be greater than zero");
|
||||
assert(func && "the function must not be nullptr");
|
||||
|
||||
JSContext *cx = get_js_context();
|
||||
|
||||
JS::RootedString jsFuncName(cx, JS_NewStringCopyN(cx, funcName, funcNameLen));
|
||||
if (!jsFuncName) {
|
||||
abort(cx, "failed to create new JS string");
|
||||
}
|
||||
|
||||
JS::RootedId funcNameId(cx);
|
||||
if (!JS_StringToId(cx, jsFuncName, &funcNameId)) {
|
||||
abort(cx, "failed to convert string to id");
|
||||
}
|
||||
|
||||
JS::RootedFunction jsFunc(cx, JS_NewFunction(cx, func, numArgs, 0, funcName));
|
||||
if (!jsFunc) {
|
||||
abort(cx, "failed to create new JS function");
|
||||
}
|
||||
|
||||
JS::RootedObject jsFuncObj(cx, JS_GetFunctionObject(jsFunc));
|
||||
assert(jsFuncObj && "getting function object is infallible");
|
||||
JS::RootedValue jsFuncVal(cx, JS::ObjectValue(*jsFuncObj));
|
||||
|
||||
if (!exports.append(JS::IdValuePair(funcNameId, jsFuncVal))) {
|
||||
abort(cx, "failed to append export to exports list");
|
||||
}
|
||||
}
|
||||
|
||||
void finish() {
|
||||
JSContext *cx = get_js_context();
|
||||
|
||||
JS::RootedObject module(cx, JS::CreateModule(cx, exports));
|
||||
if (!module) {
|
||||
abort(cx, "failed to create synthetic module");
|
||||
}
|
||||
|
||||
if (!MODULES.append(SynthesizedModule(moduleName, module))) {
|
||||
abort(cx, "failed to append to MODULES");
|
||||
}
|
||||
|
||||
delete this;
|
||||
}
|
||||
};
|
||||
|
||||
WASM_EXPORT
|
||||
ModuleBuilder *SMW_new_module_builder(char *module_name, size_t module_name_len) {
|
||||
auto unique_module_name = UniqueChars(module_name);
|
||||
|
||||
JSContext *cx = get_js_context();
|
||||
|
||||
JS::RootedString js_module_name(cx, JS_NewStringCopyN(cx, unique_module_name.get(), module_name_len));
|
||||
if (!js_module_name) {
|
||||
abort(cx, "failed to allocate JS string");
|
||||
}
|
||||
|
||||
auto b = new ModuleBuilder(cx, js_module_name);
|
||||
if (!b) {
|
||||
abort(cx, "failed to create new ModuleBuilder");
|
||||
}
|
||||
|
||||
return b;
|
||||
}
|
||||
|
||||
WASM_EXPORT
|
||||
void SMW_module_builder_add_export(ModuleBuilder *builder,
|
||||
char *funcName,
|
||||
size_t funcNameLen,
|
||||
JSNative func,
|
||||
unsigned numArgs) {
|
||||
assert(builder && "builder must not be nullptr");
|
||||
assert(funcName && "funcName must not be nullptr");
|
||||
assert(funcNameLen > 0 && "funcNameLen must be greater than 0");
|
||||
assert(func && "func must not be nullptr");
|
||||
|
||||
auto uniqFuncName = UniqueChars(funcName);
|
||||
builder->add_export(uniqFuncName.get(), funcNameLen, func, numArgs);
|
||||
}
|
||||
|
||||
WASM_EXPORT
|
||||
void SMW_finish_module_builder(ModuleBuilder *builder) {
|
||||
builder->finish();
|
||||
}
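// Expected call sequence from the generated glue code (an illustrative sketch;
// the names and arity below are made up):
//
//     ModuleBuilder *b = SMW_new_module_builder(module_name, module_name_len);
//     SMW_module_builder_add_export(b, func_name, func_name_len, native_fn, 1);
//     SMW_finish_module_builder(b); // registers the module and deletes the builder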
|
||||
|
||||
WASM_EXPORT
|
||||
void SMW_eval_module(char *jsFileName, char *jsSource, size_t jsSourceLen) {
|
||||
JSContext *cx = get_js_context();
|
||||
|
||||
assert(GLOBAL && "GLOBAL should be initialized");
|
||||
JS::RootedObject global(cx, GLOBAL);
|
||||
JSAutoRealm autoRealm(cx, global);
|
||||
|
||||
JS::RootedObject module(cx);
|
||||
if (!compile_js_module(cx, jsFileName, jsSource, jsSourceLen, &module)) {
|
||||
abort(cx, "module compilation failed");
|
||||
}
|
||||
|
||||
if (!JS::ModuleInstantiate(cx, module)) {
|
||||
abort(cx, "failed to instantiate module");
|
||||
}
|
||||
|
||||
JS::RootedValue result(cx);
|
||||
if (!JS::ModuleEvaluate(cx, module, &result)) {
|
||||
abort(cx, "failed to evaluate module");
|
||||
}
|
||||
|
||||
// TODO: if `result` is a promise because of top-level await, then don't
|
||||
// return until the micro task queue is empty.
|
||||
if (result.isObject()) {
|
||||
JS::RootedObject resultObj(cx, &result.toObject());
|
||||
if (!JS::IsPromiseObject(resultObj)) {
|
||||
goto done_handling_promise;
|
||||
}
|
||||
switch (JS::GetPromiseState(resultObj)) {
|
||||
case JS::PromiseState::Fulfilled: {
|
||||
JS::RootedValue promiseResolution(cx, JS::GetPromiseResult(resultObj));
|
||||
break;
|
||||
}
|
||||
case JS::PromiseState::Rejected: {
|
||||
JS::RootedValue promiseRejection(cx, JS::GetPromiseResult(resultObj));
|
||||
JS_SetPendingException(cx, promiseRejection);
|
||||
abort(cx, "module evaluation failed");
|
||||
}
|
||||
case JS::PromiseState::Pending: {
|
||||
abort(cx, "module evaluation returned a pending promise, but top-level await isn't enabled yet");
|
||||
}
|
||||
default:
|
||||
abort(cx, "module evaluation returned a promise in an unknown promise state");
|
||||
}
|
||||
}
|
||||
|
||||
done_handling_promise:
|
||||
|
||||
init_user_module(cx, module);
|
||||
|
||||
JS::PrepareForFullGC(cx);
|
||||
JS::NonIncrementalGC(cx, JS::GCOptions::Shrink, JS::GCReason::API);
|
||||
|
||||
free(jsFileName);
|
||||
}
|
||||
|
||||
} // namespace smw
|
||||
@@ -0,0 +1,18 @@
|
||||
#include <stdint.h>
|
||||
#include <stdlib.h>
|
||||
|
||||
#include "smw/abort.h"
|
||||
#include "smw/wasm.h"
|
||||
|
||||
namespace smw {
|
||||
|
||||
WASM_EXPORT
|
||||
void* SMW_malloc(size_t size) {
|
||||
auto p = malloc(size);
|
||||
if (p == nullptr) {
|
||||
abort("out of memory");
|
||||
}
|
||||
return p;
|
||||
}
|
||||
|
||||
} // namespace smw
|
||||
@@ -0,0 +1,27 @@
|
||||
WASI_SDK="$(pwd)/../wasi-sdk-12.0"
|
||||
|
||||
CC="$WASI_SDK/bin/clang --sysroot=$WASI_SDK/share/wasi-sysroot"
|
||||
CXX="$WASI_SDK/bin/clang++ --sysroot=$WASI_SDK/share/wasi-sysroot"
|
||||
AR="$WASI_SDK/bin/ar"
|
||||
|
||||
HOST_CC=gcc
|
||||
HOST_CXX=g++
|
||||
|
||||
ac_add_options --enable-application=js
|
||||
ac_add_options --target=wasm32-unknown-wasi
|
||||
|
||||
ac_add_options --without-system-zlib
|
||||
ac_add_options --without-intl-api
|
||||
|
||||
ac_add_options --disable-jit
|
||||
ac_add_options --disable-shared-js
|
||||
ac_add_options --disable-shared-memory
|
||||
|
||||
# Comment out `--enable-optimize` and `--disable-debug` and then uncomment
|
||||
# `--enable-debug` to switch to debug builds of SpiderMonkey. Also update
|
||||
# `DEFINE=` in the `Makefile`.
|
||||
ac_add_options --enable-optimize
|
||||
ac_add_options --disable-debug
|
||||
# ac_add_options --enable-debug
|
||||
|
||||
mk_add_options AUTOCLOBBER=1
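# Illustrative usage (an assumption about the workflow, not documented here):
# point MOZCONFIG at this file from a SpiderMonkey checkout and build with mach,
# e.g.
#
#   MOZCONFIG=/path/to/this/mozconfig ./mach build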
|
||||
@@ -0,0 +1,72 @@
|
||||
//! Assigning static locations for data segments we will emit in the glue Wasm
|
||||
//! module.
|
||||
|
||||
use std::convert::TryFrom;
|
||||
|
||||
#[derive(Debug)]
|
||||
pub struct DataSegments {
|
||||
data: wasm_encoder::DataSection,
|
||||
next_offset: u32,
|
||||
memory: u32,
|
||||
}
|
||||
|
||||
impl DataSegments {
|
||||
/// Create a new collection of data segments for the given memory.
|
||||
pub fn new(memory: u32) -> DataSegments {
|
||||
DataSegments {
|
||||
data: wasm_encoder::DataSection::new(),
|
||||
next_offset: 0,
|
||||
memory,
|
||||
}
|
||||
}
|
||||
|
||||
/// Add a new segment to this `DataSegments`, returning the assigned offset
|
||||
/// in memory.
|
||||
pub fn add<S>(&mut self, segment: S) -> u32
|
||||
where
|
||||
S: IntoIterator<Item = u8>,
|
||||
S::IntoIter: ExactSizeIterator,
|
||||
{
|
||||
let segment = segment.into_iter();
|
||||
let offset = self.reserve_space(u32::try_from(segment.len()).unwrap());
|
||||
self.data.active(
|
||||
self.memory,
|
||||
&wasm_encoder::Instruction::I32Const(offset as i32),
|
||||
segment,
|
||||
);
|
||||
offset
|
||||
}
|
||||
|
||||
/// Reserve space in memory but don't emit any data segment to initialize
|
||||
/// it.
|
||||
///
|
||||
/// This effectively lets you add zero-initialized data segments, reserve
|
||||
/// space for return pointer areas, or define shadow stack regions.
|
||||
pub fn reserve_space(&mut self, num_bytes: u32) -> u32 {
|
||||
// Leave an empty byte between each data segment. This helps when
|
||||
// staring at disassemblies and heap dumps.
|
||||
self.next_offset += 1;
|
||||
|
||||
let offset = self.next_offset;
|
||||
self.next_offset += num_bytes;
|
||||
|
||||
offset
|
||||
}
|
||||
|
||||
/// Get the memory type required to hold these data segments.
|
||||
pub fn memory_type(&self) -> wasm_encoder::MemoryType {
|
||||
const WASM_PAGE_SIZE: u32 = 65_536;
|
||||
wasm_encoder::MemoryType {
|
||||
minimum: ((self.next_offset + WASM_PAGE_SIZE - 1) / WASM_PAGE_SIZE).into(),
|
||||
maximum: None,
|
||||
memory64: false,
|
||||
}
|
||||
}
|
||||
|
||||
/// Take the constructed data section.
|
||||
///
|
||||
/// No more data segments should be added after this is called.
|
||||
pub fn take_data(&mut self) -> wasm_encoder::DataSection {
|
||||
std::mem::replace(&mut self.data, wasm_encoder::DataSection::new())
|
||||
}
|
||||
}
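// A minimal usage sketch (illustrative only; relies on the `wasm_encoder`
// types used above):
//
//     let mut segments = DataSegments::new(0);
//     let greeting = segments.add(b"hello".iter().copied()); // active segment at `greeting`
//     let ret_area = segments.reserve_space(8);              // space only, no segment emitted
//     let memory = segments.memory_type();                   // sized to cover both
//     let data = segments.take_data();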
|
||||
@@ -0,0 +1,31 @@
|
||||
use std::path::Path;
|
||||
|
||||
mod imports {
|
||||
test_helpers::codegen_spidermonkey_import!(
|
||||
// TODO: should support more of the `*.wit` test suite
|
||||
"strings.wit"
|
||||
"simple-lists.wit"
|
||||
"simple-functions.wit"
|
||||
);
|
||||
}
|
||||
|
||||
mod exports {
|
||||
test_helpers::codegen_spidermonkey_export!(
|
||||
// TODO: should support more of the `*.wit` test suite
|
||||
"strings.wit"
|
||||
"simple-lists.wit"
|
||||
"simple-functions.wit"
|
||||
);
|
||||
}
|
||||
|
||||
fn verify(dir: &str, _name: &str) {
|
||||
let wasm = std::fs::read(Path::new(dir).join("foo.wasm")).unwrap();
|
||||
let mut validator = wasmparser::Validator::new();
|
||||
validator.wasm_features(wasmparser::WasmFeatures {
|
||||
bulk_memory: true,
|
||||
module_linking: true,
|
||||
multi_memory: true,
|
||||
..wasmparser::WasmFeatures::default()
|
||||
});
|
||||
validator.validate_all(&wasm).expect("wasm isn't valid");
|
||||
}
|
||||
@@ -0,0 +1,13 @@
|
||||
[package]
|
||||
name = "wit-bindgen-gen-wasmtime-py"
|
||||
version = "0.1.0"
|
||||
authors = ["Alex Crichton <alex@alexcrichton.com>"]
|
||||
edition = "2018"
|
||||
|
||||
[dependencies]
|
||||
wit-bindgen-gen-core = { path = '../gen-core', version = '0.1.0' }
|
||||
heck = "0.3"
|
||||
structopt = { version = "0.3", default-features = false, optional = true }
|
||||
|
||||
[dev-dependencies]
|
||||
test-helpers = { path = '../test-helpers', features = ['wit-bindgen-gen-wasmtime-py'] }
|
||||
@@ -0,0 +1,4 @@
|
||||
fn main() {
|
||||
println!("cargo:rerun-if-changed=build.rs");
|
||||
// this build script is currently only here so OUT_DIR is set for testing.
|
||||
}
|
||||
@@ -0,0 +1,13 @@
|
||||
[mypy]
|
||||
|
||||
disallow_any_unimported = True
|
||||
|
||||
disallow_untyped_calls = True
|
||||
disallow_untyped_defs = True
|
||||
disallow_incomplete_defs = True
|
||||
check_untyped_defs = True
|
||||
disallow_untyped_decorators = True
|
||||
strict_optional = True
|
||||
|
||||
warn_return_any = True
|
||||
warn_unused_configs = True
|
||||
@@ -0,0 +1,368 @@
|
||||
use crate::Source;
|
||||
use std::collections::{BTreeMap, BTreeSet};
|
||||
|
||||
/// Tracks all of the imports and intrinsics that a given codegen
|
||||
/// requires and how to generate them when needed.
|
||||
#[derive(Default)]
|
||||
pub struct Dependencies {
|
||||
pub needs_clamp: bool,
|
||||
pub needs_store: bool,
|
||||
pub needs_load: bool,
|
||||
pub needs_validate_guest_char: bool,
|
||||
pub needs_expected: bool,
|
||||
pub needs_i32_to_f32: bool,
|
||||
pub needs_f32_to_i32: bool,
|
||||
pub needs_i64_to_f64: bool,
|
||||
pub needs_f64_to_i64: bool,
|
||||
pub needs_decode_utf8: bool,
|
||||
pub needs_encode_utf8: bool,
|
||||
pub needs_list_canon_lift: bool,
|
||||
pub needs_list_canon_lower: bool,
|
||||
pub needs_t_typevar: bool,
|
||||
pub needs_resources: bool,
|
||||
pub pyimports: BTreeMap<String, Option<BTreeSet<String>>>,
|
||||
}
|
||||
|
||||
impl Dependencies {
|
||||
/// Record that a Python import is required
|
||||
///
|
||||
/// Examples
|
||||
/// ```
|
||||
/// # use wit_bindgen_gen_wasmtime_py::dependencies::Dependencies;
|
||||
/// # let mut deps = Dependencies::default();
|
||||
/// // Import a specific item from a module
|
||||
/// deps.pyimport("typing", "NamedTuple");
|
||||
/// // Import an entire module
|
||||
/// deps.pyimport("collections", None);
|
||||
/// ```
|
||||
pub fn pyimport<'a>(&mut self, module: &str, name: impl Into<Option<&'a str>>) {
|
||||
let name = name.into();
|
||||
let list = self
|
||||
.pyimports
|
||||
.entry(module.to_string())
|
||||
.or_insert(match name {
|
||||
Some(_) => Some(BTreeSet::new()),
|
||||
None => None,
|
||||
});
|
||||
match name {
|
||||
Some(name) => {
|
||||
assert!(list.is_some());
|
||||
list.as_mut().unwrap().insert(name.to_string());
|
||||
}
|
||||
None => assert!(list.is_none()),
|
||||
}
|
||||
}
|
||||
|
||||
/// Create a `Source` containing all of the intrinsics
|
||||
/// required according to this `Dependencies` struct.
|
||||
pub fn intrinsics(&mut self) -> Source {
|
||||
let mut src = Source::default();
|
||||
|
||||
if self.needs_clamp {
|
||||
src.push_str(
|
||||
"
|
||||
def _clamp(i: int, min: int, max: int) -> int:
|
||||
if i < min or i > max:
|
||||
raise OverflowError(f'must be between {min} and {max}')
|
||||
return i
|
||||
",
|
||||
);
|
||||
}
|
||||
if self.needs_store {
|
||||
// TODO: this uses native endianness
|
||||
self.pyimport("ctypes", None);
|
||||
src.push_str(
|
||||
"
|
||||
def _store(ty: Any, mem: wasmtime.Memory, store: wasmtime.Storelike, base: int, offset: int, val: Any) -> None:
|
||||
ptr = (base & 0xffffffff) + offset
|
||||
if ptr + ctypes.sizeof(ty) > mem.data_len(store):
|
||||
raise IndexError('out-of-bounds store')
|
||||
raw_base = mem.data_ptr(store)
|
||||
c_ptr = ctypes.POINTER(ty)(
|
||||
ty.from_address(ctypes.addressof(raw_base.contents) + ptr)
|
||||
)
|
||||
c_ptr[0] = val
|
||||
",
|
||||
);
|
||||
}
|
||||
if self.needs_load {
|
||||
// TODO: this uses native endianness
|
||||
self.pyimport("ctypes", None);
|
||||
src.push_str(
|
||||
"
|
||||
def _load(ty: Any, mem: wasmtime.Memory, store: wasmtime.Storelike, base: int, offset: int) -> Any:
|
||||
ptr = (base & 0xffffffff) + offset
|
||||
if ptr + ctypes.sizeof(ty) > mem.data_len(store):
|
||||
raise IndexError('out-of-bounds load')
|
||||
raw_base = mem.data_ptr(store)
|
||||
c_ptr = ctypes.POINTER(ty)(
|
||||
ty.from_address(ctypes.addressof(raw_base.contents) + ptr)
|
||||
)
|
||||
return c_ptr[0]
|
||||
",
|
||||
);
|
||||
}
|
||||
if self.needs_validate_guest_char {
|
||||
src.push_str(
|
||||
"
|
||||
def _validate_guest_char(i: int) -> str:
|
||||
if i > 0x10ffff or (i >= 0xd800 and i <= 0xdfff):
|
||||
raise TypeError('not a valid char')
|
||||
return chr(i)
|
||||
",
|
||||
);
|
||||
}
|
||||
if self.needs_expected {
|
||||
self.pyimport("dataclasses", "dataclass");
|
||||
self.pyimport("typing", "TypeVar");
|
||||
self.pyimport("typing", "Generic");
|
||||
self.pyimport("typing", "Union");
|
||||
self.needs_t_typevar = true;
|
||||
src.push_str(
|
||||
"
|
||||
@dataclass
|
||||
class Ok(Generic[T]):
|
||||
value: T
|
||||
E = TypeVar('E')
|
||||
@dataclass
|
||||
class Err(Generic[E]):
|
||||
value: E
|
||||
|
||||
Expected = Union[Ok[T], Err[E]]
|
||||
",
|
||||
);
|
||||
}
|
||||
if self.needs_i32_to_f32 || self.needs_f32_to_i32 {
|
||||
self.pyimport("ctypes", None);
|
||||
src.push_str("_i32_to_f32_i32 = ctypes.pointer(ctypes.c_int32(0))\n");
|
||||
src.push_str(
|
||||
"_i32_to_f32_f32 = ctypes.cast(_i32_to_f32_i32, ctypes.POINTER(ctypes.c_float))\n",
|
||||
);
|
||||
if self.needs_i32_to_f32 {
|
||||
src.push_str(
|
||||
"
|
||||
def _i32_to_f32(i: int) -> float:
|
||||
_i32_to_f32_i32[0] = i # type: ignore
|
||||
return _i32_to_f32_f32[0] # type: ignore
|
||||
",
|
||||
);
|
||||
}
|
||||
if self.needs_f32_to_i32 {
|
||||
src.push_str(
|
||||
"
|
||||
def _f32_to_i32(i: float) -> int:
|
||||
_i32_to_f32_f32[0] = i # type: ignore
|
||||
return _i32_to_f32_i32[0] # type: ignore
|
||||
",
|
||||
);
|
||||
}
|
||||
}
|
||||
if self.needs_i64_to_f64 || self.needs_f64_to_i64 {
|
||||
self.pyimport("ctypes", None);
|
||||
src.push_str("_i64_to_f64_i64 = ctypes.pointer(ctypes.c_int64(0))\n");
|
||||
src.push_str(
|
||||
"_i64_to_f64_f64 = ctypes.cast(_i64_to_f64_i64, ctypes.POINTER(ctypes.c_double))\n",
|
||||
);
|
||||
if self.needs_i64_to_f64 {
|
||||
src.push_str(
|
||||
"
|
||||
def _i64_to_f64(i: int) -> float:
|
||||
_i64_to_f64_i64[0] = i # type: ignore
|
||||
return _i64_to_f64_f64[0] # type: ignore
|
||||
",
|
||||
);
|
||||
}
|
||||
if self.needs_f64_to_i64 {
|
||||
src.push_str(
|
||||
"
|
||||
def _f64_to_i64(i: float) -> int:
|
||||
_i64_to_f64_f64[0] = i # type: ignore
|
||||
return _i64_to_f64_i64[0] # type: ignore
|
||||
",
|
||||
);
|
||||
}
|
||||
}
|
||||
if self.needs_decode_utf8 {
|
||||
self.pyimport("ctypes", None);
|
||||
src.push_str(
|
||||
"
|
||||
def _decode_utf8(mem: wasmtime.Memory, store: wasmtime.Storelike, ptr: int, len: int) -> str:
|
||||
ptr = ptr & 0xffffffff
|
||||
len = len & 0xffffffff
|
||||
if ptr + len > mem.data_len(store):
|
||||
raise IndexError('string out of bounds')
|
||||
base = mem.data_ptr(store)
|
||||
base = ctypes.POINTER(ctypes.c_ubyte)(
|
||||
ctypes.c_ubyte.from_address(ctypes.addressof(base.contents) + ptr)
|
||||
)
|
||||
return ctypes.string_at(base, len).decode('utf-8')
|
||||
",
|
||||
);
|
||||
}
|
||||
if self.needs_encode_utf8 {
|
||||
self.pyimport("ctypes", None);
|
||||
self.pyimport("typing", "Tuple");
|
||||
src.push_str(
|
||||
"
|
||||
def _encode_utf8(val: str, realloc: wasmtime.Func, mem: wasmtime.Memory, store: wasmtime.Storelike) -> Tuple[int, int]:
|
||||
bytes = val.encode('utf8')
|
||||
ptr = realloc(store, 0, 0, 1, len(bytes))
|
||||
assert(isinstance(ptr, int))
|
||||
ptr = ptr & 0xffffffff
|
||||
if ptr + len(bytes) > mem.data_len(store):
|
||||
raise IndexError('string out of bounds')
|
||||
base = mem.data_ptr(store)
|
||||
base = ctypes.POINTER(ctypes.c_ubyte)(
|
||||
ctypes.c_ubyte.from_address(ctypes.addressof(base.contents) + ptr)
|
||||
)
|
||||
ctypes.memmove(base, bytes, len(bytes))
|
||||
return (ptr, len(bytes))
|
||||
",
|
||||
);
|
||||
}
|
||||
if self.needs_list_canon_lift {
|
||||
self.pyimport("ctypes", None);
|
||||
self.pyimport("typing", "List");
|
||||
// TODO: this is doing a native-endian read, not a little-endian
|
||||
// read
|
||||
src.push_str(
|
||||
"
|
||||
def _list_canon_lift(ptr: int, len: int, size: int, ty: Any, mem: wasmtime.Memory ,store: wasmtime.Storelike) -> Any:
|
||||
ptr = ptr & 0xffffffff
|
||||
len = len & 0xffffffff
|
||||
if ptr + len * size > mem.data_len(store):
|
||||
raise IndexError('list out of bounds')
|
||||
raw_base = mem.data_ptr(store)
|
||||
base = ctypes.POINTER(ty)(
|
||||
ty.from_address(ctypes.addressof(raw_base.contents) + ptr)
|
||||
)
|
||||
if ty == ctypes.c_uint8:
|
||||
return ctypes.string_at(base, len)
|
||||
return base[:len]
|
||||
",
|
||||
);
|
||||
}
|
||||
if self.needs_list_canon_lower {
|
||||
self.pyimport("ctypes", None);
|
||||
self.pyimport("typing", "List");
|
||||
self.pyimport("typing", "Tuple");
|
||||
// TODO: is there a faster way to memcpy other than iterating over
|
||||
// the input list?
|
||||
// TODO: this is doing a native-endian write, not a little-endian
|
||||
// write
|
||||
src.push_str(
|
||||
"
|
||||
def _list_canon_lower(list: Any, ty: Any, size: int, align: int, realloc: wasmtime.Func, mem: wasmtime.Memory, store: wasmtime.Storelike) -> Tuple[int, int]:
|
||||
total_size = size * len(list)
|
||||
ptr = realloc(store, 0, 0, align, total_size)
|
||||
assert(isinstance(ptr, int))
|
||||
ptr = ptr & 0xffffffff
|
||||
if ptr + total_size > mem.data_len(store):
|
||||
raise IndexError('list realloc out of bounds')
|
||||
raw_base = mem.data_ptr(store)
|
||||
base = ctypes.POINTER(ty)(
|
||||
ty.from_address(ctypes.addressof(raw_base.contents) + ptr)
|
||||
)
|
||||
for i, val in enumerate(list):
|
||||
base[i] = val
|
||||
return (ptr, len(list))
|
||||
",
|
||||
);
|
||||
}
|
||||
|
||||
if self.needs_resources {
|
||||
self.pyimport("typing", "TypeVar");
|
||||
self.pyimport("typing", "Generic");
|
||||
self.pyimport("typing", "List");
|
||||
self.pyimport("typing", "Optional");
|
||||
self.pyimport("dataclasses", "dataclass");
|
||||
self.needs_t_typevar = true;
|
||||
src.push_str(
|
||||
"
|
||||
@dataclass
|
||||
class SlabEntry(Generic[T]):
|
||||
next: int
|
||||
val: Optional[T]
|
||||
|
||||
class Slab(Generic[T]):
|
||||
head: int
|
||||
list: List[SlabEntry[T]]
|
||||
|
||||
def __init__(self) -> None:
|
||||
self.list = []
|
||||
self.head = 0
|
||||
|
||||
def insert(self, val: T) -> int:
|
||||
if self.head >= len(self.list):
|
||||
self.list.append(SlabEntry(next = len(self.list) + 1, val = None))
|
||||
ret = self.head
|
||||
slot = self.list[ret]
|
||||
self.head = slot.next
|
||||
slot.next = -1
|
||||
slot.val = val
|
||||
return ret
|
||||
|
||||
def get(self, idx: int) -> T:
|
||||
if idx >= len(self.list):
|
||||
raise IndexError('handle index not valid')
|
||||
slot = self.list[idx]
|
||||
if slot.next == -1:
|
||||
assert(slot.val is not None)
|
||||
return slot.val
|
||||
raise IndexError('handle index not valid')
|
||||
|
||||
def remove(self, idx: int) -> T:
|
||||
ret = self.get(idx)
|
||||
slot = self.list[idx]
|
||||
slot.val = None
|
||||
slot.next = self.head
|
||||
self.head = idx
|
||||
return ret
|
||||
",
|
||||
);
|
||||
}
|
||||
src
|
||||
}
|
||||
}
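// Illustrative usage (hypothetical driver code; a generator would normally set
// these flags while emitting function bodies):
//
//     let mut deps = Dependencies::default();
//     deps.needs_clamp = true;
//     deps.pyimport("typing", "Tuple");
//     let helpers = deps.intrinsics(); // Python source for `_clamp`; imports recorded in `deps.pyimports`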
|
||||
|
||||
#[cfg(test)]
|
||||
mod test {
|
||||
use std::collections::{BTreeMap, BTreeSet};
|
||||
|
||||
use super::Dependencies;
|
||||
|
||||
#[test]
|
||||
fn test_pyimport_only_contents() {
|
||||
let mut deps = Dependencies::default();
|
||||
deps.pyimport("typing", None);
|
||||
deps.pyimport("typing", None);
|
||||
assert_eq!(deps.pyimports, BTreeMap::from([("typing".into(), None)]));
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_pyimport_only_module() {
|
||||
let mut deps = Dependencies::default();
|
||||
deps.pyimport("typing", "Union");
|
||||
deps.pyimport("typing", "List");
|
||||
deps.pyimport("typing", "NamedTuple");
|
||||
assert_eq!(
|
||||
deps.pyimports,
|
||||
BTreeMap::from([(
|
||||
"typing".into(),
|
||||
Some(BTreeSet::from([
|
||||
"Union".into(),
|
||||
"List".into(),
|
||||
"NamedTuple".into()
|
||||
]))
|
||||
)])
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
#[should_panic]
|
||||
fn test_pyimport_conflicting() {
|
||||
let mut deps = Dependencies::default();
|
||||
deps.pyimport("typing", "NamedTuple");
|
||||
deps.pyimport("typing", None);
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,501 @@
|
||||
use heck::*;
|
||||
use wit_bindgen_gen_core::wit_parser::*;
|
||||
|
||||
use crate::dependencies::Dependencies;
|
||||
|
||||
/// A [Source] represents some unit of Python code
|
||||
/// and keeps track of its indent.
|
||||
#[derive(Default)]
|
||||
pub struct Source {
|
||||
s: String,
|
||||
indent: usize,
|
||||
}
|
||||
|
||||
impl Source {
|
||||
/// Appends a string slice to this [Source].
|
||||
///
|
||||
/// Strings without newlines are simply appended.
/// Strings with newlines are appended line by line, and each new line is
/// indented to the current indent level.
|
||||
pub fn push_str(&mut self, src: &str) {
|
||||
let lines = src.lines().collect::<Vec<_>>();
|
||||
let mut trim = None;
|
||||
for (i, line) in lines.iter().enumerate() {
|
||||
self.s.push_str(if lines.len() == 1 {
|
||||
line
|
||||
} else {
|
||||
let trim = match trim {
|
||||
Some(n) => n,
|
||||
None => {
|
||||
let val = line.len() - line.trim_start().len();
|
||||
if !line.is_empty() {
|
||||
trim = Some(val);
|
||||
}
|
||||
val
|
||||
}
|
||||
};
|
||||
line.get(trim..).unwrap_or("")
|
||||
});
|
||||
if i != lines.len() - 1 || src.ends_with("\n") {
|
||||
self.newline();
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// Prints the documentation as comments
|
||||
/// e.g.
|
||||
/// > \# Line one of docs node
|
||||
/// >
|
||||
/// > \# Line two of docs node
|
||||
pub fn comment(&mut self, docs: &Docs) {
|
||||
let docs = match &docs.contents {
|
||||
Some(docs) => docs,
|
||||
None => return,
|
||||
};
|
||||
for line in docs.lines() {
|
||||
self.push_str(&format!("# {}\n", line));
|
||||
}
|
||||
}
|
||||
|
||||
/// Prints the documentation as a docstring
|
||||
/// e.g.
|
||||
/// > """
|
||||
/// >
|
||||
/// > Line one of docs node
|
||||
/// >
|
||||
/// > Line two of docs node
|
||||
/// >
|
||||
/// > """
|
||||
pub fn docstring(&mut self, docs: &Docs) {
|
||||
let docs = match &docs.contents {
|
||||
Some(docs) => docs,
|
||||
None => return,
|
||||
};
|
||||
let triple_quote = r#"""""#;
|
||||
self.push_str(triple_quote);
|
||||
self.newline();
|
||||
for line in docs.lines() {
|
||||
self.push_str(line);
|
||||
self.newline();
|
||||
}
|
||||
self.push_str(triple_quote);
|
||||
self.newline();
|
||||
}
|
||||
|
||||
/// Indent the source one level.
|
||||
pub fn indent(&mut self) {
|
||||
self.indent += 4;
|
||||
self.s.push_str(" ");
|
||||
}
|
||||
|
||||
/// Unindent, or in Python terms "dedent",
|
||||
/// the source one level.
|
||||
pub fn dedent(&mut self) {
|
||||
self.indent -= 4;
|
||||
assert!(self.s.ends_with(" "));
|
||||
self.s.pop();
|
||||
self.s.pop();
|
||||
self.s.pop();
|
||||
self.s.pop();
|
||||
}
|
||||
|
||||
/// Go to the next line and apply any indent.
|
||||
pub fn newline(&mut self) {
|
||||
self.s.push_str("\n");
|
||||
for _ in 0..self.indent {
|
||||
self.s.push_str(" ");
|
||||
}
|
||||
}
|
||||
}
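// Illustrative usage (consistent with the tests at the bottom of this file):
//
//     let mut s = Source::default();
//     s.push_str("def foo():\n");
//     s.indent();
//     s.push_str("return 1\n");
//     s.dedent();
//     // `&*s` is now "def foo():\n    return 1\n"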
|
||||
|
||||
impl std::ops::Deref for Source {
|
||||
type Target = str;
|
||||
fn deref(&self) -> &str {
|
||||
&self.s
|
||||
}
|
||||
}
|
||||
|
||||
impl From<Source> for String {
|
||||
fn from(s: Source) -> String {
|
||||
s.s
|
||||
}
|
||||
}
|
||||
|
||||
/// [SourceBuilder] combines together a [Source]
|
||||
/// with other contextual information and state.
|
||||
///
|
||||
/// This allows you to generate code for the Source using
|
||||
/// high-level tools that take care of updating dependencies
|
||||
/// and retrieving interface details.
|
||||
///
|
||||
/// You can create a [SourceBuilder] easily using a [Source]
|
||||
/// ```
|
||||
/// # use wit_bindgen_gen_wasmtime_py::dependencies::Dependencies;
|
||||
/// # use wit_bindgen_gen_core::wit_parser::{Interface, Type};
|
||||
/// # use wit_bindgen_gen_wasmtime_py::source::Source;
|
||||
/// # let mut deps = Dependencies::default();
|
||||
/// # let mut interface = Interface::default();
|
||||
/// # let iface = &interface;
|
||||
/// let mut source = Source::default();
|
||||
/// let mut builder = source.builder(&mut deps, iface);
|
||||
/// builder.print_ty(&Type::Bool, false);
|
||||
/// ```
|
||||
pub struct SourceBuilder<'s, 'd, 'i> {
|
||||
source: &'s mut Source,
|
||||
pub deps: &'d mut Dependencies,
|
||||
iface: &'i Interface,
|
||||
}
|
||||
|
||||
impl<'s, 'd, 'i> Source {
|
||||
/// Create a [SourceBuilder] for the current source.
|
||||
pub fn builder(
|
||||
&'s mut self,
|
||||
deps: &'d mut Dependencies,
|
||||
iface: &'i Interface,
|
||||
) -> SourceBuilder<'s, 'd, 'i> {
|
||||
SourceBuilder {
|
||||
source: self,
|
||||
deps,
|
||||
iface,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl<'s, 'd, 'i> SourceBuilder<'s, 'd, 'i> {
|
||||
/// See [Dependencies::pyimport].
|
||||
pub fn pyimport<'a>(&mut self, module: &str, name: impl Into<Option<&'a str>>) {
|
||||
self.deps.pyimport(module, name)
|
||||
}
|
||||
|
||||
/// Appends a type's Python representation to this `Source`.
|
||||
/// Records any required intrinsics and imports in the `deps`.
|
||||
/// Uses Python forward reference syntax (e.g. 'Foo')
|
||||
/// on the root type only if `forward_ref` is true.
|
||||
pub fn print_ty(&mut self, ty: &Type, forward_ref: bool) {
|
||||
match ty {
|
||||
Type::Unit => self.push_str("None"),
|
||||
Type::Bool => self.push_str("bool"),
|
||||
Type::U8
|
||||
| Type::S8
|
||||
| Type::U16
|
||||
| Type::S16
|
||||
| Type::U32
|
||||
| Type::S32
|
||||
| Type::U64
|
||||
| Type::S64 => self.push_str("int"),
|
||||
Type::Float32 | Type::Float64 => self.push_str("float"),
|
||||
Type::Char => self.push_str("str"),
|
||||
Type::String => self.push_str("str"),
|
||||
Type::Handle(id) => {
|
||||
if forward_ref {
|
||||
self.push_str("'");
|
||||
}
|
||||
let handle_name = &self.iface.resources[*id].name.to_camel_case();
|
||||
self.source.push_str(handle_name);
|
||||
if forward_ref {
|
||||
self.push_str("'");
|
||||
}
|
||||
}
|
||||
Type::Id(id) => {
|
||||
let ty = &self.iface.types[*id];
|
||||
if let Some(name) = &ty.name {
|
||||
self.push_str(&name.to_camel_case());
|
||||
return;
|
||||
}
|
||||
match &ty.kind {
|
||||
TypeDefKind::Type(t) => self.print_ty(t, forward_ref),
|
||||
TypeDefKind::Tuple(t) => self.print_tuple(t),
|
||||
TypeDefKind::Record(_)
|
||||
| TypeDefKind::Flags(_)
|
||||
| TypeDefKind::Enum(_)
|
||||
| TypeDefKind::Variant(_)
|
||||
| TypeDefKind::Union(_) => {
|
||||
unreachable!()
|
||||
}
|
||||
TypeDefKind::Option(t) => {
|
||||
self.deps.pyimport("typing", "Optional");
|
||||
self.push_str("Optional[");
|
||||
self.print_ty(t, true);
|
||||
self.push_str("]");
|
||||
}
|
||||
TypeDefKind::Expected(e) => {
|
||||
self.deps.needs_expected = true;
|
||||
self.push_str("Expected[");
|
||||
self.print_ty(&e.ok, true);
|
||||
self.push_str(", ");
|
||||
self.print_ty(&e.err, true);
|
||||
self.push_str("]");
|
||||
}
|
||||
TypeDefKind::List(t) => self.print_list(t),
|
||||
TypeDefKind::Future(t) => {
|
||||
self.push_str("Future[");
|
||||
self.print_ty(t, true);
|
||||
self.push_str("]");
|
||||
}
|
||||
TypeDefKind::Stream(s) => {
|
||||
self.push_str("Stream[");
|
||||
self.print_ty(&s.element, true);
|
||||
self.push_str(", ");
|
||||
self.print_ty(&s.end, true);
|
||||
self.push_str("]");
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// Appends a tuple type's Python representation to this `Source`.
|
||||
/// Records any required intrinsics and imports in the `deps`.
|
||||
/// Uses Python forward reference syntax (e.g. 'Foo') for named type parameters.
|
||||
pub fn print_tuple(&mut self, tuple: &Tuple) {
|
||||
if tuple.types.is_empty() {
|
||||
return self.push_str("None");
|
||||
}
|
||||
self.deps.pyimport("typing", "Tuple");
|
||||
self.push_str("Tuple[");
|
||||
for (i, t) in tuple.types.iter().enumerate() {
|
||||
if i > 0 {
|
||||
self.push_str(", ");
|
||||
}
|
||||
self.print_ty(t, true);
|
||||
}
|
||||
self.push_str("]");
|
||||
}
|
||||
|
||||
/// Appends a Python type representing a sequence of the `element` type to this `Source`.
|
||||
/// If the element type is `Type::U8`, the result type is `bytes`, otherwise it is a `List[T]`.
|
||||
/// Records any required intrinsics and imports in the `deps`.
|
||||
/// Uses Python forward reference syntax (e.g. 'Foo') for named type parameters.
|
||||
pub fn print_list(&mut self, element: &Type) {
|
||||
match element {
|
||||
Type::U8 => self.push_str("bytes"),
|
||||
t => {
|
||||
self.deps.pyimport("typing", "List");
|
||||
self.push_str("List[");
|
||||
self.print_ty(t, true);
|
||||
self.push_str("]");
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// Print variable declaration.
|
||||
/// Brings name into scope and binds type to it.
|
||||
pub fn print_var_declaration<'a>(&mut self, name: &'a str, ty: &Type) {
|
||||
self.push_str(name);
|
||||
self.push_str(": ");
|
||||
self.print_ty(ty, true);
|
||||
self.push_str("\n");
|
||||
}
|
||||
|
||||
pub fn print_sig(&mut self, func: &Function, in_import: bool) -> Vec<String> {
|
||||
if !in_import {
|
||||
if let FunctionKind::Static { .. } = func.kind {
|
||||
self.push_str("@classmethod\n");
|
||||
}
|
||||
}
|
||||
self.source.push_str("def ");
|
||||
match &func.kind {
|
||||
FunctionKind::Method { .. } => self.source.push_str(&func.item_name().to_snake_case()),
|
||||
FunctionKind::Static { .. } if !in_import => {
|
||||
self.source.push_str(&func.item_name().to_snake_case())
|
||||
}
|
||||
_ => self.source.push_str(&func.name.to_snake_case()),
|
||||
}
|
||||
if in_import {
|
||||
self.source.push_str("(self");
|
||||
} else if let FunctionKind::Static { .. } = func.kind {
|
||||
self.source.push_str("(cls, caller: wasmtime.Store, obj: '");
|
||||
self.source.push_str(&self.iface.name.to_camel_case());
|
||||
self.source.push_str("'");
|
||||
} else {
|
||||
self.source.push_str("(self, caller: wasmtime.Store");
|
||||
}
|
||||
let mut params = Vec::new();
|
||||
for (i, (param, ty)) in func.params.iter().enumerate() {
|
||||
if i == 0 {
|
||||
if let FunctionKind::Method { .. } = func.kind {
|
||||
params.push("self".to_string());
|
||||
continue;
|
||||
}
|
||||
}
|
||||
self.source.push_str(", ");
|
||||
self.source.push_str(¶m.to_snake_case());
|
||||
params.push(param.to_snake_case());
|
||||
self.source.push_str(": ");
|
||||
self.print_ty(ty, true);
|
||||
}
|
||||
self.source.push_str(") -> ");
|
||||
self.print_ty(&func.result, true);
|
||||
params
|
||||
}
|
||||
|
||||
/// Print a wrapped union definition.
|
||||
/// e.g.
|
||||
/// ```py
|
||||
/// @dataclass
|
||||
/// class Foo0:
|
||||
/// value: int
|
||||
///
|
||||
/// @dataclass
|
||||
/// class Foo1:
|
||||
/// value: int
|
||||
///
|
||||
/// Foo = Union[Foo0, Foo1]
|
||||
/// ```
|
||||
pub fn print_union_wrapped(&mut self, name: &str, union: &Union, docs: &Docs) {
|
||||
self.deps.pyimport("dataclasses", "dataclass");
|
||||
let mut cases = Vec::new();
|
||||
let name = name.to_camel_case();
|
||||
for (i, case) in union.cases.iter().enumerate() {
|
||||
self.source.push_str("@dataclass\n");
|
||||
let name = format!("{name}{i}");
|
||||
self.source.push_str(&format!("class {name}:\n"));
|
||||
self.source.indent();
|
||||
self.source.docstring(&case.docs);
|
||||
self.source.push_str("value: ");
|
||||
self.print_ty(&case.ty, true);
|
||||
self.source.newline();
|
||||
self.source.dedent();
|
||||
self.source.newline();
|
||||
cases.push(name);
|
||||
}
|
||||
|
||||
self.deps.pyimport("typing", "Union");
|
||||
self.source.comment(docs);
|
||||
self.source
|
||||
.push_str(&format!("{name} = Union[{}]\n", cases.join(", ")));
|
||||
self.source.newline();
|
||||
}
|
||||
|
||||
pub fn print_union_raw(&mut self, name: &str, union: &Union, docs: &Docs) {
|
||||
self.deps.pyimport("typing", "Union");
|
||||
self.source.comment(docs);
|
||||
for case in union.cases.iter() {
|
||||
self.source.comment(&case.docs);
|
||||
}
|
||||
self.source.push_str(&name.to_camel_case());
|
||||
self.source.push_str(" = Union[");
|
||||
let mut first = true;
|
||||
for case in union.cases.iter() {
|
||||
if !first {
|
||||
self.source.push_str(",");
|
||||
}
|
||||
self.print_ty(&case.ty, true);
|
||||
first = false;
|
||||
}
|
||||
self.source.push_str("]\n\n");
|
||||
}
|
||||
}
|
||||
|
||||
impl<'s, 'd, 'i> std::ops::Deref for SourceBuilder<'s, 'd, 'i> {
|
||||
type Target = Source;
|
||||
fn deref(&self) -> &Source {
|
||||
&self.source
|
||||
}
|
||||
}
|
||||
|
||||
impl<'s, 'd, 'i> std::ops::DerefMut for SourceBuilder<'s, 'd, 'i> {
|
||||
fn deref_mut(&mut self) -> &mut Source {
|
||||
&mut self.source
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use std::collections::{BTreeMap, BTreeSet};
|
||||
|
||||
use super::*;
|
||||
|
||||
#[test]
|
||||
fn simple_append() {
|
||||
let mut s = Source::default();
|
||||
s.push_str("x");
|
||||
assert_eq!(s.s, "x");
|
||||
s.push_str("y");
|
||||
assert_eq!(s.s, "xy");
|
||||
s.push_str("z ");
|
||||
assert_eq!(s.s, "xyz ");
|
||||
s.push_str(" a ");
|
||||
assert_eq!(s.s, "xyz a ");
|
||||
s.push_str("\na");
|
||||
assert_eq!(s.s, "xyz a \na");
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn trim_ws() {
|
||||
let mut s = Source::default();
|
||||
s.push_str("def foo():\n return 1\n");
|
||||
assert_eq!(s.s, "def foo():\n return 1\n");
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn print_ty_forward_ref() {
|
||||
let mut deps = Dependencies::default();
|
||||
let mut iface = Interface::default();
|
||||
// Set up a Resource type to refer to
|
||||
let resource_id = iface.resources.alloc(Resource {
|
||||
docs: Docs::default(),
|
||||
name: "foo".into(),
|
||||
supertype: None,
|
||||
foreign_module: None,
|
||||
});
|
||||
iface.resource_lookup.insert("foo".into(), resource_id);
|
||||
let handle_ty = Type::Handle(resource_id);
|
||||
// ForwardRef usage can be controlled by an argument to print_ty
|
||||
let mut s1 = Source::default();
|
||||
let mut builder = s1.builder(&mut deps, &iface);
|
||||
builder.print_ty(&handle_ty, true);
|
||||
drop(builder);
|
||||
assert_eq!(s1.s, "'Foo'");
|
||||
|
||||
let mut s2 = Source::default();
|
||||
let mut builder = s2.builder(&mut deps, &iface);
|
||||
builder.print_ty(&handle_ty, false);
|
||||
drop(builder);
|
||||
assert_eq!(s2.s, "Foo");
|
||||
|
||||
// ForwardRef is used for any types within other types
|
||||
// Even if the outer type is itself not allowed to be one
|
||||
let option_id = iface.types.alloc(TypeDef {
|
||||
docs: Docs::default(),
|
||||
kind: TypeDefKind::Option(handle_ty),
|
||||
name: None,
|
||||
foreign_module: None,
|
||||
});
|
||||
let option_ty = Type::Id(option_id);
|
||||
let mut s3 = Source::default();
|
||||
let mut builder = s3.builder(&mut deps, &iface);
|
||||
builder.print_ty(&option_ty, false);
|
||||
drop(builder);
|
||||
assert_eq!(s3.s, "Optional['Foo']");
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn print_list_bytes() {
|
||||
// If the element type is u8, it is interpreted as `bytes`
|
||||
let mut deps = Dependencies::default();
|
||||
let iface = Interface::default();
|
||||
let mut source = Source::default();
|
||||
let mut builder = source.builder(&mut deps, &iface);
|
||||
builder.print_list(&Type::U8);
|
||||
drop(builder);
|
||||
assert_eq!(source.s, "bytes");
|
||||
assert_eq!(deps.pyimports, BTreeMap::default());
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn print_list_non_bytes() {
|
||||
// If the element type is not u8, it is interpreted as `List[T]`
|
||||
let mut deps = Dependencies::default();
|
||||
let iface = Interface::default();
|
||||
let mut source = Source::default();
|
||||
let mut builder = source.builder(&mut deps, &iface);
|
||||
builder.print_list(&Type::Float32);
|
||||
drop(builder);
|
||||
assert_eq!(source.s, "List[float]");
|
||||
assert_eq!(
|
||||
deps.pyimports,
|
||||
BTreeMap::from([("typing".into(), Some(BTreeSet::from(["List".into()])))])
|
||||
);
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,51 @@
|
||||
use std::path::Path;
|
||||
use std::process::Command;
|
||||
|
||||
mod exports {
|
||||
test_helpers::codegen_py_export!(
|
||||
"*.wit"
|
||||
|
||||
// TODO: implement async support
|
||||
"!async-functions.wit"
|
||||
);
|
||||
}
|
||||
|
||||
mod imports {
|
||||
test_helpers::codegen_py_import!(
|
||||
"*.wit"
|
||||
|
||||
// TODO: implement async support
|
||||
"!async-functions.wit"
|
||||
|
||||
// This uses buffers, which we don't support in imports just yet
|
||||
// TODO: should support this
|
||||
"!wasi-next.wit"
|
||||
"!host.wit"
|
||||
);
|
||||
}
|
||||
|
||||
fn verify(dir: &str, _name: &str) {
|
||||
let output = Command::new("mypy")
|
||||
.arg(Path::new(dir).join("bindings.py"))
|
||||
.arg("--config-file")
|
||||
.arg("mypy.ini")
|
||||
.output()
|
||||
.expect("failed to run `mypy`; do you have it installed?");
|
||||
if output.status.success() {
|
||||
return;
|
||||
}
|
||||
panic!(
|
||||
"mypy failed
|
||||
|
||||
status: {status}
|
||||
|
||||
stdout ---
|
||||
{stdout}
|
||||
|
||||
stderr ---
|
||||
{stderr}",
|
||||
status = output.status,
|
||||
stdout = String::from_utf8_lossy(&output.stdout).replace("\n", "\n\t"),
|
||||
stderr = String::from_utf8_lossy(&output.stderr).replace("\n", "\n\t"),
|
||||
);
|
||||
}
|
||||
@@ -0,0 +1,75 @@
|
||||
use std::env;
|
||||
use std::fs;
|
||||
use std::path::{Path, PathBuf};
|
||||
use std::process::Command;
|
||||
use wit_bindgen_gen_core::Generator;
|
||||
|
||||
test_helpers::runtime_tests!("py");
|
||||
|
||||
fn execute(name: &str, wasm: &Path, py: &Path, imports: &Path, exports: &Path) {
|
||||
let out_dir = PathBuf::from(env!("OUT_DIR"));
|
||||
let dir = out_dir.join(name);
|
||||
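// Best-effort cleanup of any previous run's output; errors (e.g. the directory not existing yet) are intentionally ignored.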
drop(fs::remove_dir_all(&dir));
|
||||
fs::create_dir_all(&dir).unwrap();
|
||||
fs::create_dir_all(&dir.join("imports")).unwrap();
|
||||
fs::create_dir_all(&dir.join("exports")).unwrap();
|
||||
|
||||
println!("OUT_DIR = {:?}", dir);
|
||||
println!("Generating bindings...");
|
||||
// We call `generate_all` with exports from the imports.wit file, and
|
||||
// imports from the exports.wit file. It's reversed because we're
|
||||
// implementing the host side of these APIs.
|
||||
let iface = wit_bindgen_gen_core::wit_parser::Interface::parse_file(imports).unwrap();
|
||||
let mut files = Default::default();
|
||||
wit_bindgen_gen_wasmtime_py::Opts::default()
|
||||
.build()
|
||||
.generate_all(&[], &[iface], &mut files);
|
||||
for (file, contents) in files.iter() {
|
||||
fs::write(dir.join("imports").join(file), contents).unwrap();
|
||||
}
|
||||
fs::write(dir.join("imports").join("__init__.py"), "").unwrap();
|
||||
|
||||
let iface = wit_bindgen_gen_core::wit_parser::Interface::parse_file(exports).unwrap();
|
||||
let mut files = Default::default();
|
||||
wit_bindgen_gen_wasmtime_py::Opts::default()
|
||||
.build()
|
||||
.generate_all(&[iface], &[], &mut files);
|
||||
for (file, contents) in files.iter() {
|
||||
fs::write(dir.join("exports").join(file), contents).unwrap();
|
||||
}
|
||||
fs::write(dir.join("exports").join("__init__.py"), "").unwrap();
|
||||
|
||||
println!("Running mypy...");
|
||||
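// MYPYPATH points at the generated output directory so the `imports`/`exports` packages resolve while type-checking.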
exec(
|
||||
Command::new("mypy")
|
||||
.env("MYPYPATH", &dir)
|
||||
.arg(py)
|
||||
.arg("--cache-dir")
|
||||
.arg(out_dir.join("mypycache").join(name)),
|
||||
);
|
||||
|
||||
exec(
|
||||
Command::new("python3")
|
||||
.env("PYTHONPATH", &dir)
|
||||
.arg(py)
|
||||
.arg(wasm),
|
||||
);
|
||||
}
|
||||
|
||||
fn exec(cmd: &mut Command) {
|
||||
println!("{:?}", cmd);
|
||||
let output = cmd.output().unwrap();
|
||||
if output.status.success() {
|
||||
return;
|
||||
}
|
||||
println!("status: {}", output.status);
|
||||
println!(
|
||||
"stdout ---\n {}",
|
||||
String::from_utf8_lossy(&output.stdout).replace("\n", "\n ")
|
||||
);
|
||||
println!(
|
||||
"stderr ---\n {}",
|
||||
String::from_utf8_lossy(&output.stderr).replace("\n", "\n ")
|
||||
);
|
||||
panic!("command failed");
|
||||
}
|
||||
@@ -0,0 +1,22 @@
|
||||
[package]
|
||||
name = "wit-bindgen-gen-wasmtime"
|
||||
version = "0.1.0"
|
||||
authors = ["Alex Crichton <alex@alexcrichton.com>"]
|
||||
edition = "2018"
|
||||
|
||||
[lib]
|
||||
test = false
|
||||
doctest = false
|
||||
|
||||
[dependencies]
|
||||
wit-bindgen-gen-core = { path = '../gen-core', version = '0.1.0' }
|
||||
wit-bindgen-gen-rust = { path = '../gen-rust', version = '0.1.0' }
|
||||
heck = "0.3"
|
||||
structopt = { version = "0.3", default-features = false, optional = true }
|
||||
|
||||
[dev-dependencies]
|
||||
anyhow = "1.0"
|
||||
test-helpers = { path = '../test-helpers', features = ['wit-bindgen-gen-wasmtime'] }
|
||||
wasmtime = "0.38.0"
|
||||
wasmtime-wasi = "0.38.0"
|
||||
wit-bindgen-wasmtime = { path = '../wasmtime', features = ['tracing', 'async'] }
|
||||
@@ -0,0 +1,4 @@
|
||||
fn main() {
|
||||
println!("cargo:rerun-if-changed=build.rs");
|
||||
// this build script is currently only here so OUT_DIR is set for testing.
|
||||
}
|
||||
2217 __wasm/wit-bindgen-sample/wit-bindgen/crates/gen-wasmtime/src/lib.rs (new file; diff suppressed because it is too large)
@@ -0,0 +1,108 @@
|
||||
#![allow(dead_code, type_alias_bounds)]
|
||||
|
||||
fn main() {
|
||||
println!("compiled successfully!")
|
||||
}
|
||||
|
||||
#[rustfmt::skip]
|
||||
mod exports {
|
||||
test_helpers::codegen_wasmtime_export!(
|
||||
"*.wit"
|
||||
|
||||
// TODO: implement async support
|
||||
"!async-functions.wit"
|
||||
|
||||
// If you want to exclude a specific test you can include it here with
|
||||
// gitignore glob syntax:
|
||||
//
|
||||
// "!wasm.wit"
|
||||
// "!host.wit"
|
||||
//
|
||||
//
|
||||
// Similarly you can also just remove the `*.wit` glob and list tests
|
||||
// individually if you're debugging.
|
||||
);
|
||||
}
|
||||
|
||||
mod imports {
|
||||
test_helpers::codegen_wasmtime_import!(
|
||||
"*.wit"
|
||||
|
||||
// TODO: implement async support
|
||||
"!async-functions.wit"
|
||||
|
||||
// TODO: these use push/pull buffer which isn't implemented in the test
|
||||
// generator just yet
|
||||
"!wasi-next.wit"
|
||||
"!host.wit"
|
||||
);
|
||||
}
|
||||
|
||||
mod async_tests {
|
||||
mod not_async {
|
||||
wit_bindgen_wasmtime::export!({
|
||||
src["x"]: "foo: func()",
|
||||
async: ["bar"],
|
||||
});
|
||||
|
||||
struct Me;
|
||||
|
||||
impl x::X for Me {
|
||||
fn foo(&mut self) {}
|
||||
}
|
||||
}
|
||||
mod one_async {
|
||||
wit_bindgen_wasmtime::export!({
|
||||
src["x"]: "
|
||||
foo: func() -> list<u8>
|
||||
bar: func()
|
||||
",
|
||||
async: ["bar"],
|
||||
});
|
||||
|
||||
struct Me;
|
||||
|
||||
#[wit_bindgen_wasmtime::async_trait]
|
||||
impl x::X for Me {
|
||||
fn foo(&mut self) -> Vec<u8> {
|
||||
Vec::new()
|
||||
}
|
||||
|
||||
async fn bar(&mut self) {}
|
||||
}
|
||||
}
|
||||
mod one_async_export {
|
||||
wit_bindgen_wasmtime::import!({
|
||||
src["x"]: "
|
||||
foo: func(x: list<string>)
|
||||
bar: func()
|
||||
",
|
||||
async: ["bar"],
|
||||
});
|
||||
}
|
||||
mod resource_with_none_async {
|
||||
wit_bindgen_wasmtime::export!({
|
||||
src["x"]: "
|
||||
resource y {
|
||||
z: func() -> string
|
||||
}
|
||||
",
|
||||
async: [],
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
mod custom_errors {
|
||||
wit_bindgen_wasmtime::export!({
|
||||
src["x"]: "
|
||||
foo: func()
|
||||
bar: func() -> expected<unit, u32>
|
||||
enum errno {
|
||||
bad1,
|
||||
bad2,
|
||||
}
|
||||
baz: func() -> expected<u32, errno>
|
||||
",
|
||||
custom_error: true,
|
||||
});
|
||||
}
|
||||
@@ -0,0 +1,110 @@
|
||||
use anyhow::{Context as _, Result};
|
||||
use wasmtime::{Config, Engine, Instance, Linker, Module, Store};
|
||||
|
||||
test_helpers::runtime_tests_wasmtime!();
|
||||
|
||||
fn default_config() -> Result<Config> {
|
||||
// Create an engine with caching enabled to assist with iteration in this
|
||||
// project.
|
||||
let mut config = Config::new();
|
||||
config.cache_config_load_default()?;
|
||||
config.wasm_backtrace_details(wasmtime::WasmBacktraceDetails::Enable);
|
||||
Ok(config)
|
||||
}
|
||||
|
||||
fn default_wasi() -> wasmtime_wasi::WasiCtx {
|
||||
wasmtime_wasi::sync::WasiCtxBuilder::new()
|
||||
.inherit_stdio()
|
||||
.build()
|
||||
}
|
||||
|
||||
struct Context<I, E> {
|
||||
wasi: wasmtime_wasi::WasiCtx,
|
||||
imports: I,
|
||||
exports: E,
|
||||
}
|
||||
|
||||
fn instantiate<I: Default, E: Default, T>(
|
||||
wasm: &str,
|
||||
add_imports: impl FnOnce(&mut Linker<Context<I, E>>) -> Result<()>,
|
||||
mk_exports: impl FnOnce(
|
||||
&mut Store<Context<I, E>>,
|
||||
&Module,
|
||||
&mut Linker<Context<I, E>>,
|
||||
) -> Result<(T, Instance)>,
|
||||
) -> Result<(T, Store<Context<I, E>>)> {
|
||||
let engine = Engine::new(&default_config()?)?;
|
||||
let module = Module::from_file(&engine, wasm)?;
|
||||
|
||||
let mut linker = Linker::new(&engine);
|
||||
add_imports(&mut linker)?;
|
||||
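// Route WASI imports through the `wasi` field of our Context.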
wasmtime_wasi::add_to_linker(&mut linker, |cx| &mut cx.wasi)?;
|
||||
|
||||
let mut store = Store::new(
|
||||
&engine,
|
||||
Context {
|
||||
wasi: default_wasi(),
|
||||
imports: I::default(),
|
||||
exports: E::default(),
|
||||
},
|
||||
);
|
||||
let (exports, _instance) = mk_exports(&mut store, &module, &mut linker)?;
|
||||
Ok((exports, store))
|
||||
}
|
||||
|
||||
// TODO: This function needs to be updated to use the component model once it's ready. See
|
||||
// https://github.com/bytecodealliance/wit-bindgen/issues/259 for details.
|
||||
//
|
||||
// Also, rename the ignore_host.rs files under the tests/runtime/smw_{functions|lists|strings} to host.rs and
|
||||
// remove the leading underscore from this function's name to re-enable the Spidermonkey tests.
|
||||
fn _instantiate_smw<I: Default, E: Default, T>(
|
||||
wasm: &str,
|
||||
add_imports: impl FnOnce(&mut Linker<Context<I, E>>) -> Result<()>,
|
||||
mk_exports: impl FnOnce(
|
||||
&mut Store<Context<I, E>>,
|
||||
&Module,
|
||||
&mut Linker<Context<I, E>>,
|
||||
) -> Result<(T, Instance)>,
|
||||
) -> Result<(T, Store<Context<I, E>>)> {
|
||||
let mut config = default_config()?;
|
||||
config.wasm_multi_memory(true);
|
||||
let engine = Engine::new(&config)?;
|
||||
|
||||
println!("reading wasms...");
|
||||
let wasm = std::fs::read(wasm).context(format!("failed to read {}", wasm))?;
|
||||
let smw = std::fs::read("../gen-spidermonkey/spidermonkey-wasm/spidermonkey.wasm")
|
||||
.context("failed to read `spidermonkey.wasm`")?;
|
||||
println!("compiling input wasm...");
|
||||
let module = Module::new(&engine, &wasm)?;
|
||||
println!("compiling spidermonkey.wasm...");
|
||||
let smw = Module::new(&engine, &smw)?;
|
||||
|
||||
let mut linker = Linker::new(&engine);
|
||||
add_imports(&mut linker)?;
|
||||
wasmtime_wasi::add_to_linker(&mut linker, |cx| &mut cx.wasi)?;
|
||||
|
||||
let mut store = Store::new(
|
||||
&engine,
|
||||
Context {
|
||||
wasi: default_wasi(),
|
||||
imports: I::default(),
|
||||
exports: E::default(),
|
||||
},
|
||||
);
|
||||
|
||||
println!("instantiating spidermonkey.wasm...");
|
||||
let _smw_instance = linker
|
||||
.instantiate(&mut store, &smw)
|
||||
.context("failed to instantiate `spidermonkey.wasm`")?;
|
||||
// TODO: replace this with a component model equivalent:
|
||||
// linker.define_name("spidermonkey", smw_instance)?;
|
||||
|
||||
println!("instantiating input wasm...");
|
||||
let (exports, instance) = mk_exports(&mut store, &module, &mut linker)?;
|
||||
|
||||
println!("running wizer.initialize");
|
||||
let init = instance.get_typed_func::<(), (), _>(&mut store, "wizer.initialize")?;
|
||||
init.call(&mut store, ())
|
||||
.context("failed to call wizer.initialize")?;
|
||||
Ok((exports, store))
|
||||
}
|
||||
2 __wasm/wit-bindgen-sample/wit-bindgen/crates/parser/.gitignore (vendored, new file)
@@ -0,0 +1,2 @@
|
||||
target
|
||||
Cargo.lock
|
||||
@@ -0,0 +1,22 @@
|
||||
[package]
|
||||
name = "wit-parser"
|
||||
version = "0.1.0"
|
||||
authors = ["Alex Crichton <alex@alexcrichton.com>"]
|
||||
edition = "2018"
|
||||
|
||||
[dependencies]
|
||||
id-arena = "2"
|
||||
anyhow = "1.0"
|
||||
pulldown-cmark = { version = "0.8", default-features = false }
|
||||
wast = { version = "33", default-features = false, optional = true }
|
||||
unicode-xid = "0.2.2"
|
||||
unicode-normalization = "0.1.19"
|
||||
|
||||
[dev-dependencies]
|
||||
rayon = "1"
|
||||
serde_json = "1"
|
||||
serde = { version = "1", features = ['derive'] }
|
||||
|
||||
[[test]]
|
||||
name = "all"
|
||||
harness = false
|
||||
2251 __wasm/wit-bindgen-sample/wit-bindgen/crates/parser/src/abi.rs (new file; diff suppressed because it is too large)
711 __wasm/wit-bindgen-sample/wit-bindgen/crates/parser/src/ast.rs (new file)
@@ -0,0 +1,711 @@
|
||||
use anyhow::Result;
|
||||
use lex::{Span, Token, Tokenizer};
|
||||
use std::borrow::Cow;
|
||||
use std::collections::HashMap;
|
||||
use std::convert::TryFrom;
|
||||
use std::fmt;
|
||||
|
||||
mod lex;
|
||||
mod resolve;
|
||||
|
||||
pub use lex::validate_id;
|
||||
|
||||
pub struct Ast<'a> {
|
||||
pub items: Vec<Item<'a>>,
|
||||
}
|
||||
|
||||
pub enum Item<'a> {
|
||||
Use(Use<'a>),
|
||||
Resource(Resource<'a>),
|
||||
TypeDef(TypeDef<'a>),
|
||||
Value(Value<'a>),
|
||||
Interface(Interface<'a>),
|
||||
}
|
||||
|
||||
pub struct Id<'a> {
|
||||
pub name: Cow<'a, str>,
|
||||
pub span: Span,
|
||||
}
|
||||
|
||||
impl<'a> From<&'a str> for Id<'a> {
|
||||
fn from(s: &'a str) -> Id<'a> {
|
||||
Id {
|
||||
name: s.into(),
|
||||
span: Span { start: 0, end: 0 },
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl<'a> From<String> for Id<'a> {
|
||||
fn from(s: String) -> Id<'a> {
|
||||
Id {
|
||||
name: s.into(),
|
||||
span: Span { start: 0, end: 0 },
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
pub struct Use<'a> {
|
||||
pub from: Vec<Id<'a>>,
|
||||
names: Option<Vec<UseName<'a>>>,
|
||||
}
|
||||
|
||||
struct UseName<'a> {
|
||||
name: Id<'a>,
|
||||
as_: Option<Id<'a>>,
|
||||
}
|
||||
|
||||
pub struct Resource<'a> {
|
||||
docs: Docs<'a>,
|
||||
name: Id<'a>,
|
||||
supertype: Option<Id<'a>>,
|
||||
values: Vec<(bool, Value<'a>)>,
|
||||
}
|
||||
|
||||
#[derive(Default)]
|
||||
struct Docs<'a> {
|
||||
docs: Vec<Cow<'a, str>>,
|
||||
}
|
||||
|
||||
pub struct TypeDef<'a> {
|
||||
docs: Docs<'a>,
|
||||
name: Id<'a>,
|
||||
ty: Type<'a>,
|
||||
}
|
||||
|
||||
enum Type<'a> {
|
||||
Unit,
|
||||
Bool,
|
||||
U8,
|
||||
U16,
|
||||
U32,
|
||||
U64,
|
||||
S8,
|
||||
S16,
|
||||
S32,
|
||||
S64,
|
||||
Float32,
|
||||
Float64,
|
||||
Char,
|
||||
String,
|
||||
Handle(Id<'a>),
|
||||
Name(Id<'a>),
|
||||
List(Box<Type<'a>>),
|
||||
Record(Record<'a>),
|
||||
Flags(Flags<'a>),
|
||||
Variant(Variant<'a>),
|
||||
Tuple(Vec<Type<'a>>),
|
||||
Enum(Enum<'a>),
|
||||
Option(Box<Type<'a>>),
|
||||
Expected(Expected<'a>),
|
||||
Future(Box<Type<'a>>),
|
||||
Stream(Stream<'a>),
|
||||
Union(Union<'a>),
|
||||
}
|
||||
|
||||
struct Record<'a> {
|
||||
fields: Vec<Field<'a>>,
|
||||
}
|
||||
|
||||
struct Field<'a> {
|
||||
docs: Docs<'a>,
|
||||
name: Id<'a>,
|
||||
ty: Type<'a>,
|
||||
}
|
||||
|
||||
struct Flags<'a> {
|
||||
flags: Vec<Flag<'a>>,
|
||||
}
|
||||
|
||||
struct Flag<'a> {
|
||||
docs: Docs<'a>,
|
||||
name: Id<'a>,
|
||||
}
|
||||
|
||||
struct Variant<'a> {
|
||||
span: Span,
|
||||
cases: Vec<Case<'a>>,
|
||||
}
|
||||
|
||||
struct Case<'a> {
|
||||
docs: Docs<'a>,
|
||||
name: Id<'a>,
|
||||
ty: Option<Type<'a>>,
|
||||
}
|
||||
|
||||
struct Enum<'a> {
|
||||
span: Span,
|
||||
cases: Vec<EnumCase<'a>>,
|
||||
}
|
||||
|
||||
struct EnumCase<'a> {
|
||||
docs: Docs<'a>,
|
||||
name: Id<'a>,
|
||||
}
|
||||
|
||||
struct Expected<'a> {
|
||||
ok: Box<Type<'a>>,
|
||||
err: Box<Type<'a>>,
|
||||
}
|
||||
|
||||
struct Stream<'a> {
|
||||
element: Box<Type<'a>>,
|
||||
end: Box<Type<'a>>,
|
||||
}
|
||||
|
||||
pub struct Value<'a> {
|
||||
docs: Docs<'a>,
|
||||
name: Id<'a>,
|
||||
kind: ValueKind<'a>,
|
||||
}
|
||||
|
||||
struct Union<'a> {
|
||||
span: Span,
|
||||
cases: Vec<UnionCase<'a>>,
|
||||
}
|
||||
|
||||
struct UnionCase<'a> {
|
||||
docs: Docs<'a>,
|
||||
ty: Type<'a>,
|
||||
}
|
||||
|
||||
enum ValueKind<'a> {
|
||||
Function {
|
||||
is_async: bool,
|
||||
params: Vec<(Id<'a>, Type<'a>)>,
|
||||
result: Type<'a>,
|
||||
},
|
||||
Global(Type<'a>),
|
||||
}
|
||||
|
||||
#[allow(dead_code)] // TODO
|
||||
pub struct Interface<'a> {
|
||||
docs: Docs<'a>,
|
||||
name: Id<'a>,
|
||||
items: Vec<Item<'a>>,
|
||||
}
|
||||
|
||||
impl<'a> Ast<'a> {
|
||||
pub fn parse(input: &'a str) -> Result<Ast<'a>> {
|
||||
let mut lexer = Tokenizer::new(input)?;
|
||||
let mut items = Vec::new();
|
||||
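// Peek on a clone so this loop only checks for EOF; Item::parse below advances the real tokenizer.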
while lexer.clone().next()?.is_some() {
|
||||
let docs = parse_docs(&mut lexer)?;
|
||||
items.push(Item::parse(&mut lexer, docs)?);
|
||||
}
|
||||
Ok(Ast { items })
|
||||
}
|
||||
|
||||
pub fn resolve(
|
||||
&self,
|
||||
name: &str,
|
||||
map: &HashMap<String, crate::Interface>,
|
||||
) -> Result<crate::Interface> {
|
||||
let mut resolver = resolve::Resolver::default();
|
||||
let instance = resolver.resolve(name, &self.items, map)?;
|
||||
Ok(instance)
|
||||
}
|
||||
}
|
||||
|
||||
impl<'a> Item<'a> {
|
||||
fn parse(tokens: &mut Tokenizer<'a>, docs: Docs<'a>) -> Result<Item<'a>> {
|
||||
match tokens.clone().next()? {
|
||||
Some((_span, Token::Use)) => Use::parse(tokens, docs).map(Item::Use),
|
||||
Some((_span, Token::Type)) => TypeDef::parse(tokens, docs).map(Item::TypeDef),
|
||||
Some((_span, Token::Flags)) => TypeDef::parse_flags(tokens, docs).map(Item::TypeDef),
|
||||
Some((_span, Token::Enum)) => TypeDef::parse_enum(tokens, docs).map(Item::TypeDef),
|
||||
Some((_span, Token::Variant)) => {
|
||||
TypeDef::parse_variant(tokens, docs).map(Item::TypeDef)
|
||||
}
|
||||
Some((_span, Token::Record)) => TypeDef::parse_record(tokens, docs).map(Item::TypeDef),
|
||||
Some((_span, Token::Union)) => TypeDef::parse_union(tokens, docs).map(Item::TypeDef),
|
||||
Some((_span, Token::Resource)) => Resource::parse(tokens, docs).map(Item::Resource),
|
||||
Some((_span, Token::Interface)) => Interface::parse(tokens, docs).map(Item::Interface),
|
||||
Some((_span, Token::Id)) | Some((_span, Token::ExplicitId)) => {
|
||||
Value::parse(tokens, docs).map(Item::Value)
|
||||
}
|
||||
other => Err(err_expected(tokens, "`type`, `resource`, or `func`", other).into()),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl<'a> Use<'a> {
|
||||
fn parse(tokens: &mut Tokenizer<'a>, _docs: Docs<'a>) -> Result<Self> {
|
||||
tokens.expect(Token::Use)?;
|
||||
let mut names = None;
|
||||
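// Two accepted forms: `use * from foo` (names stays None) or `use { a, b as c } from foo`.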
loop {
|
||||
if names.is_none() {
|
||||
if tokens.eat(Token::Star)? {
|
||||
break;
|
||||
}
|
||||
tokens.expect(Token::LeftBrace)?;
|
||||
names = Some(Vec::new());
|
||||
}
|
||||
let names = names.as_mut().unwrap();
|
||||
let mut name = UseName {
|
||||
name: parse_id(tokens)?,
|
||||
as_: None,
|
||||
};
|
||||
if tokens.eat(Token::As)? {
|
||||
name.as_ = Some(parse_id(tokens)?);
|
||||
}
|
||||
names.push(name);
|
||||
if !tokens.eat(Token::Comma)? {
|
||||
break;
|
||||
}
|
||||
}
|
||||
if names.is_some() {
|
||||
tokens.expect(Token::RightBrace)?;
|
||||
}
|
||||
tokens.expect(Token::From_)?;
|
||||
let mut from = vec![parse_id(tokens)?];
|
||||
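// The `from` path may continue with further `::`-separated segments.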
while tokens.eat(Token::Colon)? {
|
||||
tokens.expect_raw(Token::Colon)?;
|
||||
from.push(parse_id(tokens)?);
|
||||
}
|
||||
Ok(Use { from, names })
|
||||
}
|
||||
}
|
||||
|
||||
impl<'a> TypeDef<'a> {
|
||||
fn parse(tokens: &mut Tokenizer<'a>, docs: Docs<'a>) -> Result<Self> {
|
||||
tokens.expect(Token::Type)?;
|
||||
let name = parse_id(tokens)?;
|
||||
tokens.expect(Token::Equals)?;
|
||||
let ty = Type::parse(tokens)?;
|
||||
Ok(TypeDef { docs, name, ty })
|
||||
}
|
||||
|
||||
fn parse_flags(tokens: &mut Tokenizer<'a>, docs: Docs<'a>) -> Result<Self> {
|
||||
tokens.expect(Token::Flags)?;
|
||||
let name = parse_id(tokens)?;
|
||||
let ty = Type::Flags(Flags {
|
||||
flags: parse_list(
|
||||
tokens,
|
||||
Token::LeftBrace,
|
||||
Token::RightBrace,
|
||||
|docs, tokens| {
|
||||
let name = parse_id(tokens)?;
|
||||
Ok(Flag { docs, name })
|
||||
},
|
||||
)?,
|
||||
});
|
||||
Ok(TypeDef { docs, name, ty })
|
||||
}
|
||||
|
||||
fn parse_record(tokens: &mut Tokenizer<'a>, docs: Docs<'a>) -> Result<Self> {
|
||||
tokens.expect(Token::Record)?;
|
||||
let name = parse_id(tokens)?;
|
||||
let ty = Type::Record(Record {
|
||||
fields: parse_list(
|
||||
tokens,
|
||||
Token::LeftBrace,
|
||||
Token::RightBrace,
|
||||
|docs, tokens| {
|
||||
let name = parse_id(tokens)?;
|
||||
tokens.expect(Token::Colon)?;
|
||||
let ty = Type::parse(tokens)?;
|
||||
Ok(Field { docs, name, ty })
|
||||
},
|
||||
)?,
|
||||
});
|
||||
Ok(TypeDef { docs, name, ty })
|
||||
}
|
||||
|
||||
fn parse_variant(tokens: &mut Tokenizer<'a>, docs: Docs<'a>) -> Result<Self> {
|
||||
tokens.expect(Token::Variant)?;
|
||||
let name = parse_id(tokens)?;
|
||||
let ty = Type::Variant(Variant {
|
||||
span: name.span,
|
||||
cases: parse_list(
|
||||
tokens,
|
||||
Token::LeftBrace,
|
||||
Token::RightBrace,
|
||||
|docs, tokens| {
|
||||
let name = parse_id(tokens)?;
|
||||
let ty = if tokens.eat(Token::LeftParen)? {
|
||||
let ty = Type::parse(tokens)?;
|
||||
tokens.expect(Token::RightParen)?;
|
||||
Some(ty)
|
||||
} else {
|
||||
None
|
||||
};
|
||||
Ok(Case { docs, name, ty })
|
||||
},
|
||||
)?,
|
||||
});
|
||||
Ok(TypeDef { docs, name, ty })
|
||||
}
|
||||
|
||||
fn parse_union(tokens: &mut Tokenizer<'a>, docs: Docs<'a>) -> Result<Self> {
|
||||
tokens.expect(Token::Union)?;
|
||||
let name = parse_id(tokens)?;
|
||||
let ty = Type::Union(Union {
|
||||
span: name.span,
|
||||
cases: parse_list(
|
||||
tokens,
|
||||
Token::LeftBrace,
|
||||
Token::RightBrace,
|
||||
|docs, tokens| {
|
||||
let ty = Type::parse(tokens)?;
|
||||
Ok(UnionCase { docs, ty })
|
||||
},
|
||||
)?,
|
||||
});
|
||||
Ok(TypeDef { docs, name, ty })
|
||||
}
|
||||
|
||||
fn parse_enum(tokens: &mut Tokenizer<'a>, docs: Docs<'a>) -> Result<Self> {
|
||||
tokens.expect(Token::Enum)?;
|
||||
let name = parse_id(tokens)?;
|
||||
let ty = Type::Enum(Enum {
|
||||
span: name.span,
|
||||
cases: parse_list(
|
||||
tokens,
|
||||
Token::LeftBrace,
|
||||
Token::RightBrace,
|
||||
|docs, tokens| {
|
||||
let name = parse_id(tokens)?;
|
||||
Ok(EnumCase { docs, name })
|
||||
},
|
||||
)?,
|
||||
});
|
||||
Ok(TypeDef { docs, name, ty })
|
||||
}
|
||||
}
|
||||
|
||||
impl<'a> Resource<'a> {
|
||||
fn parse(tokens: &mut Tokenizer<'a>, docs: Docs<'a>) -> Result<Self> {
|
||||
tokens.expect(Token::Resource)?;
|
||||
let name = parse_id(tokens)?;
|
||||
let supertype = if tokens.eat(Token::Implements)? {
|
||||
Some(parse_id(tokens)?)
|
||||
} else {
|
||||
None
|
||||
};
|
||||
let mut values = Vec::new();
|
||||
if tokens.eat(Token::LeftBrace)? {
|
||||
loop {
|
||||
let docs = parse_docs(tokens)?;
|
||||
if tokens.eat(Token::RightBrace)? {
|
||||
break;
|
||||
}
|
||||
let statik = tokens.eat(Token::Static)?;
|
||||
values.push((statik, Value::parse(tokens, docs)?));
|
||||
}
|
||||
}
|
||||
Ok(Resource {
|
||||
docs,
|
||||
name,
|
||||
supertype,
|
||||
values,
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
impl<'a> Value<'a> {
|
||||
fn parse(tokens: &mut Tokenizer<'a>, docs: Docs<'a>) -> Result<Self> {
|
||||
let name = parse_id(tokens)?;
|
||||
tokens.expect(Token::Colon)?;
|
||||
|
||||
let kind = if tokens.eat(Token::Func)? {
|
||||
parse_func(tokens, false)?
|
||||
} else if tokens.eat(Token::Async)? {
|
||||
tokens.expect(Token::Func)?;
|
||||
parse_func(tokens, true)?
|
||||
} else {
|
||||
ValueKind::Global(Type::parse(tokens)?)
|
||||
};
|
||||
return Ok(Value { docs, name, kind });
|
||||
|
||||
fn parse_func<'a>(tokens: &mut Tokenizer<'a>, is_async: bool) -> Result<ValueKind<'a>> {
|
||||
let params = parse_list(
|
||||
tokens,
|
||||
Token::LeftParen,
|
||||
Token::RightParen,
|
||||
|_docs, tokens| {
|
||||
let name = parse_id(tokens)?;
|
||||
tokens.expect(Token::Colon)?;
|
||||
let ty = Type::parse(tokens)?;
|
||||
Ok((name, ty))
|
||||
},
|
||||
)?;
|
||||
let result = if tokens.eat(Token::RArrow)? {
|
||||
Type::parse(tokens)?
|
||||
} else {
|
||||
Type::Unit
|
||||
};
|
||||
Ok(ValueKind::Function {
|
||||
is_async,
|
||||
params,
|
||||
result,
|
||||
})
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
fn parse_id<'a>(tokens: &mut Tokenizer<'a>) -> Result<Id<'a>> {
|
||||
match tokens.next()? {
|
||||
Some((span, Token::Id)) => Ok(Id {
|
||||
name: tokens.parse_id(span)?.into(),
|
||||
span,
|
||||
}),
|
||||
Some((span, Token::ExplicitId)) => Ok(Id {
|
||||
name: tokens.parse_explicit_id(span)?.into(),
|
||||
span,
|
||||
}),
|
||||
other => Err(err_expected(tokens, "an identifier or string", other).into()),
|
||||
}
|
||||
}
|
||||
|
||||
fn parse_docs<'a>(tokens: &mut Tokenizer<'a>) -> Result<Docs<'a>> {
|
||||
let mut docs = Docs::default();
|
||||
let mut clone = tokens.clone();
|
||||
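// Walk raw tokens on a clone, collecting comments as docs; the position is committed only while we keep seeing whitespace or comments, leaving the next real token unconsumed.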
while let Some((span, token)) = clone.next_raw()? {
|
||||
match token {
|
||||
Token::Whitespace => {}
|
||||
Token::Comment => docs.docs.push(tokens.get_span(span).into()),
|
||||
_ => break,
|
||||
};
|
||||
*tokens = clone.clone();
|
||||
}
|
||||
Ok(docs)
|
||||
}
|
||||
|
||||
impl<'a> Type<'a> {
|
||||
fn parse(tokens: &mut Tokenizer<'a>) -> Result<Self> {
|
||||
match tokens.next()? {
|
||||
Some((_span, Token::U8)) => Ok(Type::U8),
|
||||
Some((_span, Token::U16)) => Ok(Type::U16),
|
||||
Some((_span, Token::U32)) => Ok(Type::U32),
|
||||
Some((_span, Token::U64)) => Ok(Type::U64),
|
||||
Some((_span, Token::S8)) => Ok(Type::S8),
|
||||
Some((_span, Token::S16)) => Ok(Type::S16),
|
||||
Some((_span, Token::S32)) => Ok(Type::S32),
|
||||
Some((_span, Token::S64)) => Ok(Type::S64),
|
||||
Some((_span, Token::Float32)) => Ok(Type::Float32),
|
||||
Some((_span, Token::Float64)) => Ok(Type::Float64),
|
||||
Some((_span, Token::Char)) => Ok(Type::Char),
|
||||
Some((_span, Token::Handle)) => {
|
||||
let name = parse_id(tokens)?;
|
||||
Ok(Type::Handle(name))
|
||||
}
|
||||
|
||||
// tuple<T, U, ...>
|
||||
Some((_span, Token::Tuple)) => {
|
||||
let types = parse_list(
|
||||
tokens,
|
||||
Token::LessThan,
|
||||
Token::GreaterThan,
|
||||
|_docs, tokens| Type::parse(tokens),
|
||||
)?;
|
||||
Ok(Type::Tuple(types))
|
||||
}
|
||||
|
||||
Some((_span, Token::Unit)) => Ok(Type::Unit),
|
||||
Some((_span, Token::Bool)) => Ok(Type::Bool),
|
||||
Some((_span, Token::String_)) => Ok(Type::String),
|
||||
|
||||
// list<T>
|
||||
Some((_span, Token::List)) => {
|
||||
tokens.expect(Token::LessThan)?;
|
||||
let ty = Type::parse(tokens)?;
|
||||
tokens.expect(Token::GreaterThan)?;
|
||||
Ok(Type::List(Box::new(ty)))
|
||||
}
|
||||
|
||||
// option<T>
|
||||
Some((_span, Token::Option_)) => {
|
||||
tokens.expect(Token::LessThan)?;
|
||||
let ty = Type::parse(tokens)?;
|
||||
tokens.expect(Token::GreaterThan)?;
|
||||
Ok(Type::Option(Box::new(ty)))
|
||||
}
|
||||
|
||||
// expected<T, E>
|
||||
Some((_span, Token::Expected)) => {
|
||||
tokens.expect(Token::LessThan)?;
|
||||
let ok = Box::new(Type::parse(tokens)?);
|
||||
tokens.expect(Token::Comma)?;
|
||||
let err = Box::new(Type::parse(tokens)?);
|
||||
tokens.expect(Token::GreaterThan)?;
|
||||
Ok(Type::Expected(Expected { ok, err }))
|
||||
}
|
||||
|
||||
// future<T>
|
||||
Some((_span, Token::Future)) => {
|
||||
tokens.expect(Token::LessThan)?;
|
||||
let ty = Box::new(Type::parse(tokens)?);
|
||||
tokens.expect(Token::GreaterThan)?;
|
||||
Ok(Type::Future(ty))
|
||||
}
|
||||
|
||||
// stream<T, Z>
|
||||
Some((_span, Token::Stream)) => {
|
||||
tokens.expect(Token::LessThan)?;
|
||||
let element = Box::new(Type::parse(tokens)?);
|
||||
tokens.expect(Token::Comma)?;
|
||||
let end = Box::new(Type::parse(tokens)?);
|
||||
tokens.expect(Token::GreaterThan)?;
|
||||
Ok(Type::Stream(Stream { element, end }))
|
||||
}
|
||||
|
||||
// `foo`
|
||||
Some((span, Token::Id)) => Ok(Type::Name(Id {
|
||||
name: tokens.parse_id(span)?.into(),
|
||||
span,
|
||||
})),
|
||||
// `@foo`
|
||||
Some((span, Token::ExplicitId)) => Ok(Type::Name(Id {
|
||||
name: tokens.parse_explicit_id(span)?.into(),
|
||||
span,
|
||||
})),
|
||||
|
||||
other => Err(err_expected(tokens, "a type", other).into()),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl<'a> Interface<'a> {
|
||||
fn parse(tokens: &mut Tokenizer<'a>, docs: Docs<'a>) -> Result<Self> {
|
||||
tokens.expect(Token::Interface)?;
|
||||
let name = parse_id(tokens)?;
|
||||
tokens.expect(Token::LeftBrace)?;
|
||||
let mut items = Vec::new();
|
||||
loop {
|
||||
let docs = parse_docs(tokens)?;
|
||||
if tokens.eat(Token::RightBrace)? {
|
||||
break;
|
||||
}
|
||||
items.push(Item::parse(tokens, docs)?);
|
||||
}
|
||||
Ok(Interface { docs, name, items })
|
||||
}
|
||||
}
|
||||
|
||||
fn parse_list<'a, T>(
|
||||
tokens: &mut Tokenizer<'a>,
|
||||
start: Token,
|
||||
end: Token,
|
||||
mut parse: impl FnMut(Docs<'a>, &mut Tokenizer<'a>) -> Result<T>,
|
||||
) -> Result<Vec<T>> {
|
||||
tokens.expect(start)?;
|
||||
let mut items = Vec::new();
|
||||
loop {
|
||||
// parse docs first, since they would otherwise be skipped when we try to eat the end token
|
||||
let docs = parse_docs(tokens)?;
|
||||
|
||||
// if we found an end token then we're done
|
||||
if tokens.eat(end)? {
|
||||
break;
|
||||
}
|
||||
|
||||
let item = parse(docs, tokens)?;
|
||||
items.push(item);
|
||||
|
||||
// if there's no trailing comma then this is required to be the end,
|
||||
// otherwise we go through the loop to try to get another item
|
||||
if !tokens.eat(Token::Comma)? {
|
||||
tokens.expect(end)?;
|
||||
break;
|
||||
}
|
||||
}
|
||||
Ok(items)
|
||||
}
|
||||
|
||||
fn err_expected(
|
||||
tokens: &Tokenizer<'_>,
|
||||
expected: &'static str,
|
||||
found: Option<(Span, Token)>,
|
||||
) -> Error {
|
||||
match found {
|
||||
Some((span, token)) => Error {
|
||||
span,
|
||||
msg: format!("expected {}, found {}", expected, token.describe()),
|
||||
},
|
||||
None => Error {
|
||||
span: Span {
|
||||
start: u32::try_from(tokens.input().len()).unwrap(),
|
||||
end: u32::try_from(tokens.input().len()).unwrap(),
|
||||
},
|
||||
msg: format!("expected {}, found eof", expected),
|
||||
},
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug)]
|
||||
struct Error {
|
||||
span: Span,
|
||||
msg: String,
|
||||
}
|
||||
|
||||
impl fmt::Display for Error {
|
||||
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
|
||||
self.msg.fmt(f)
|
||||
}
|
||||
}
|
||||
|
||||
impl std::error::Error for Error {}
|
||||
|
||||
pub fn rewrite_error(err: &mut anyhow::Error, file: &str, contents: &str) {
|
||||
let parse = match err.downcast_mut::<Error>() {
|
||||
Some(err) => err,
|
||||
None => return lex::rewrite_error(err, file, contents),
|
||||
};
|
||||
let msg = highlight_err(
|
||||
parse.span.start as usize,
|
||||
Some(parse.span.end as usize),
|
||||
file,
|
||||
contents,
|
||||
&parse.msg,
|
||||
);
|
||||
*err = anyhow::anyhow!("{}", msg);
|
||||
}
|
||||
|
||||
fn highlight_err(
|
||||
start: usize,
|
||||
end: Option<usize>,
|
||||
file: &str,
|
||||
input: &str,
|
||||
err: impl fmt::Display,
|
||||
) -> String {
|
||||
let (line, col) = linecol_in(start, input);
|
||||
let snippet = input.lines().nth(line).unwrap_or("");
|
||||
let mut msg = format!(
|
||||
"\
|
||||
{err}
|
||||
--> {file}:{line}:{col}
|
||||
|
|
||||
{line:4} | {snippet}
|
||||
| {marker:>0$}",
|
||||
col + 1,
|
||||
file = file,
|
||||
line = line + 1,
|
||||
col = col + 1,
|
||||
err = err,
|
||||
snippet = snippet,
|
||||
marker = "^",
|
||||
);
|
||||
if let Some(end) = end {
|
||||
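// Extend the caret with `-`s so the underline covers the whole span, not just its first character.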
if let Some(s) = input.get(start..end) {
|
||||
for _ in s.chars().skip(1) {
|
||||
msg.push('-');
|
||||
}
|
||||
}
|
||||
}
|
||||
return msg;
|
||||
|
||||
fn linecol_in(pos: usize, text: &str) -> (usize, usize) {
|
||||
let mut cur = 0;
|
||||
// Use split_terminator instead of lines so that if there is a `\r`,
|
||||
// it is included in the offset calculation. The `+1` values below
|
||||
// account for the `\n`.
|
||||
for (i, line) in text.split_terminator('\n').enumerate() {
|
||||
if cur + line.len() + 1 > pos {
|
||||
return (i, pos - cur);
|
||||
}
|
||||
cur += line.len() + 1;
|
||||
}
|
||||
(text.lines().count(), 0)
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,709 @@
|
||||
use anyhow::{bail, Result};
|
||||
use std::char;
|
||||
use std::convert::TryFrom;
|
||||
use std::fmt;
|
||||
use std::str;
|
||||
use unicode_normalization::char::canonical_combining_class;
|
||||
use unicode_xid::UnicodeXID;
|
||||
|
||||
use self::Token::*;
|
||||
|
||||
#[derive(Clone)]
|
||||
pub struct Tokenizer<'a> {
|
||||
input: &'a str,
|
||||
chars: CrlfFold<'a>,
|
||||
}
|
||||
|
||||
#[derive(Clone)]
|
||||
struct CrlfFold<'a> {
|
||||
chars: str::CharIndices<'a>,
|
||||
}
|
||||
|
||||
/// A span, designating a range of bytes where a token is located.
|
||||
#[derive(Eq, PartialEq, Debug, Clone, Copy)]
|
||||
pub struct Span {
|
||||
/// The start of the range.
|
||||
pub start: u32,
|
||||
/// The end of the range (exclusive).
|
||||
pub end: u32,
|
||||
}
|
||||
|
||||
#[derive(Eq, PartialEq, Debug, Copy, Clone)]
|
||||
pub enum Token {
|
||||
Whitespace,
|
||||
Comment,
|
||||
|
||||
Equals,
|
||||
Comma,
|
||||
Colon,
|
||||
Semicolon,
|
||||
LeftParen,
|
||||
RightParen,
|
||||
LeftBrace,
|
||||
RightBrace,
|
||||
LessThan,
|
||||
GreaterThan,
|
||||
RArrow,
|
||||
Star,
|
||||
|
||||
Use,
|
||||
Type,
|
||||
Resource,
|
||||
Func,
|
||||
U8,
|
||||
U16,
|
||||
U32,
|
||||
U64,
|
||||
S8,
|
||||
S16,
|
||||
S32,
|
||||
S64,
|
||||
Float32,
|
||||
Float64,
|
||||
Char,
|
||||
Handle,
|
||||
Record,
|
||||
Flags,
|
||||
Variant,
|
||||
Enum,
|
||||
Union,
|
||||
Bool,
|
||||
String_,
|
||||
Option_,
|
||||
Expected,
|
||||
Future,
|
||||
Stream,
|
||||
List,
|
||||
Underscore,
|
||||
As,
|
||||
From_,
|
||||
Static,
|
||||
Interface,
|
||||
Tuple,
|
||||
Async,
|
||||
Unit,
|
||||
Implements,
|
||||
|
||||
Id,
|
||||
ExplicitId,
|
||||
}
|
||||
|
||||
#[derive(Eq, PartialEq, Debug)]
|
||||
#[allow(dead_code)]
|
||||
pub enum Error {
|
||||
InvalidCharInString(usize, char),
|
||||
InvalidCharInId(usize, char),
|
||||
IdNotSSNFC(usize),
|
||||
IdPartEmpty(usize),
|
||||
InvalidEscape(usize, char),
|
||||
// InvalidHexEscape(usize, char),
|
||||
// InvalidEscapeValue(usize, u32),
|
||||
Unexpected(usize, char),
|
||||
UnterminatedComment(usize),
|
||||
UnterminatedString(usize),
|
||||
NewlineInString(usize),
|
||||
Wanted {
|
||||
at: usize,
|
||||
expected: &'static str,
|
||||
found: &'static str,
|
||||
},
|
||||
}
|
||||
|
||||
impl<'a> Tokenizer<'a> {
|
||||
pub fn new(input: &'a str) -> Result<Tokenizer<'a>> {
|
||||
detect_invalid_input(input)?;
|
||||
|
||||
let mut t = Tokenizer {
|
||||
input,
|
||||
chars: CrlfFold {
|
||||
chars: input.char_indices(),
|
||||
},
|
||||
};
|
||||
// Eat utf-8 BOM
|
||||
t.eatc('\u{feff}');
|
||||
Ok(t)
|
||||
}
|
||||
|
||||
pub fn input(&self) -> &'a str {
|
||||
self.input
|
||||
}
|
||||
|
||||
pub fn get_span(&self, span: Span) -> &'a str {
|
||||
&self.input[span.start as usize..span.end as usize]
|
||||
}
|
||||
|
||||
pub fn parse_id(&self, span: Span) -> Result<String> {
|
||||
let ret = self.get_span(span).to_owned();
|
||||
validate_id(span.start as usize, &ret)?;
|
||||
Ok(ret)
|
||||
}
|
||||
|
||||
pub fn parse_explicit_id(&self, span: Span) -> Result<String> {
|
||||
let token = self.get_span(span);
|
||||
let id_part = token.strip_prefix('%').unwrap();
|
||||
validate_id(span.start as usize, id_part)?;
|
||||
Ok(id_part.to_owned())
|
||||
}
|
||||
|
||||
pub fn next(&mut self) -> Result<Option<(Span, Token)>, Error> {
|
||||
loop {
|
||||
match self.next_raw()? {
|
||||
Some((_, Token::Whitespace)) | Some((_, Token::Comment)) => {}
|
||||
other => break Ok(other),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
pub fn next_raw(&mut self) -> Result<Option<(Span, Token)>, Error> {
|
||||
let (start, ch) = match self.chars.next() {
|
||||
Some(pair) => pair,
|
||||
None => return Ok(None),
|
||||
};
|
||||
let token = match ch {
|
||||
'\n' | '\t' | ' ' => {
|
||||
// Eat all contiguous whitespace tokens
|
||||
while self.eatc(' ') || self.eatc('\t') || self.eatc('\n') {}
|
||||
Whitespace
|
||||
}
|
||||
'/' => {
|
||||
// Eat a line comment if it's `//...`
|
||||
if self.eatc('/') {
|
||||
for (_, ch) in &mut self.chars {
|
||||
if ch == '\n' {
|
||||
break;
|
||||
}
|
||||
}
|
||||
// eat a block comment if it's `/*...`
|
||||
} else if self.eatc('*') {
|
||||
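// Block comments nest, so track the depth until the matching `*/`.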
let mut depth = 1;
|
||||
while depth > 0 {
|
||||
let (_, ch) = match self.chars.next() {
|
||||
Some(pair) => pair,
|
||||
None => return Err(Error::UnterminatedComment(start)),
|
||||
};
|
||||
match ch {
|
||||
'/' if self.eatc('*') => depth += 1,
|
||||
'*' if self.eatc('/') => depth -= 1,
|
||||
_ => {}
|
||||
}
|
||||
}
|
||||
} else {
|
||||
return Err(Error::Unexpected(start, ch));
|
||||
}
|
||||
|
||||
Comment
|
||||
}
|
||||
'=' => Equals,
|
||||
',' => Comma,
|
||||
':' => Colon,
|
||||
';' => Semicolon,
|
||||
'(' => LeftParen,
|
||||
')' => RightParen,
|
||||
'{' => LeftBrace,
|
||||
'}' => RightBrace,
|
||||
'<' => LessThan,
|
||||
'>' => GreaterThan,
|
||||
'*' => Star,
|
||||
'-' => {
|
||||
if self.eatc('>') {
|
||||
RArrow
|
||||
} else {
|
||||
return Err(Error::Unexpected(start, '-'));
|
||||
}
|
||||
}
|
||||
'%' => {
|
||||
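// `%` introduces an explicit id; consume any keylike characters after it (the id itself is validated later by `parse_explicit_id`).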
let mut iter = self.chars.clone();
|
||||
if let Some((_, ch)) = iter.next() {
|
||||
if is_keylike_start(ch) {
|
||||
self.chars = iter.clone();
|
||||
while let Some((_, ch)) = iter.next() {
|
||||
if !is_keylike_continue(ch) {
|
||||
break;
|
||||
}
|
||||
self.chars = iter.clone();
|
||||
}
|
||||
}
|
||||
}
|
||||
ExplicitId
|
||||
}
|
||||
ch if is_keylike_start(ch) => {
|
||||
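// Record how many bytes remain so the keyword's end offset can be computed after consuming the keylike characters.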
let remaining = self.chars.chars.as_str().len();
|
||||
let mut iter = self.chars.clone();
|
||||
while let Some((_, ch)) = iter.next() {
|
||||
if !is_keylike_continue(ch) {
|
||||
break;
|
||||
}
|
||||
self.chars = iter.clone();
|
||||
}
|
||||
let end = start + ch.len_utf8() + (remaining - self.chars.chars.as_str().len());
|
||||
match &self.input[start..end] {
|
||||
"use" => Use,
|
||||
"type" => Type,
|
||||
"resource" => Resource,
|
||||
"func" => Func,
|
||||
"u8" => U8,
|
||||
"u16" => U16,
|
||||
"u32" => U32,
|
||||
"u64" => U64,
|
||||
"s8" => S8,
|
||||
"s16" => S16,
|
||||
"s32" => S32,
|
||||
"s64" => S64,
|
||||
"float32" => Float32,
|
||||
"float64" => Float64,
|
||||
"char" => Char,
|
||||
"handle" => Handle,
|
||||
"record" => Record,
|
||||
"flags" => Flags,
|
||||
"variant" => Variant,
|
||||
"enum" => Enum,
|
||||
"union" => Union,
|
||||
"bool" => Bool,
|
||||
"string" => String_,
|
||||
"option" => Option_,
|
||||
"expected" => Expected,
|
||||
"future" => Future,
|
||||
"stream" => Stream,
|
||||
"list" => List,
|
||||
"_" => Underscore,
|
||||
"as" => As,
|
||||
"from" => From_,
|
||||
"static" => Static,
|
||||
"interface" => Interface,
|
||||
"tuple" => Tuple,
|
||||
"async" => Async,
|
||||
"unit" => Unit,
|
||||
"implements" => Implements,
|
||||
_ => Id,
|
||||
}
|
||||
}
|
||||
ch => return Err(Error::Unexpected(start, ch)),
|
||||
};
|
||||
let end = match self.chars.clone().next() {
|
||||
Some((i, _)) => i,
|
||||
None => self.input.len(),
|
||||
};
|
||||
|
||||
let start = u32::try_from(start).unwrap();
|
||||
let end = u32::try_from(end).unwrap();
|
||||
Ok(Some((Span { start, end }, token)))
|
||||
}
|
||||
|
||||
pub fn eat(&mut self, expected: Token) -> Result<bool, Error> {
|
||||
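// Look at the next token on a clone and only commit the advance when it matches.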
let mut other = self.clone();
|
||||
match other.next()? {
|
||||
Some((_span, found)) if expected == found => {
|
||||
*self = other;
|
||||
Ok(true)
|
||||
}
|
||||
Some(_) => Ok(false),
|
||||
None => Ok(false),
|
||||
}
|
||||
}
|
||||
|
||||
pub fn expect(&mut self, expected: Token) -> Result<Span, Error> {
|
||||
match self.next()? {
|
||||
Some((span, found)) => {
|
||||
if expected == found {
|
||||
Ok(span)
|
||||
} else {
|
||||
Err(Error::Wanted {
|
||||
at: usize::try_from(span.start).unwrap(),
|
||||
expected: expected.describe(),
|
||||
found: found.describe(),
|
||||
})
|
||||
}
|
||||
}
|
||||
None => Err(Error::Wanted {
|
||||
at: self.input.len(),
|
||||
expected: expected.describe(),
|
||||
found: "eof",
|
||||
}),
|
||||
}
|
||||
}
|
||||
|
||||
pub fn expect_raw(&mut self, expected: Token) -> Result<Span, Error> {
|
||||
match self.next_raw()? {
|
||||
Some((span, found)) => {
|
||||
if expected == found {
|
||||
Ok(span)
|
||||
} else {
|
||||
Err(Error::Wanted {
|
||||
at: usize::try_from(span.start).unwrap(),
|
||||
expected: expected.describe(),
|
||||
found: found.describe(),
|
||||
})
|
||||
}
|
||||
}
|
||||
None => Err(Error::Wanted {
|
||||
at: self.input.len(),
|
||||
expected: expected.describe(),
|
||||
found: "eof",
|
||||
}),
|
||||
}
|
||||
}
|
||||
|
||||
fn eatc(&mut self, ch: char) -> bool {
|
||||
let mut iter = self.chars.clone();
|
||||
match iter.next() {
|
||||
Some((_, ch2)) if ch == ch2 => {
|
||||
self.chars = iter;
|
||||
true
|
||||
}
|
||||
_ => false,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl<'a> Iterator for CrlfFold<'a> {
|
||||
type Item = (usize, char);
|
||||
|
||||
fn next(&mut self) -> Option<(usize, char)> {
|
||||
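// Fold a `\r\n` pair into a single '\n' so offsets and newline handling are consistent across platforms.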
self.chars.next().map(|(i, c)| {
|
||||
if c == '\r' {
|
||||
let mut attempt = self.chars.clone();
|
||||
if let Some((_, '\n')) = attempt.next() {
|
||||
self.chars = attempt;
|
||||
return (i, '\n');
|
||||
}
|
||||
}
|
||||
(i, c)
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
fn detect_invalid_input(input: &str) -> Result<()> {
|
||||
// Disallow specific codepoints.
|
||||
let mut line = 1;
|
||||
for ch in input.chars() {
|
||||
match ch {
|
||||
'\n' => line += 1,
|
||||
'\r' | '\t' => {}
|
||||
|
||||
// Bidirectional override codepoints can be used to craft source code that
|
||||
// appears to have a different meaning than its actual meaning. See
|
||||
// [CVE-2021-42574] for background and motivation.
|
||||
//
|
||||
// [CVE-2021-42574]: https://cve.mitre.org/cgi-bin/cvename.cgi?name=CVE-2021-42574
|
||||
'\u{202a}' | '\u{202b}' | '\u{202c}' | '\u{202d}' | '\u{202e}' | '\u{2066}'
|
||||
| '\u{2067}' | '\u{2068}' | '\u{2069}' => {
|
||||
bail!(
|
||||
"Input contains bidirectional override codepoint {:?} at line {}",
|
||||
ch.escape_unicode(),
|
||||
line
|
||||
);
|
||||
}
|
||||
|
||||
// Disallow several characters which are deprecated or discouraged in Unicode.
|
||||
//
|
||||
// U+149 deprecated; see Unicode 13.0.0, sec. 7.1 Latin, Compatibility Digraphs.
|
||||
// U+673 deprecated; see Unicode 13.0.0, sec. 9.2 Arabic, Additional Vowel Marks.
|
||||
// U+F77 and U+F79 deprecated; see Unicode 13.0.0, sec. 13.4 Tibetan, Vowels.
|
||||
// U+17A3 and U+17A4 deprecated, and U+17B4 and U+17B5 discouraged; see
|
||||
// Unicode 13.0.0, sec. 16.4 Khmer, Characters Whose Use Is Discouraged.
|
||||
'\u{149}' | '\u{673}' | '\u{f77}' | '\u{f79}' | '\u{17a3}' | '\u{17a4}'
|
||||
| '\u{17b4}' | '\u{17b5}' => {
|
||||
bail!(
|
||||
"Codepoint {:?} at line {} is discouraged by Unicode",
|
||||
ch.escape_unicode(),
|
||||
line
|
||||
);
|
||||
}
|
||||
|
||||
// Disallow control codes other than the ones explicitly recognized above,
|
||||
// so that viewing a wit file on a terminal doesn't have surprising side
|
||||
// effects or appear to have a different meaning than its actual meaning.
|
||||
ch if ch.is_control() => {
|
||||
bail!("Control code '{}' at line {}", ch.escape_unicode(), line);
|
||||
}
|
||||
|
||||
_ => {}
|
||||
}
|
||||
}
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
fn is_keylike_start(ch: char) -> bool {
|
||||
// Lex any XID start, `_`, or '-'. These aren't all valid identifier chars,
|
||||
// but we'll diagnose that after we've lexed the full string.
|
||||
UnicodeXID::is_xid_start(ch) || ch == '_' || ch == '-'
|
||||
}
|
||||
|
||||
fn is_keylike_continue(ch: char) -> bool {
|
||||
// Lex any XID continue (which includes `_`) or '-'.
|
||||
UnicodeXID::is_xid_continue(ch) || ch == '-'
|
||||
}
|
||||
|
||||
pub fn validate_id(start: usize, id: &str) -> Result<(), Error> {
|
||||
// Ids must be in stream-safe NFC.
|
||||
if !unicode_normalization::is_nfc_stream_safe(&id) {
|
||||
return Err(Error::IdNotSSNFC(start));
|
||||
}
|
||||
|
||||
// IDs must have at least one part.
|
||||
if id.is_empty() {
|
||||
return Err(Error::IdPartEmpty(start));
|
||||
}
|
||||
|
||||
// Ids consist of parts separated by '-'s.
|
||||
for part in id.split("-") {
|
||||
// Parts must be non-empty and start with a non-combining XID start.
|
||||
match part.chars().next() {
|
||||
None => return Err(Error::IdPartEmpty(start)),
|
||||
Some(first) => {
|
||||
// Require the first character of each part to be non-combining,
|
||||
// so that if a source language uses `CamelCase`, they won't
|
||||
// combine with the last character of the previous part.
|
||||
if canonical_combining_class(first) != 0 {
|
||||
return Err(Error::InvalidCharInId(start, first));
|
||||
}
|
||||
|
||||
// Require the first character to be a XID start.
|
||||
if !UnicodeXID::is_xid_start(first) {
|
||||
return Err(Error::InvalidCharInId(start, first));
|
||||
}
|
||||
|
||||
// TODO: Disallow values with 'Grapheme_Extend = Yes', to
|
||||
// prevent them from combining with previous parts?
|
||||
|
||||
// TODO: Disallow values with 'Grapheme_Cluster_Break = SpacingMark'?
|
||||
}
|
||||
};
|
||||
|
||||
// Some XID values are not valid ID part values.
|
||||
for ch in part.chars() {
|
||||
// Disallow uppercase and underscore, so that identifiers
|
||||
// consistently use `kebab-case`, and source languages can map
|
||||
// identifiers according to their own conventions (which might use
|
||||
// `CamelCase` or `snake_case` or something else) without worrying
|
||||
// about collisions.
|
||||
if ch.is_uppercase() || ch == '_' || !UnicodeXID::is_xid_continue(ch) {
|
||||
return Err(Error::InvalidCharInId(start, ch));
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
impl Token {
|
||||
pub fn describe(&self) -> &'static str {
|
||||
match self {
|
||||
Whitespace => "whitespace",
|
||||
Comment => "a comment",
|
||||
Equals => "'='",
|
||||
Comma => "','",
|
||||
Colon => "':'",
|
||||
Semicolon => "';'",
|
||||
LeftParen => "'('",
|
||||
RightParen => "')'",
|
||||
LeftBrace => "'{'",
|
||||
RightBrace => "'}'",
|
||||
LessThan => "'<'",
|
||||
GreaterThan => "'>'",
|
||||
Use => "keyword `use`",
|
||||
Type => "keyword `type`",
|
||||
Resource => "keyword `resource`",
|
||||
Func => "keyword `func`",
|
||||
U8 => "keyword `u8`",
|
||||
U16 => "keyword `u16`",
|
||||
U32 => "keyword `u32`",
|
||||
U64 => "keyword `u64`",
|
||||
S8 => "keyword `s8`",
|
||||
S16 => "keyword `s16`",
|
||||
S32 => "keyword `s32`",
|
||||
S64 => "keyword `s64`",
|
||||
Float32 => "keyword `float32`",
|
||||
Float64 => "keyword `float64`",
|
||||
Char => "keyword `char`",
|
||||
Handle => "keyword `handle`",
|
||||
Record => "keyword `record`",
|
||||
Flags => "keyword `flags`",
|
||||
Variant => "keyword `variant`",
|
||||
Enum => "keyword `enum`",
|
||||
Union => "keyword `union`",
|
||||
Bool => "keyword `bool`",
|
||||
String_ => "keyword `string`",
|
||||
Option_ => "keyword `option`",
|
||||
Expected => "keyword `expected`",
|
||||
Future => "keyword `future`",
|
||||
Stream => "keyword `stream`",
|
||||
List => "keyword `list`",
|
||||
Underscore => "keyword `_`",
|
||||
Id => "an identifier",
|
||||
ExplicitId => "an '%' identifier",
|
||||
RArrow => "`->`",
|
||||
Star => "`*`",
|
||||
As => "keyword `as`",
|
||||
From_ => "keyword `from`",
|
||||
Static => "keyword `static`",
|
||||
Interface => "keyword `interface`",
|
||||
Tuple => "keyword `tuple`",
|
||||
Async => "keyword `async`",
|
||||
Unit => "keyword `unit`",
|
||||
Implements => "keyword `implements`",
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl std::error::Error for Error {}
|
||||
|
||||
impl fmt::Display for Error {
|
||||
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
|
||||
match self {
|
||||
Error::Unexpected(_, ch) => write!(f, "unexpected character {:?}", ch),
|
||||
Error::UnterminatedComment(_) => write!(f, "unterminated block comment"),
|
||||
Error::Wanted {
|
||||
expected, found, ..
|
||||
} => write!(f, "expected {}, found {}", expected, found),
|
||||
Error::UnterminatedString(_) => write!(f, "unterminated string literal"),
|
||||
Error::NewlineInString(_) => write!(f, "newline in string literal"),
|
||||
Error::InvalidCharInString(_, ch) => write!(f, "invalid character in string {:?}", ch),
|
||||
Error::InvalidCharInId(_, ch) => write!(f, "invalid character in identifier {:?}", ch),
|
||||
Error::IdPartEmpty(_) => write!(f, "identifiers must have characters between '-'s"),
|
||||
Error::IdNotSSNFC(_) => write!(f, "identifiers must be in stream-safe NFC"),
|
||||
Error::InvalidEscape(_, ch) => write!(f, "invalid escape in string {:?}", ch),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
pub fn rewrite_error(err: &mut anyhow::Error, file: &str, contents: &str) {
|
||||
let lex = match err.downcast_mut::<Error>() {
|
||||
Some(err) => err,
|
||||
None => return,
|
||||
};
|
||||
let pos = match lex {
|
||||
Error::Unexpected(at, _)
|
||||
| Error::UnterminatedComment(at)
|
||||
| Error::Wanted { at, .. }
|
||||
| Error::UnterminatedString(at)
|
||||
| Error::NewlineInString(at)
|
||||
| Error::InvalidCharInString(at, _)
|
||||
| Error::InvalidCharInId(at, _)
|
||||
| Error::IdNotSSNFC(at)
|
||||
| Error::IdPartEmpty(at)
|
||||
| Error::InvalidEscape(at, _) => *at,
|
||||
};
|
||||
let msg = super::highlight_err(pos, None, file, contents, lex);
|
||||
*err = anyhow::anyhow!("{}", msg);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_validate_id() {
|
||||
validate_id(0, "apple").unwrap();
|
||||
validate_id(0, "apple-pear").unwrap();
|
||||
validate_id(0, "apple-pear-grape").unwrap();
|
||||
validate_id(0, "garçon").unwrap();
|
||||
validate_id(0, "hühnervögel").unwrap();
|
||||
validate_id(0, "москва").unwrap();
|
||||
validate_id(0, "東京").unwrap();
|
||||
validate_id(0, "東-京").unwrap();
|
||||
validate_id(0, "garçon-hühnervögel-москва-東京").unwrap();
|
||||
validate_id(0, "garçon-hühnervögel-москва-東-京").unwrap();
|
||||
validate_id(0, "a0").unwrap();
|
||||
validate_id(0, "a").unwrap();
|
||||
validate_id(0, "a-a").unwrap();
|
||||
validate_id(0, "bool").unwrap();
|
||||
|
||||
assert!(validate_id(0, "").is_err());
|
||||
assert!(validate_id(0, "0").is_err());
|
||||
assert!(validate_id(0, "%").is_err());
|
||||
assert!(validate_id(0, "$").is_err());
|
||||
assert!(validate_id(0, "0a").is_err());
|
||||
assert!(validate_id(0, ".").is_err());
|
||||
assert!(validate_id(0, "·").is_err());
|
||||
assert!(validate_id(0, "a a").is_err());
|
||||
assert!(validate_id(0, "_").is_err());
|
||||
assert!(validate_id(0, "-").is_err());
|
||||
assert!(validate_id(0, "a-").is_err());
|
||||
assert!(validate_id(0, "-a").is_err());
|
||||
assert!(validate_id(0, "Apple").is_err());
|
||||
assert!(validate_id(0, "APPLE").is_err());
|
||||
assert!(validate_id(0, "applE").is_err());
|
||||
assert!(validate_id(0, "-apple-pear").is_err());
|
||||
assert!(validate_id(0, "apple-pear-").is_err());
|
||||
assert!(validate_id(0, "apple_pear").is_err());
|
||||
assert!(validate_id(0, "apple.pear").is_err());
|
||||
assert!(validate_id(0, "apple pear").is_err());
|
||||
assert!(validate_id(0, "apple/pear").is_err());
|
||||
assert!(validate_id(0, "apple|pear").is_err());
|
||||
assert!(validate_id(0, "apple-Pear").is_err());
|
||||
assert!(validate_id(0, "apple-0").is_err());
|
||||
assert!(validate_id(0, "()()").is_err());
|
||||
assert!(validate_id(0, "").is_err());
|
||||
assert!(validate_id(0, "*").is_err());
|
||||
assert!(validate_id(0, "apple\u{5f3}pear").is_err());
|
||||
assert!(validate_id(0, "apple\u{200c}pear").is_err());
|
||||
assert!(validate_id(0, "apple\u{200d}pear").is_err());
|
||||
assert!(validate_id(0, "apple--pear").is_err());
|
||||
assert!(validate_id(0, "_apple").is_err());
|
||||
assert!(validate_id(0, "apple_").is_err());
|
||||
assert!(validate_id(0, "_Znwj").is_err());
|
||||
assert!(validate_id(0, "__i386").is_err());
|
||||
assert!(validate_id(0, "__i386__").is_err());
|
||||
assert!(validate_id(0, "ENOENT").is_err());
|
||||
assert!(validate_id(0, "Москва").is_err());
|
||||
assert!(validate_id(0, "garçon-hühnervögel-Москва-東京").is_err());
|
||||
assert!(validate_id(0, "😼").is_err(), "non-identifier");
|
||||
assert!(validate_id(0, "\u{212b}").is_err(), "not NFC");
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_tokenizer() {
|
||||
fn collect(s: &str) -> Result<Vec<Token>> {
|
||||
let mut t = Tokenizer::new(s)?;
|
||||
let mut tokens = Vec::new();
|
||||
while let Some(token) = t.next()? {
|
||||
tokens.push(token.1);
|
||||
}
|
||||
Ok(tokens)
|
||||
}
|
||||
|
||||
assert_eq!(collect("").unwrap(), vec![]);
|
||||
assert_eq!(collect("_").unwrap(), vec![Token::Underscore]);
|
||||
assert_eq!(collect("apple").unwrap(), vec![Token::Id]);
|
||||
assert_eq!(collect("apple-pear").unwrap(), vec![Token::Id]);
|
||||
assert_eq!(collect("apple--pear").unwrap(), vec![Token::Id]);
|
||||
assert_eq!(collect("apple-Pear").unwrap(), vec![Token::Id]);
|
||||
assert_eq!(collect("apple-pear-grape").unwrap(), vec![Token::Id]);
|
||||
assert_eq!(collect("apple pear").unwrap(), vec![Token::Id, Token::Id]);
|
||||
assert_eq!(collect("_a_p_p_l_e_").unwrap(), vec![Token::Id]);
|
||||
assert_eq!(collect("garçon").unwrap(), vec![Token::Id]);
|
||||
assert_eq!(collect("hühnervögel").unwrap(), vec![Token::Id]);
|
||||
assert_eq!(collect("москва").unwrap(), vec![Token::Id]);
|
||||
assert_eq!(collect("東京").unwrap(), vec![Token::Id]);
|
||||
assert_eq!(
|
||||
collect("garçon-hühnervögel-москва-東京").unwrap(),
|
||||
vec![Token::Id]
|
||||
);
|
||||
assert_eq!(collect("a0").unwrap(), vec![Token::Id]);
|
||||
assert_eq!(collect("a").unwrap(), vec![Token::Id]);
|
||||
assert_eq!(collect("%a").unwrap(), vec![Token::ExplicitId]);
|
||||
assert_eq!(collect("%a-a").unwrap(), vec![Token::ExplicitId]);
|
||||
assert_eq!(collect("%bool").unwrap(), vec![Token::ExplicitId]);
|
||||
assert_eq!(collect("%").unwrap(), vec![Token::ExplicitId]);
|
||||
|
||||
assert_eq!(collect("func").unwrap(), vec![Token::Func]);
|
||||
assert_eq!(
|
||||
collect("a: func()").unwrap(),
|
||||
vec![
|
||||
Token::Id,
|
||||
Token::Colon,
|
||||
Token::Func,
|
||||
Token::LeftParen,
|
||||
Token::RightParen
|
||||
]
|
||||
);
|
||||
|
||||
assert!(collect("\u{149}").is_err(), "strongly discouraged");
|
||||
assert!(collect("\u{673}").is_err(), "strongly discouraged");
|
||||
assert!(collect("\u{17a3}").is_err(), "strongly discouraged");
|
||||
assert!(collect("\u{17a4}").is_err(), "strongly discouraged");
|
||||
assert!(collect("\u{202a}").is_err(), "bidirectional override");
|
||||
assert!(collect("\u{2068}").is_err(), "bidirectional override");
|
||||
assert!(collect("\u{0}").is_err(), "control code");
|
||||
assert!(collect("\u{b}").is_err(), "control code");
|
||||
assert!(collect("\u{c}").is_err(), "control code");
|
||||
assert!(collect("\u{85}").is_err(), "control code");
|
||||
}
|
||||
@@ -0,0 +1,751 @@
|
||||
use super::{Error, Item, Span, Value, ValueKind};
|
||||
use crate::*;
|
||||
use anyhow::Result;
|
||||
use std::collections::{HashMap, HashSet};
|
||||
use std::mem;
|
||||
|
||||
#[derive(Default)]
|
||||
pub struct Resolver {
|
||||
type_lookup: HashMap<String, TypeId>,
|
||||
types: Arena<TypeDef>,
|
||||
resource_lookup: HashMap<String, ResourceId>,
|
||||
resources_copied: HashMap<(String, ResourceId), ResourceId>,
|
||||
types_copied: HashMap<(String, TypeId), TypeId>,
|
||||
resources: Arena<Resource>,
|
||||
anon_types: HashMap<Key, TypeId>,
|
||||
functions: Vec<Function>,
|
||||
globals: Vec<Global>,
|
||||
}
|
||||
|
||||
#[derive(PartialEq, Eq, Hash)]
|
||||
enum Key {
|
||||
Variant(Vec<(String, Type)>),
|
||||
Record(Vec<(String, Type)>),
|
||||
Flags(Vec<String>),
|
||||
Tuple(Vec<Type>),
|
||||
Enum(Vec<String>),
|
||||
List(Type),
|
||||
Option(Type),
|
||||
Expected(Type, Type),
|
||||
Union(Vec<Type>),
|
||||
Future(Type),
|
||||
Stream(Type, Type),
|
||||
}
|
||||
|
||||
impl Resolver {
|
||||
pub(super) fn resolve(
|
||||
&mut self,
|
||||
name: &str,
|
||||
fields: &[Item<'_>],
|
||||
deps: &HashMap<String, Interface>,
|
||||
) -> Result<Interface> {
|
||||
// First pull in any names from our dependencies
|
||||
self.process_use(fields, deps)?;
|
||||
// ... then register our own names
|
||||
self.register_names(fields)?;
|
||||
|
||||
// With all names registered we can now fully expand and translate all
|
||||
// types.
|
||||
for field in fields {
|
||||
let t = match field {
|
||||
Item::TypeDef(t) => t,
|
||||
_ => continue,
|
||||
};
|
||||
let id = self.type_lookup[&*t.name.name];
|
||||
let kind = self.resolve_type_def(&t.ty)?;
|
||||
self.types.get_mut(id).unwrap().kind = kind;
|
||||
}
|
||||
|
||||
// And finally we can resolve all type references in functions/globals
|
||||
// and additionally validate that types themselves are not recursive
|
||||
let mut valid_types = HashSet::new();
|
||||
let mut visiting = HashSet::new();
|
||||
for field in fields {
|
||||
match field {
|
||||
Item::Value(v) => self.resolve_value(v)?,
|
||||
Item::Resource(r) => self.resolve_resource(r)?,
|
||||
Item::TypeDef(t) => {
|
||||
self.validate_type_not_recursive(
|
||||
t.name.span,
|
||||
self.type_lookup[&*t.name.name],
|
||||
&mut visiting,
|
||||
&mut valid_types,
|
||||
)?;
|
||||
}
|
||||
_ => continue,
|
||||
}
|
||||
}
|
||||
|
||||
Ok(Interface {
|
||||
name: name.to_string(),
|
||||
module: None,
|
||||
types: mem::take(&mut self.types),
|
||||
type_lookup: mem::take(&mut self.type_lookup),
|
||||
resources: mem::take(&mut self.resources),
|
||||
resource_lookup: mem::take(&mut self.resource_lookup),
|
||||
interface_lookup: Default::default(),
|
||||
interfaces: Default::default(),
|
||||
functions: mem::take(&mut self.functions),
|
||||
globals: mem::take(&mut self.globals),
|
||||
})
|
||||
}
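// Illustrative sketch (assumption, not part of the original source): a
// hypothetical driver inside the `ast` module would run the passes above
// roughly as
//
//     let mut resolver = Resolver::default();
//     let iface = resolver.resolve("my-interface", &ast_items, &deps)?;
//
// where `ast_items` comes from the parsed AST and `deps` maps dependency
// names to already-resolved `Interface`s.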
|
||||
|
||||
fn process_use<'a>(
|
||||
&mut self,
|
||||
fields: &[Item<'a>],
|
||||
deps: &'a HashMap<String, Interface>,
|
||||
) -> Result<()> {
|
||||
for field in fields {
|
||||
let u = match field {
|
||||
Item::Use(u) => u,
|
||||
_ => continue,
|
||||
};
|
||||
let mut dep = &deps[&*u.from[0].name];
|
||||
let mut prev = &*u.from[0].name;
|
||||
for name in u.from[1..].iter() {
|
||||
dep = match dep.interface_lookup.get(&*name.name) {
|
||||
Some(i) => &dep.interfaces[*i],
|
||||
None => {
|
||||
return Err(Error {
|
||||
span: name.span,
|
||||
msg: format!("`{}` not defined in `{}`", name.name, prev),
|
||||
}
|
||||
.into())
|
||||
}
|
||||
};
|
||||
prev = &*name.name;
|
||||
}
|
||||
|
||||
let mod_name = &u.from[0];
|
||||
|
||||
match &u.names {
|
||||
Some(names) => {
|
||||
for name in names {
|
||||
let (my_name, span) = match &name.as_ {
|
||||
Some(id) => (&id.name, id.span),
|
||||
None => (&name.name.name, name.name.span),
|
||||
};
|
||||
let mut found = false;
|
||||
|
||||
if let Some(id) = dep.resource_lookup.get(&*name.name.name) {
|
||||
let resource = self.copy_resource(&mod_name.name, dep, *id);
|
||||
self.define_resource(my_name, span, resource)?;
|
||||
found = true;
|
||||
}
|
||||
|
||||
if let Some(id) = dep.type_lookup.get(&*name.name.name) {
|
||||
let ty = self.copy_type_def(&mod_name.name, dep, *id);
|
||||
self.define_type(my_name, span, ty)?;
|
||||
found = true;
|
||||
}
|
||||
|
||||
if !found {
|
||||
return Err(Error {
|
||||
span: name.name.span,
|
||||
msg: "name not defined in submodule".to_string(),
|
||||
}
|
||||
.into());
|
||||
}
|
||||
}
|
||||
}
|
||||
None => {
|
||||
for (id, resource) in dep.resources.iter() {
|
||||
let id = self.copy_resource(&mod_name.name, dep, id);
|
||||
self.define_resource(&resource.name, mod_name.span, id)?;
|
||||
}
|
||||
let mut names = dep.type_lookup.iter().collect::<Vec<_>>();
|
||||
names.sort(); // produce a stable order by which to add names
|
||||
for (name, id) in names {
|
||||
let ty = self.copy_type_def(&mod_name.name, dep, *id);
|
||||
self.define_type(name, mod_name.span, ty)?;
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
Ok(())
|
||||
}
|
||||
|
||||
fn copy_resource(&mut self, dep_name: &str, dep: &Interface, r: ResourceId) -> ResourceId {
|
||||
let resources = &mut self.resources;
|
||||
*self
|
||||
.resources_copied
|
||||
.entry((dep_name.to_string(), r))
|
||||
.or_insert_with(|| {
|
||||
let r = &dep.resources[r];
|
||||
let resource = Resource {
|
||||
docs: r.docs.clone(),
|
||||
name: r.name.clone(),
|
||||
supertype: r.supertype.clone(),
|
||||
foreign_module: Some(
|
||||
r.foreign_module
|
||||
.clone()
|
||||
.unwrap_or_else(|| dep_name.to_string()),
|
||||
),
|
||||
};
|
||||
resources.alloc(resource)
|
||||
})
|
||||
}
|
||||
|
||||
fn copy_type_def(&mut self, dep_name: &str, dep: &Interface, dep_id: TypeId) -> TypeId {
|
||||
if let Some(id) = self.types_copied.get(&(dep_name.to_string(), dep_id)) {
|
||||
return *id;
|
||||
}
|
||||
let ty = &dep.types[dep_id];
|
||||
|
||||
let ty = TypeDef {
|
||||
docs: ty.docs.clone(),
|
||||
name: ty.name.clone(),
|
||||
foreign_module: Some(
|
||||
ty.foreign_module
|
||||
.clone()
|
||||
.unwrap_or_else(|| dep_name.to_string()),
|
||||
),
|
||||
kind: match &ty.kind {
|
||||
TypeDefKind::Type(t) => TypeDefKind::Type(self.copy_type(dep_name, dep, *t)),
|
||||
TypeDefKind::Record(r) => TypeDefKind::Record(Record {
|
||||
fields: r
|
||||
.fields
|
||||
.iter()
|
||||
.map(|field| Field {
|
||||
docs: field.docs.clone(),
|
||||
name: field.name.clone(),
|
||||
ty: self.copy_type(dep_name, dep, field.ty),
|
||||
})
|
||||
.collect(),
|
||||
}),
|
||||
TypeDefKind::Flags(f) => TypeDefKind::Flags(f.clone()),
|
||||
TypeDefKind::Tuple(t) => TypeDefKind::Tuple(Tuple {
|
||||
types: t
|
||||
.types
|
||||
.iter()
|
||||
.map(|ty| self.copy_type(dep_name, dep, *ty))
|
||||
.collect(),
|
||||
}),
|
||||
TypeDefKind::Variant(v) => TypeDefKind::Variant(Variant {
|
||||
cases: v
|
||||
.cases
|
||||
.iter()
|
||||
.map(|case| Case {
|
||||
docs: case.docs.clone(),
|
||||
name: case.name.clone(),
|
||||
ty: self.copy_type(dep_name, dep, case.ty),
|
||||
})
|
||||
.collect(),
|
||||
}),
|
||||
TypeDefKind::Enum(e) => TypeDefKind::Enum(Enum {
|
||||
cases: e.cases.clone(),
|
||||
}),
|
||||
TypeDefKind::List(t) => TypeDefKind::List(self.copy_type(dep_name, dep, *t)),
|
||||
TypeDefKind::Option(t) => TypeDefKind::Option(self.copy_type(dep_name, dep, *t)),
|
||||
TypeDefKind::Expected(e) => TypeDefKind::Expected(Expected {
|
||||
ok: self.copy_type(dep_name, dep, e.ok),
|
||||
err: self.copy_type(dep_name, dep, e.err),
|
||||
}),
|
||||
TypeDefKind::Union(u) => TypeDefKind::Union(Union {
|
||||
cases: u
|
||||
.cases
|
||||
.iter()
|
||||
.map(|c| UnionCase {
|
||||
docs: c.docs.clone(),
|
||||
ty: self.copy_type(dep_name, dep, c.ty),
|
||||
})
|
||||
.collect(),
|
||||
}),
|
||||
TypeDefKind::Future(t) => TypeDefKind::Future(self.copy_type(dep_name, dep, *t)),
|
||||
TypeDefKind::Stream(e) => TypeDefKind::Stream(Stream {
|
||||
element: self.copy_type(dep_name, dep, e.element),
|
||||
end: self.copy_type(dep_name, dep, e.end),
|
||||
}),
|
||||
},
|
||||
};
|
||||
let id = self.types.alloc(ty);
|
||||
self.types_copied.insert((dep_name.to_string(), dep_id), id);
|
||||
id
|
||||
}
|
||||
|
||||
fn copy_type(&mut self, dep_name: &str, dep: &Interface, ty: Type) -> Type {
|
||||
match ty {
|
||||
Type::Id(id) => Type::Id(self.copy_type_def(dep_name, dep, id)),
|
||||
Type::Handle(id) => Type::Handle(self.copy_resource(dep_name, dep, id)),
|
||||
other => other,
|
||||
}
|
||||
}
|
||||
|
||||
fn register_names(&mut self, fields: &[Item<'_>]) -> Result<()> {
|
||||
let mut values = HashSet::new();
|
||||
for field in fields {
|
||||
match field {
|
||||
Item::Resource(r) => {
|
||||
let docs = self.docs(&r.docs);
|
||||
let id = self.resources.alloc(Resource {
|
||||
docs,
|
||||
name: r.name.name.to_string(),
|
||||
supertype: r
|
||||
.supertype
|
||||
.as_ref()
|
||||
.map(|supertype| supertype.name.to_string()),
|
||||
foreign_module: None,
|
||||
});
|
||||
self.define_resource(&r.name.name, r.name.span, id)?;
|
||||
let type_id = self.types.alloc(TypeDef {
|
||||
docs: Docs::default(),
|
||||
kind: TypeDefKind::Type(Type::Handle(id)),
|
||||
name: None,
|
||||
foreign_module: None,
|
||||
});
|
||||
self.define_type(&r.name.name, r.name.span, type_id)?;
|
||||
}
|
||||
Item::TypeDef(t) => {
|
||||
let docs = self.docs(&t.docs);
|
||||
let id = self.types.alloc(TypeDef {
|
||||
docs,
|
||||
// a dummy kind is used for now which will get filled in
|
||||
// later with the actual desired contents.
|
||||
kind: TypeDefKind::List(Type::U8),
|
||||
name: Some(t.name.name.to_string()),
|
||||
foreign_module: None,
|
||||
});
|
||||
self.define_type(&t.name.name, t.name.span, id)?;
|
||||
}
|
||||
Item::Value(f) => {
|
||||
if !values.insert(&f.name.name) {
|
||||
return Err(Error {
|
||||
span: f.name.span,
|
||||
msg: format!("{:?} defined twice", f.name.name),
|
||||
}
|
||||
.into());
|
||||
}
|
||||
}
|
||||
Item::Use(_) => {}
|
||||
|
||||
Item::Interface(_) => unimplemented!(),
|
||||
}
|
||||
}
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
fn define_resource(&mut self, name: &str, span: Span, id: ResourceId) -> Result<()> {
|
||||
if self.resource_lookup.insert(name.to_string(), id).is_some() {
|
||||
Err(Error {
|
||||
span,
|
||||
msg: format!("resource {:?} defined twice", name),
|
||||
}
|
||||
.into())
|
||||
} else {
|
||||
Ok(())
|
||||
}
|
||||
}
|
||||
|
||||
fn define_type(&mut self, name: &str, span: Span, id: TypeId) -> Result<()> {
|
||||
if self.type_lookup.insert(name.to_string(), id).is_some() {
|
||||
Err(Error {
|
||||
span,
|
||||
msg: format!("type {:?} defined twice", name),
|
||||
}
|
||||
.into())
|
||||
} else {
|
||||
Ok(())
|
||||
}
|
||||
}
|
||||
|
||||
fn resolve_type_def(&mut self, ty: &super::Type<'_>) -> Result<TypeDefKind> {
|
||||
Ok(match ty {
|
||||
super::Type::Unit => TypeDefKind::Type(Type::Unit),
|
||||
super::Type::Bool => TypeDefKind::Type(Type::Bool),
|
||||
super::Type::U8 => TypeDefKind::Type(Type::U8),
|
||||
super::Type::U16 => TypeDefKind::Type(Type::U16),
|
||||
super::Type::U32 => TypeDefKind::Type(Type::U32),
|
||||
super::Type::U64 => TypeDefKind::Type(Type::U64),
|
||||
super::Type::S8 => TypeDefKind::Type(Type::S8),
|
||||
super::Type::S16 => TypeDefKind::Type(Type::S16),
|
||||
super::Type::S32 => TypeDefKind::Type(Type::S32),
|
||||
super::Type::S64 => TypeDefKind::Type(Type::S64),
|
||||
super::Type::Float32 => TypeDefKind::Type(Type::Float32),
|
||||
super::Type::Float64 => TypeDefKind::Type(Type::Float64),
|
||||
super::Type::Char => TypeDefKind::Type(Type::Char),
|
||||
super::Type::String => TypeDefKind::Type(Type::String),
|
||||
super::Type::Handle(resource) => {
|
||||
let id = match self.resource_lookup.get(&*resource.name) {
|
||||
Some(id) => *id,
|
||||
None => {
|
||||
return Err(Error {
|
||||
span: resource.span,
|
||||
msg: format!("no resource named `{}`", resource.name),
|
||||
}
|
||||
.into())
|
||||
}
|
||||
};
|
||||
TypeDefKind::Type(Type::Handle(id))
|
||||
}
|
||||
super::Type::Name(name) => {
|
||||
let id = match self.type_lookup.get(&*name.name) {
|
||||
Some(id) => *id,
|
||||
None => {
|
||||
return Err(Error {
|
||||
span: name.span,
|
||||
msg: format!("no type named `{}`", name.name),
|
||||
}
|
||||
.into())
|
||||
}
|
||||
};
|
||||
TypeDefKind::Type(Type::Id(id))
|
||||
}
|
||||
super::Type::List(list) => {
|
||||
let ty = self.resolve_type(list)?;
|
||||
TypeDefKind::List(ty)
|
||||
}
|
||||
super::Type::Record(record) => {
|
||||
let fields = record
|
||||
.fields
|
||||
.iter()
|
||||
.map(|field| {
|
||||
Ok(Field {
|
||||
docs: self.docs(&field.docs),
|
||||
name: field.name.name.to_string(),
|
||||
ty: self.resolve_type(&field.ty)?,
|
||||
})
|
||||
})
|
||||
.collect::<Result<Vec<_>>>()?;
|
||||
TypeDefKind::Record(Record { fields })
|
||||
}
|
||||
super::Type::Flags(flags) => {
|
||||
let flags = flags
|
||||
.flags
|
||||
.iter()
|
||||
.map(|flag| Flag {
|
||||
docs: self.docs(&flag.docs),
|
||||
name: flag.name.name.to_string(),
|
||||
})
|
||||
.collect::<Vec<_>>();
|
||||
TypeDefKind::Flags(Flags { flags })
|
||||
}
|
||||
super::Type::Tuple(types) => {
|
||||
let types = types
|
||||
.iter()
|
||||
.map(|ty| self.resolve_type(ty))
|
||||
.collect::<Result<Vec<_>>>()?;
|
||||
TypeDefKind::Tuple(Tuple { types })
|
||||
}
|
||||
super::Type::Variant(variant) => {
|
||||
if variant.cases.is_empty() {
|
||||
return Err(Error {
|
||||
span: variant.span,
|
||||
msg: "empty variant".to_string(),
|
||||
}
|
||||
.into());
|
||||
}
|
||||
let cases = variant
|
||||
.cases
|
||||
.iter()
|
||||
.map(|case| {
|
||||
Ok(Case {
|
||||
docs: self.docs(&case.docs),
|
||||
name: case.name.name.to_string(),
|
||||
ty: match &case.ty {
|
||||
Some(ty) => self.resolve_type(ty)?,
|
||||
None => Type::Unit,
|
||||
},
|
||||
})
|
||||
})
|
||||
.collect::<Result<Vec<_>>>()?;
|
||||
TypeDefKind::Variant(Variant { cases })
|
||||
}
|
||||
super::Type::Enum(e) => {
|
||||
if e.cases.is_empty() {
|
||||
return Err(Error {
|
||||
span: e.span,
|
||||
msg: "empty enum".to_string(),
|
||||
}
|
||||
.into());
|
||||
}
|
||||
let cases = e
|
||||
.cases
|
||||
.iter()
|
||||
.map(|case| {
|
||||
Ok(EnumCase {
|
||||
docs: self.docs(&case.docs),
|
||||
name: case.name.name.to_string(),
|
||||
})
|
||||
})
|
||||
.collect::<Result<Vec<_>>>()?;
|
||||
TypeDefKind::Enum(Enum { cases })
|
||||
}
|
||||
super::Type::Option(ty) => TypeDefKind::Option(self.resolve_type(ty)?),
|
||||
super::Type::Expected(e) => TypeDefKind::Expected(Expected {
|
||||
ok: self.resolve_type(&e.ok)?,
|
||||
err: self.resolve_type(&e.err)?,
|
||||
}),
|
||||
super::Type::Union(e) => {
|
||||
if e.cases.is_empty() {
|
||||
return Err(Error {
|
||||
span: e.span,
|
||||
msg: "empty union".to_string(),
|
||||
}
|
||||
.into());
|
||||
}
|
||||
let cases = e
|
||||
.cases
|
||||
.iter()
|
||||
.map(|case| {
|
||||
Ok(UnionCase {
|
||||
docs: self.docs(&case.docs),
|
||||
ty: self.resolve_type(&case.ty)?,
|
||||
})
|
||||
})
|
||||
.collect::<Result<Vec<_>>>()?;
|
||||
TypeDefKind::Union(Union { cases })
|
||||
}
|
||||
super::Type::Future(t) => TypeDefKind::Future(self.resolve_type(t)?),
|
||||
super::Type::Stream(s) => TypeDefKind::Stream(Stream {
|
||||
element: self.resolve_type(&s.element)?,
|
||||
end: self.resolve_type(&s.end)?,
|
||||
}),
|
||||
})
|
||||
}
|
||||
|
||||
fn resolve_type(&mut self, ty: &super::Type<'_>) -> Result<Type> {
|
||||
let kind = self.resolve_type_def(ty)?;
|
||||
Ok(self.anon_type_def(TypeDef {
|
||||
kind,
|
||||
name: None,
|
||||
docs: Docs::default(),
|
||||
foreign_module: None,
|
||||
}))
|
||||
}
|
||||
|
||||
fn anon_type_def(&mut self, ty: TypeDef) -> Type {
|
||||
let key = match &ty.kind {
|
||||
TypeDefKind::Type(t) => return *t,
|
||||
TypeDefKind::Variant(v) => Key::Variant(
|
||||
v.cases
|
||||
.iter()
|
||||
.map(|case| (case.name.clone(), case.ty))
|
||||
.collect::<Vec<_>>(),
|
||||
),
|
||||
TypeDefKind::Record(r) => Key::Record(
|
||||
r.fields
|
||||
.iter()
|
||||
.map(|case| (case.name.clone(), case.ty))
|
||||
.collect::<Vec<_>>(),
|
||||
),
|
||||
TypeDefKind::Flags(r) => {
|
||||
Key::Flags(r.flags.iter().map(|f| f.name.clone()).collect::<Vec<_>>())
|
||||
}
|
||||
TypeDefKind::Tuple(t) => Key::Tuple(t.types.clone()),
|
||||
TypeDefKind::Enum(r) => {
|
||||
Key::Enum(r.cases.iter().map(|f| f.name.clone()).collect::<Vec<_>>())
|
||||
}
|
||||
TypeDefKind::List(ty) => Key::List(*ty),
|
||||
TypeDefKind::Option(t) => Key::Option(*t),
|
||||
TypeDefKind::Expected(e) => Key::Expected(e.ok, e.err),
|
||||
TypeDefKind::Union(u) => Key::Union(u.cases.iter().map(|c| c.ty).collect()),
|
||||
TypeDefKind::Future(ty) => Key::Future(*ty),
|
||||
TypeDefKind::Stream(s) => Key::Stream(s.element, s.end),
|
||||
};
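// Intern structurally-identical anonymous types: reuse the previously
// allocated arena entry for this key if one exists, otherwise allocate a
// fresh `TypeDef` and remember it.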
|
||||
let types = &mut self.types;
|
||||
let id = self
|
||||
.anon_types
|
||||
.entry(key)
|
||||
.or_insert_with(|| types.alloc(ty));
|
||||
Type::Id(*id)
|
||||
}
|
||||
|
||||
fn docs(&mut self, doc: &super::Docs<'_>) -> Docs {
|
||||
let mut docs = None;
|
||||
for doc in doc.docs.iter() {
|
||||
// Comments which are not doc-comments are silently ignored
|
||||
if let Some(doc) = doc.strip_prefix("///") {
|
||||
let docs = docs.get_or_insert_with(String::new);
|
||||
docs.push_str(doc.trim_start_matches('/').trim());
|
||||
docs.push('\n');
|
||||
} else if let Some(doc) = doc.strip_prefix("/**") {
|
||||
let docs = docs.get_or_insert_with(String::new);
|
||||
assert!(doc.ends_with("*/"));
|
||||
for line in doc[..doc.len() - 2].lines() {
|
||||
docs.push_str(line);
|
||||
docs.push('\n');
|
||||
}
|
||||
}
|
||||
}
|
||||
Docs { contents: docs }
|
||||
}
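// Illustrative note (assumption, not in the original source): for the two
// doc-comment lines `/// line one` and `/// line two`, the function above
// yields `Docs { contents: Some("line one\nline two\n".to_string()) }`.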
|
||||
|
||||
fn resolve_value(&mut self, value: &Value<'_>) -> Result<()> {
|
||||
let docs = self.docs(&value.docs);
|
||||
match &value.kind {
|
||||
ValueKind::Function {
|
||||
is_async,
|
||||
params,
|
||||
result,
|
||||
} => {
|
||||
let params = params
|
||||
.iter()
|
||||
.map(|(name, ty)| Ok((name.name.to_string(), self.resolve_type(ty)?)))
|
||||
.collect::<Result<_>>()?;
|
||||
let result = self.resolve_type(result)?;
|
||||
self.functions.push(Function {
|
||||
docs,
|
||||
name: value.name.name.to_string(),
|
||||
kind: FunctionKind::Freestanding,
|
||||
params,
|
||||
result,
|
||||
is_async: *is_async,
|
||||
});
|
||||
}
|
||||
ValueKind::Global(ty) => {
|
||||
let ty = self.resolve_type(ty)?;
|
||||
self.globals.push(Global {
|
||||
docs,
|
||||
name: value.name.name.to_string(),
|
||||
ty,
|
||||
});
|
||||
}
|
||||
}
|
||||
Ok(())
|
||||
}
|
||||
|
||||
fn resolve_resource(&mut self, resource: &super::Resource<'_>) -> Result<()> {
|
||||
let mut names = HashSet::new();
|
||||
let id = self.resource_lookup[&*resource.name.name];
|
||||
for (statik, value) in resource.values.iter() {
|
||||
let (is_async, params, result) = match &value.kind {
|
||||
ValueKind::Function {
|
||||
is_async,
|
||||
params,
|
||||
result,
|
||||
} => (*is_async, params, result),
|
||||
ValueKind::Global(_) => {
|
||||
return Err(Error {
|
||||
span: value.name.span,
|
||||
msg: "globals not allowed in resources".to_string(),
|
||||
}
|
||||
.into());
|
||||
}
|
||||
};
|
||||
if !names.insert(&value.name.name) {
|
||||
return Err(Error {
|
||||
span: value.name.span,
|
||||
msg: format!("{:?} defined twice in this resource", value.name.name),
|
||||
}
|
||||
.into());
|
||||
}
|
||||
let docs = self.docs(&value.docs);
|
||||
let mut params = params
|
||||
.iter()
|
||||
.map(|(name, ty)| Ok((name.name.to_string(), self.resolve_type(ty)?)))
|
||||
.collect::<Result<Vec<_>>>()?;
|
||||
let result = self.resolve_type(result)?;
|
||||
let kind = if *statik {
|
||||
FunctionKind::Static {
|
||||
resource: id,
|
||||
name: value.name.name.to_string(),
|
||||
}
|
||||
} else {
|
||||
params.insert(0, ("self".to_string(), Type::Handle(id)));
|
||||
FunctionKind::Method {
|
||||
resource: id,
|
||||
name: value.name.name.to_string(),
|
||||
}
|
||||
};
|
||||
self.functions.push(Function {
|
||||
is_async,
|
||||
docs,
|
||||
name: format!("{}::{}", resource.name.name, value.name.name),
|
||||
kind,
|
||||
params,
|
||||
result,
|
||||
});
|
||||
}
|
||||
Ok(())
|
||||
}
|
||||
|
||||
fn validate_type_not_recursive(
|
||||
&self,
|
||||
span: Span,
|
||||
ty: TypeId,
|
||||
visiting: &mut HashSet<TypeId>,
|
||||
valid: &mut HashSet<TypeId>,
|
||||
) -> Result<()> {
|
||||
if valid.contains(&ty) {
|
||||
return Ok(());
|
||||
}
|
||||
if !visiting.insert(ty) {
|
||||
return Err(Error {
|
||||
span,
|
||||
msg: "type can recursively refer to itself".to_string(),
|
||||
}
|
||||
.into());
|
||||
}
|
||||
|
||||
match &self.types[ty].kind {
|
||||
TypeDefKind::List(Type::Id(id)) | TypeDefKind::Type(Type::Id(id)) => {
|
||||
self.validate_type_not_recursive(span, *id, visiting, valid)?
|
||||
}
|
||||
TypeDefKind::Variant(v) => {
|
||||
for case in v.cases.iter() {
|
||||
if let Type::Id(id) = case.ty {
|
||||
self.validate_type_not_recursive(span, id, visiting, valid)?;
|
||||
}
|
||||
}
|
||||
}
|
||||
TypeDefKind::Record(r) => {
|
||||
for case in r.fields.iter() {
|
||||
if let Type::Id(id) = case.ty {
|
||||
self.validate_type_not_recursive(span, id, visiting, valid)?;
|
||||
}
|
||||
}
|
||||
}
|
||||
TypeDefKind::Tuple(t) => {
|
||||
for ty in t.types.iter() {
|
||||
if let Type::Id(id) = *ty {
|
||||
self.validate_type_not_recursive(span, id, visiting, valid)?;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
TypeDefKind::Option(t) => {
|
||||
if let Type::Id(id) = *t {
|
||||
self.validate_type_not_recursive(span, id, visiting, valid)?
|
||||
}
|
||||
}
|
||||
TypeDefKind::Expected(e) => {
|
||||
if let Type::Id(id) = e.ok {
|
||||
self.validate_type_not_recursive(span, id, visiting, valid)?
|
||||
}
|
||||
if let Type::Id(id) = e.err {
|
||||
self.validate_type_not_recursive(span, id, visiting, valid)?
|
||||
}
|
||||
}
|
||||
TypeDefKind::Future(t) => {
|
||||
if let Type::Id(id) = *t {
|
||||
self.validate_type_not_recursive(span, id, visiting, valid)?
|
||||
}
|
||||
}
|
||||
TypeDefKind::Stream(s) => {
|
||||
if let Type::Id(id) = s.element {
|
||||
self.validate_type_not_recursive(span, id, visiting, valid)?
|
||||
}
|
||||
if let Type::Id(id) = s.end {
|
||||
self.validate_type_not_recursive(span, id, visiting, valid)?
|
||||
}
|
||||
}
|
||||
TypeDefKind::Union(u) => {
|
||||
for c in u.cases.iter() {
|
||||
if let Type::Id(id) = c.ty {
|
||||
self.validate_type_not_recursive(span, id, visiting, valid)?
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
TypeDefKind::Flags(_)
|
||||
| TypeDefKind::List(_)
|
||||
| TypeDefKind::Type(_)
|
||||
| TypeDefKind::Enum(_) => {}
|
||||
}
|
||||
|
||||
valid.insert(ty);
|
||||
visiting.remove(&ty);
|
||||
Ok(())
|
||||
}
|
||||
}
|
||||
524
__wasm/wit-bindgen-sample/wit-bindgen/crates/parser/src/lib.rs
Normal file
@@ -0,0 +1,524 @@
|
||||
use anyhow::{anyhow, bail, Context, Result};
|
||||
use id_arena::{Arena, Id};
|
||||
use pulldown_cmark::{CodeBlockKind, CowStr, Event, Options, Parser, Tag};
|
||||
use std::collections::{HashMap, HashSet};
|
||||
use std::fs;
|
||||
use std::path::{Path, PathBuf};
|
||||
|
||||
pub mod abi;
|
||||
mod ast;
|
||||
mod sizealign;
|
||||
pub use sizealign::*;
|
||||
|
||||
/// Checks if the given string is a legal identifier in wit.
|
||||
pub fn validate_id(s: &str) -> Result<()> {
|
||||
ast::validate_id(0, s)?;
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[derive(Debug, Default)]
|
||||
pub struct Interface {
|
||||
pub name: String,
|
||||
/// The module name to use for bindings generation.
|
||||
///
|
||||
/// If `None`, then the interface name will be used.
|
||||
///
|
||||
/// If `Some`, then this value is used to format an export
|
||||
/// name of `<module>#<name>` for exports or an import module
|
||||
/// name of `<module>` for imports.
|
||||
pub module: Option<String>,
|
||||
pub types: Arena<TypeDef>,
|
||||
pub type_lookup: HashMap<String, TypeId>,
|
||||
pub resources: Arena<Resource>,
|
||||
pub resource_lookup: HashMap<String, ResourceId>,
|
||||
pub interfaces: Arena<Interface>,
|
||||
pub interface_lookup: HashMap<String, InterfaceId>,
|
||||
pub functions: Vec<Function>,
|
||||
pub globals: Vec<Global>,
|
||||
}
|
||||
|
||||
pub type TypeId = Id<TypeDef>;
|
||||
pub type ResourceId = Id<Resource>;
|
||||
pub type InterfaceId = Id<Interface>;
|
||||
|
||||
#[derive(Debug)]
|
||||
pub struct TypeDef {
|
||||
pub docs: Docs,
|
||||
pub kind: TypeDefKind,
|
||||
pub name: Option<String>,
|
||||
/// `None` if this type is originally declared in this instance, or
|
||||
/// `Some` if it was originally defined in a different module.
|
||||
pub foreign_module: Option<String>,
|
||||
}
|
||||
|
||||
#[derive(Debug)]
|
||||
pub enum TypeDefKind {
|
||||
Record(Record),
|
||||
Flags(Flags),
|
||||
Tuple(Tuple),
|
||||
Variant(Variant),
|
||||
Enum(Enum),
|
||||
Option(Type),
|
||||
Expected(Expected),
|
||||
Union(Union),
|
||||
List(Type),
|
||||
Future(Type),
|
||||
Stream(Stream),
|
||||
Type(Type),
|
||||
}
|
||||
|
||||
#[derive(Debug, PartialEq, Eq, Hash, Copy, Clone)]
|
||||
pub enum Type {
|
||||
Unit,
|
||||
Bool,
|
||||
U8,
|
||||
U16,
|
||||
U32,
|
||||
U64,
|
||||
S8,
|
||||
S16,
|
||||
S32,
|
||||
S64,
|
||||
Float32,
|
||||
Float64,
|
||||
Char,
|
||||
String,
|
||||
Handle(ResourceId),
|
||||
Id(TypeId),
|
||||
}
|
||||
|
||||
#[derive(PartialEq, Debug, Copy, Clone)]
|
||||
pub enum Int {
|
||||
U8,
|
||||
U16,
|
||||
U32,
|
||||
U64,
|
||||
}
|
||||
|
||||
#[derive(Debug)]
|
||||
pub struct Record {
|
||||
pub fields: Vec<Field>,
|
||||
}
|
||||
|
||||
#[derive(Debug)]
|
||||
pub struct Field {
|
||||
pub docs: Docs,
|
||||
pub name: String,
|
||||
pub ty: Type,
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone)]
|
||||
pub struct Flags {
|
||||
pub flags: Vec<Flag>,
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone)]
|
||||
pub struct Flag {
|
||||
pub docs: Docs,
|
||||
pub name: String,
|
||||
}
|
||||
|
||||
#[derive(Debug)]
|
||||
pub enum FlagsRepr {
|
||||
U8,
|
||||
U16,
|
||||
U32(usize),
|
||||
}
|
||||
|
||||
impl Flags {
|
||||
pub fn repr(&self) -> FlagsRepr {
|
||||
match self.flags.len() {
|
||||
n if n <= 8 => FlagsRepr::U8,
|
||||
n if n <= 16 => FlagsRepr::U16,
|
||||
n => FlagsRepr::U32(sizealign::align_to(n, 32) / 32),
|
||||
}
|
||||
}
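// Illustrative examples (not in the original source): 3 flags are
// represented as `FlagsRepr::U8`, 12 flags as `FlagsRepr::U16`, and 40
// flags as `FlagsRepr::U32(2)` (two 32-bit words).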
|
||||
}
|
||||
|
||||
impl FlagsRepr {
|
||||
pub fn count(&self) -> usize {
|
||||
match self {
|
||||
FlagsRepr::U8 => 1,
|
||||
FlagsRepr::U16 => 1,
|
||||
FlagsRepr::U32(n) => *n,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone)]
|
||||
pub struct Tuple {
|
||||
pub types: Vec<Type>,
|
||||
}
|
||||
|
||||
#[derive(Debug)]
|
||||
pub struct Variant {
|
||||
pub cases: Vec<Case>,
|
||||
}
|
||||
|
||||
#[derive(Debug)]
|
||||
pub struct Case {
|
||||
pub docs: Docs,
|
||||
pub name: String,
|
||||
pub ty: Type,
|
||||
}
|
||||
|
||||
impl Variant {
|
||||
pub fn tag(&self) -> Int {
|
||||
match self.cases.len() {
|
||||
n if n <= u8::max_value() as usize => Int::U8,
|
||||
n if n <= u16::max_value() as usize => Int::U16,
|
||||
n if n <= u32::max_value() as usize => Int::U32,
|
||||
_ => panic!("too many cases to fit in a repr"),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug)]
|
||||
pub struct Enum {
|
||||
pub cases: Vec<EnumCase>,
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone)]
|
||||
pub struct EnumCase {
|
||||
pub docs: Docs,
|
||||
pub name: String,
|
||||
}
|
||||
|
||||
impl Enum {
|
||||
pub fn tag(&self) -> Int {
|
||||
match self.cases.len() {
|
||||
n if n <= u8::max_value() as usize => Int::U8,
|
||||
n if n <= u16::max_value() as usize => Int::U16,
|
||||
n if n <= u32::max_value() as usize => Int::U32,
|
||||
_ => panic!("too many cases to fit in a repr"),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug)]
|
||||
pub struct Expected {
|
||||
pub ok: Type,
|
||||
pub err: Type,
|
||||
}
|
||||
|
||||
#[derive(Debug)]
|
||||
pub struct Union {
|
||||
pub cases: Vec<UnionCase>,
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone)]
|
||||
pub struct UnionCase {
|
||||
pub docs: Docs,
|
||||
pub ty: Type,
|
||||
}
|
||||
|
||||
impl Union {
|
||||
pub fn tag(&self) -> Int {
|
||||
match self.cases.len() {
|
||||
n if n <= u8::max_value() as usize => Int::U8,
|
||||
n if n <= u16::max_value() as usize => Int::U16,
|
||||
n if n <= u32::max_value() as usize => Int::U32,
|
||||
_ => panic!("too many cases to fit in a repr"),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug)]
|
||||
pub struct Stream {
|
||||
pub element: Type,
|
||||
pub end: Type,
|
||||
}
|
||||
|
||||
#[derive(Clone, Default, Debug)]
|
||||
pub struct Docs {
|
||||
pub contents: Option<String>,
|
||||
}
|
||||
|
||||
#[derive(Debug)]
|
||||
pub struct Resource {
|
||||
pub docs: Docs,
|
||||
pub name: String,
|
||||
pub supertype: Option<String>,
|
||||
/// `None` if this resource is defined within the containing instance,
|
||||
/// otherwise `Some` if it's defined in an instance named here.
|
||||
pub foreign_module: Option<String>,
|
||||
}
|
||||
|
||||
#[derive(Debug)]
|
||||
pub struct Global {
|
||||
pub docs: Docs,
|
||||
pub name: String,
|
||||
pub ty: Type,
|
||||
}
|
||||
|
||||
#[derive(Debug)]
|
||||
pub struct Function {
|
||||
pub is_async: bool,
|
||||
pub docs: Docs,
|
||||
pub name: String,
|
||||
pub kind: FunctionKind,
|
||||
pub params: Vec<(String, Type)>,
|
||||
pub result: Type,
|
||||
}
|
||||
|
||||
#[derive(Debug)]
|
||||
pub enum FunctionKind {
|
||||
Freestanding,
|
||||
Static { resource: ResourceId, name: String },
|
||||
Method { resource: ResourceId, name: String },
|
||||
}
|
||||
|
||||
impl Function {
|
||||
pub fn item_name(&self) -> &str {
|
||||
match &self.kind {
|
||||
FunctionKind::Freestanding => &self.name,
|
||||
FunctionKind::Static { name, .. } => name,
|
||||
FunctionKind::Method { name, .. } => name,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
fn unwrap_md(contents: &str) -> String {
|
||||
let mut wit = String::new();
|
||||
let mut last_pos = 0;
|
||||
let mut in_wit_code_block = false;
|
||||
Parser::new_ext(contents, Options::empty())
|
||||
.into_offset_iter()
|
||||
.for_each(|(event, range)| match (event, range) {
|
||||
(Event::Start(Tag::CodeBlock(CodeBlockKind::Fenced(CowStr::Borrowed("wit")))), _) => {
|
||||
in_wit_code_block = true;
|
||||
}
|
||||
(Event::Text(text), range) if in_wit_code_block => {
|
||||
// Ensure that offsets are correct by inserting newlines to
|
||||
// cover the Markdown content outside of wit code blocks.
|
||||
for _ in contents[last_pos..range.start].lines() {
|
||||
wit.push_str("\n");
|
||||
}
|
||||
wit.push_str(&text);
|
||||
last_pos = range.end;
|
||||
}
|
||||
(Event::End(Tag::CodeBlock(CodeBlockKind::Fenced(CowStr::Borrowed("wit")))), _) => {
|
||||
in_wit_code_block = false;
|
||||
}
|
||||
_ => {}
|
||||
});
|
||||
wit
|
||||
}
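// Illustrative example (assumption, not in the original source): for the
// markdown input
//
//     # Title
//     ```wit
//     x: func()
//     ```
//
// `unwrap_md` returns "\n\nx: func()\n": one blank line per markdown line
// preceding the block, so the extracted wit keeps its original line numbers
// for error reporting.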
|
||||
|
||||
impl Interface {
|
||||
pub fn parse(name: &str, input: &str) -> Result<Interface> {
|
||||
Interface::parse_with(name, input, |f| {
|
||||
Err(anyhow!("cannot load submodule `{}`", f))
|
||||
})
|
||||
}
|
||||
|
||||
pub fn parse_file(path: impl AsRef<Path>) -> Result<Interface> {
|
||||
let path = path.as_ref();
|
||||
let parent = path.parent().unwrap();
|
||||
let contents = std::fs::read_to_string(&path)
|
||||
.with_context(|| format!("failed to read: {}", path.display()))?;
|
||||
Interface::parse_with(path, &contents, |path| load_fs(parent, path))
|
||||
}
|
||||
|
||||
pub fn parse_with(
|
||||
filename: impl AsRef<Path>,
|
||||
contents: &str,
|
||||
mut load: impl FnMut(&str) -> Result<(PathBuf, String)>,
|
||||
) -> Result<Interface> {
|
||||
Interface::_parse_with(
|
||||
filename.as_ref(),
|
||||
contents,
|
||||
&mut load,
|
||||
&mut HashSet::new(),
|
||||
&mut HashMap::new(),
|
||||
)
|
||||
}
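// Illustrative usage (assumption, not in the original source): a caller that
// keeps dependency sources in memory could supply its own loader, e.g.
//
//     let iface = Interface::parse_with("root", &root_src, |name| {
//         let src = sources.get(name).cloned()
//             .ok_or_else(|| anyhow!("unknown submodule `{}`", name))?;
//         Ok((PathBuf::from(format!("{}.wit", name)), src))
//     })?;
//
// where `sources` is a hypothetical `HashMap<String, String>`.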
|
||||
|
||||
fn _parse_with(
|
||||
filename: &Path,
|
||||
contents: &str,
|
||||
load: &mut dyn FnMut(&str) -> Result<(PathBuf, String)>,
|
||||
visiting: &mut HashSet<PathBuf>,
|
||||
map: &mut HashMap<String, Interface>,
|
||||
) -> Result<Interface> {
|
||||
let mut name = filename.file_stem().unwrap();
|
||||
let mut contents = contents;
|
||||
|
||||
// If we have a ".md" file, it's a wit file wrapped in a markdown file;
|
||||
// parse the markdown to extract the `wit` code blocks.
|
||||
let md_contents;
|
||||
if filename.extension().and_then(|s| s.to_str()) == Some("md") {
|
||||
md_contents = unwrap_md(contents);
|
||||
contents = &md_contents[..];
|
||||
|
||||
// Also strip the inner ".wit" extension.
|
||||
name = Path::new(name).file_stem().unwrap();
|
||||
}
|
||||
|
||||
// Parse the `contents` into an AST
|
||||
let ast = match ast::Ast::parse(contents) {
|
||||
Ok(ast) => ast,
|
||||
Err(mut e) => {
|
||||
let file = filename.display().to_string();
|
||||
ast::rewrite_error(&mut e, &file, contents);
|
||||
return Err(e);
|
||||
}
|
||||
};
|
||||
|
||||
// Load up any modules into our `map` that have not yet been parsed.
|
||||
if !visiting.insert(filename.to_path_buf()) {
|
||||
bail!("file `{}` recursively imports itself", filename.display())
|
||||
}
|
||||
for item in ast.items.iter() {
|
||||
let u = match item {
|
||||
ast::Item::Use(u) => u,
|
||||
_ => continue,
|
||||
};
|
||||
if map.contains_key(&*u.from[0].name) {
|
||||
continue;
|
||||
}
|
||||
let (filename, contents) = load(&u.from[0].name)
|
||||
// TODO: insert context here about `u.name.span` and `filename`
|
||||
?;
|
||||
let instance = Interface::_parse_with(&filename, &contents, load, visiting, map)?;
|
||||
map.insert(u.from[0].name.to_string(), instance);
|
||||
}
|
||||
visiting.remove(filename);
|
||||
|
||||
// and finally resolve everything into our final instance
|
||||
match ast.resolve(name.to_str().unwrap(), map) {
|
||||
Ok(i) => Ok(i),
|
||||
Err(mut e) => {
|
||||
let file = filename.display().to_string();
|
||||
ast::rewrite_error(&mut e, &file, contents);
|
||||
Err(e)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
pub fn topological_types(&self) -> Vec<TypeId> {
|
||||
let mut ret = Vec::new();
|
||||
let mut visited = HashSet::new();
|
||||
for (id, _) in self.types.iter() {
|
||||
self.topo_visit(id, &mut ret, &mut visited);
|
||||
}
|
||||
ret
|
||||
}
|
||||
|
||||
fn topo_visit(&self, id: TypeId, list: &mut Vec<TypeId>, visited: &mut HashSet<TypeId>) {
|
||||
if !visited.insert(id) {
|
||||
return;
|
||||
}
|
||||
match &self.types[id].kind {
|
||||
TypeDefKind::Flags(_) | TypeDefKind::Enum(_) => {}
|
||||
TypeDefKind::Type(t) | TypeDefKind::List(t) => self.topo_visit_ty(t, list, visited),
|
||||
TypeDefKind::Record(r) => {
|
||||
for f in r.fields.iter() {
|
||||
self.topo_visit_ty(&f.ty, list, visited);
|
||||
}
|
||||
}
|
||||
TypeDefKind::Tuple(t) => {
|
||||
for t in t.types.iter() {
|
||||
self.topo_visit_ty(t, list, visited);
|
||||
}
|
||||
}
|
||||
TypeDefKind::Variant(v) => {
|
||||
for v in v.cases.iter() {
|
||||
self.topo_visit_ty(&v.ty, list, visited);
|
||||
}
|
||||
}
|
||||
TypeDefKind::Option(ty) => self.topo_visit_ty(ty, list, visited),
|
||||
TypeDefKind::Expected(e) => {
|
||||
self.topo_visit_ty(&e.ok, list, visited);
|
||||
self.topo_visit_ty(&e.err, list, visited);
|
||||
}
|
||||
TypeDefKind::Union(u) => {
|
||||
for t in u.cases.iter() {
|
||||
self.topo_visit_ty(&t.ty, list, visited);
|
||||
}
|
||||
}
|
||||
TypeDefKind::Future(ty) => {
|
||||
self.topo_visit_ty(ty, list, visited);
|
||||
}
|
||||
TypeDefKind::Stream(s) => {
|
||||
self.topo_visit_ty(&s.element, list, visited);
|
||||
self.topo_visit_ty(&s.end, list, visited);
|
||||
}
|
||||
}
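// Post-order: a type is pushed only after everything it refers to has been
// pushed, so `topological_types` yields types in dependency order (which is
// what `SizeAlign::fill` relies on).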
|
||||
list.push(id);
|
||||
}
|
||||
|
||||
fn topo_visit_ty(&self, ty: &Type, list: &mut Vec<TypeId>, visited: &mut HashSet<TypeId>) {
|
||||
if let Type::Id(id) = ty {
|
||||
self.topo_visit(*id, list, visited);
|
||||
}
|
||||
}
|
||||
|
||||
pub fn all_bits_valid(&self, ty: &Type) -> bool {
|
||||
match ty {
|
||||
Type::Unit
|
||||
| Type::U8
|
||||
| Type::S8
|
||||
| Type::U16
|
||||
| Type::S16
|
||||
| Type::U32
|
||||
| Type::S32
|
||||
| Type::U64
|
||||
| Type::S64
|
||||
| Type::Float32
|
||||
| Type::Float64 => true,
|
||||
|
||||
Type::Bool | Type::Char | Type::Handle(_) | Type::String => false,
|
||||
|
||||
Type::Id(id) => match &self.types[*id].kind {
|
||||
TypeDefKind::List(_)
|
||||
| TypeDefKind::Variant(_)
|
||||
| TypeDefKind::Enum(_)
|
||||
| TypeDefKind::Option(_)
|
||||
| TypeDefKind::Expected(_)
|
||||
| TypeDefKind::Future(_)
|
||||
| TypeDefKind::Stream(_)
|
||||
| TypeDefKind::Union(_) => false,
|
||||
TypeDefKind::Type(t) => self.all_bits_valid(t),
|
||||
TypeDefKind::Record(r) => r.fields.iter().all(|f| self.all_bits_valid(&f.ty)),
|
||||
TypeDefKind::Tuple(t) => t.types.iter().all(|t| self.all_bits_valid(t)),
|
||||
|
||||
// FIXME: this could perhaps be `true` for multiples-of-32 but
|
||||
// seems better to probably leave this as unconditionally
|
||||
// `false` for now, may want to reconsider later?
|
||||
TypeDefKind::Flags(_) => false,
|
||||
},
|
||||
}
|
||||
}
|
||||
|
||||
pub fn get_variant(&self, ty: &Type) -> Option<&Variant> {
|
||||
if let Type::Id(id) = ty {
|
||||
match &self.types[*id].kind {
|
||||
TypeDefKind::Variant(v) => Some(v),
|
||||
_ => None,
|
||||
}
|
||||
} else {
|
||||
None
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
fn load_fs(root: &Path, name: &str) -> Result<(PathBuf, String)> {
|
||||
let wit = root.join(name).with_extension("wit");
|
||||
|
||||
// Attempt to read a ".wit" file.
|
||||
match fs::read_to_string(&wit) {
|
||||
Ok(contents) => Ok((wit, contents)),
|
||||
|
||||
// If no such file was found, attempt to read a ".wit.md" file.
|
||||
Err(err) if err.kind() == std::io::ErrorKind::NotFound => {
|
||||
let wit_md = wit.with_extension("wit.md");
|
||||
match fs::read_to_string(&wit_md) {
|
||||
Ok(contents) => Ok((wit_md, contents)),
|
||||
Err(_err) => Err(err.into()),
|
||||
}
|
||||
}
|
||||
|
||||
Err(err) => return Err(err.into()),
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,121 @@
|
||||
use crate::{FlagsRepr, Int, Interface, Type, TypeDef, TypeDefKind};
|
||||
|
||||
#[derive(Default)]
|
||||
pub struct SizeAlign {
|
||||
map: Vec<(usize, usize)>,
|
||||
}
|
||||
|
||||
impl SizeAlign {
|
||||
pub fn fill(&mut self, iface: &Interface) {
|
||||
self.map = vec![(0, 0); iface.types.len()];
|
||||
for ty in iface.topological_types() {
|
||||
let pair = self.calculate(&iface.types[ty]);
|
||||
self.map[ty.index()] = pair;
|
||||
}
|
||||
}
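// Illustrative usage (assumption, not in the original source): a bindings
// generator would typically compute layouts once per interface, e.g.
//
//     let mut sizes = SizeAlign::default();
//     sizes.fill(&iface);
//     assert_eq!(sizes.size(&Type::U32), 4);
//     assert_eq!(sizes.align(&Type::U64), 8);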
|
||||
|
||||
fn calculate(&self, ty: &TypeDef) -> (usize, usize) {
|
||||
match &ty.kind {
|
||||
TypeDefKind::Type(t) => (self.size(t), self.align(t)),
|
||||
TypeDefKind::List(_) => (8, 4),
|
||||
TypeDefKind::Record(r) => self.record(r.fields.iter().map(|f| &f.ty)),
|
||||
TypeDefKind::Tuple(t) => self.record(t.types.iter()),
|
||||
TypeDefKind::Flags(f) => match f.repr() {
|
||||
FlagsRepr::U8 => (1, 1),
|
||||
FlagsRepr::U16 => (2, 2),
|
||||
FlagsRepr::U32(n) => (n * 4, 4),
|
||||
},
|
||||
TypeDefKind::Variant(v) => self.variant(v.tag(), v.cases.iter().map(|c| &c.ty)),
|
||||
TypeDefKind::Enum(e) => self.variant(e.tag(), []),
|
||||
TypeDefKind::Option(t) => self.variant(Int::U8, [&Type::Unit, t]),
|
||||
TypeDefKind::Expected(e) => self.variant(Int::U8, [&e.ok, &e.err]),
|
||||
TypeDefKind::Union(u) => self.variant(u.tag(), u.cases.iter().map(|c| &c.ty)),
|
||||
// A future is represented as an index.
|
||||
TypeDefKind::Future(_) => (4, 4),
|
||||
// A stream is represented as an index.
|
||||
TypeDefKind::Stream(_) => (4, 4),
|
||||
}
|
||||
}
|
||||
|
||||
pub fn size(&self, ty: &Type) -> usize {
|
||||
match ty {
|
||||
Type::Unit => 0,
|
||||
Type::Bool | Type::U8 | Type::S8 => 1,
|
||||
Type::U16 | Type::S16 => 2,
|
||||
Type::U32 | Type::S32 | Type::Float32 | Type::Char | Type::Handle(_) => 4,
|
||||
Type::U64 | Type::S64 | Type::Float64 | Type::String => 8,
|
||||
Type::Id(id) => self.map[id.index()].0,
|
||||
}
|
||||
}
|
||||
|
||||
pub fn align(&self, ty: &Type) -> usize {
|
||||
match ty {
|
||||
Type::Unit | Type::Bool | Type::U8 | Type::S8 => 1,
|
||||
Type::U16 | Type::S16 => 2,
|
||||
Type::U32 | Type::S32 | Type::Float32 | Type::Char | Type::Handle(_) | Type::String => {
|
||||
4
|
||||
}
|
||||
Type::U64 | Type::S64 | Type::Float64 => 8,
|
||||
Type::Id(id) => self.map[id.index()].1,
|
||||
}
|
||||
}
|
||||
|
||||
pub fn field_offsets<'a>(&self, types: impl IntoIterator<Item = &'a Type>) -> Vec<usize> {
|
||||
let mut cur = 0;
|
||||
types
|
||||
.into_iter()
|
||||
.map(|ty| {
|
||||
let ret = align_to(cur, self.align(ty));
|
||||
cur = ret + self.size(ty);
|
||||
ret
|
||||
})
|
||||
.collect()
|
||||
}
|
||||
|
||||
pub fn payload_offset<'a>(&self, tag: Int, cases: impl IntoIterator<Item = &'a Type>) -> usize {
|
||||
let mut max_align = 1;
|
||||
for ty in cases {
|
||||
max_align = max_align.max(self.align(ty));
|
||||
}
|
||||
let tag_size = int_size_align(tag).0;
|
||||
align_to(tag_size, max_align)
|
||||
}
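// Worked example (illustrative): with a `u8` tag and a largest case
// alignment of 4 (say a `u32` payload), the payload starts at
// align_to(1, 4) == 4.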
|
||||
|
||||
pub fn record<'a>(&self, types: impl Iterator<Item = &'a Type>) -> (usize, usize) {
|
||||
let mut size = 0;
|
||||
let mut align = 1;
|
||||
for ty in types {
|
||||
let field_size = self.size(ty);
|
||||
let field_align = self.align(ty);
|
||||
size = align_to(size, field_align) + field_size;
|
||||
align = align.max(field_align);
|
||||
}
|
||||
(align_to(size, align), align)
|
||||
}
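// Worked example (illustrative): fields `u8` then `u32` land at offsets 0
// and 4 respectively, giving (size, align) == (8, 4) after trailing padding.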
|
||||
|
||||
fn variant<'a>(&self, tag: Int, types: impl IntoIterator<Item = &'a Type>) -> (usize, usize) {
|
||||
let (discrim_size, discrim_align) = int_size_align(tag);
|
||||
let mut size = discrim_size;
|
||||
let mut align = discrim_align;
|
||||
for ty in types {
|
||||
let case_size = self.size(ty);
|
||||
let case_align = self.align(ty);
|
||||
align = align.max(case_align);
|
||||
size = size.max(align_to(discrim_size, case_align) + case_size);
|
||||
}
|
||||
(size, align)
|
||||
}
|
||||
}
|
||||
|
||||
fn int_size_align(i: Int) -> (usize, usize) {
|
||||
match i {
|
||||
Int::U8 => (1, 1),
|
||||
Int::U16 => (2, 2),
|
||||
Int::U32 => (4, 4),
|
||||
Int::U64 => (8, 8),
|
||||
}
|
||||
}
|
||||
|
||||
pub(crate) fn align_to(val: usize, align: usize) -> usize {
|
||||
(val + align - 1) & !(align - 1)
|
||||
}
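// Illustrative: align_to(5, 4) == 8, align_to(8, 4) == 8, align_to(0, 8) == 0.
// The bit trick assumes `align` is a power of two, which holds for all the
// alignments produced above.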
|
||||
344
__wasm/wit-bindgen-sample/wit-bindgen/crates/parser/tests/all.rs
Normal file
@@ -0,0 +1,344 @@
|
||||
//! You can run this test suite with:
|
||||
//!
|
||||
//! cargo test --test all
|
||||
//!
|
||||
//! An argument can be passed as well to filter, based on filename, which tests
|
||||
//! to run
|
||||
//!
|
||||
//! cargo test --test all foo.wit
|
||||
|
||||
use anyhow::{bail, Context, Result};
|
||||
use rayon::prelude::*;
|
||||
use serde::Serialize;
|
||||
use std::env;
|
||||
use std::ffi::OsStr;
|
||||
use std::fs;
|
||||
use std::io;
|
||||
use std::path::{Path, PathBuf};
|
||||
use std::str;
|
||||
use std::sync::atomic::{AtomicUsize, Ordering::SeqCst};
|
||||
use wit_parser::*;
|
||||
|
||||
fn main() {
|
||||
let tests = find_tests();
|
||||
let filter = std::env::args().nth(1);
|
||||
|
||||
let tests = tests
|
||||
.par_iter()
|
||||
.filter_map(|test| {
|
||||
if let Some(filter) = &filter {
|
||||
if let Some(s) = test.to_str() {
|
||||
if !s.contains(filter) {
|
||||
return None;
|
||||
}
|
||||
}
|
||||
}
|
||||
let contents = fs::read(test).unwrap();
|
||||
Some((test, contents))
|
||||
})
|
||||
.collect::<Vec<_>>();
|
||||
|
||||
println!("running {} test files\n", tests.len());
|
||||
|
||||
let ntests = AtomicUsize::new(0);
|
||||
let errors = tests
|
||||
.par_iter()
|
||||
.filter_map(|(test, contents)| {
|
||||
Runner { ntests: &ntests }
|
||||
.run(test, contents)
|
||||
.context(format!("test {:?} failed", test))
|
||||
.err()
|
||||
})
|
||||
.collect::<Vec<_>>();
|
||||
|
||||
if !errors.is_empty() {
|
||||
for msg in errors.iter() {
|
||||
eprintln!("{:?}", msg);
|
||||
}
|
||||
|
||||
panic!("{} tests failed", errors.len())
|
||||
}
|
||||
|
||||
println!(
|
||||
"test result: ok. {} directives passed\n",
|
||||
ntests.load(SeqCst)
|
||||
);
|
||||
}
|
||||
|
||||
/// Recursively finds all tests in a whitelisted set of directories which we
|
||||
/// then load up and test in parallel.
|
||||
fn find_tests() -> Vec<PathBuf> {
|
||||
let mut tests = Vec::new();
|
||||
find_tests("tests/ui".as_ref(), &mut tests);
|
||||
tests.sort();
|
||||
return tests;
|
||||
|
||||
fn find_tests(path: &Path, tests: &mut Vec<PathBuf>) {
|
||||
for f in path.read_dir().unwrap() {
|
||||
let f = f.unwrap();
|
||||
if f.file_type().unwrap().is_dir() {
|
||||
find_tests(&f.path(), tests);
|
||||
continue;
|
||||
}
|
||||
|
||||
match f.path().extension().and_then(|s| s.to_str()) {
|
||||
Some("md") => {}
|
||||
Some("wit") => {}
|
||||
_ => continue,
|
||||
}
|
||||
tests.push(f.path());
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
struct Runner<'a> {
|
||||
ntests: &'a AtomicUsize,
|
||||
}
|
||||
|
||||
impl Runner<'_> {
|
||||
fn run(&mut self, test: &Path, contents: &[u8]) -> Result<()> {
|
||||
let contents = str::from_utf8(contents)?;
|
||||
|
||||
let result = Interface::parse_file(test);
|
||||
|
||||
let result = if contents.contains("// parse-fail") {
|
||||
match result {
|
||||
Ok(_) => bail!("expected test to not parse but it did"),
|
||||
Err(mut e) => {
|
||||
if let Some(err) = e.downcast_mut::<io::Error>() {
|
||||
*err = io::Error::new(
|
||||
io::ErrorKind::Other,
|
||||
"some generic platform-agnostic error message",
|
||||
);
|
||||
}
|
||||
normalize(test, &format!("{:?}", e))
|
||||
}
|
||||
}
|
||||
} else {
|
||||
let instance = result?;
|
||||
to_json(&instance)
|
||||
};
|
||||
|
||||
// "foo.wit" => "foo.wit.result"
|
||||
// "foo.wit.md" => "foo.wit.md.result"
|
||||
let result_file = if test.extension() == Some(OsStr::new("md"))
|
||||
&& test
|
||||
.file_stem()
|
||||
.and_then(|path| Path::new(path).extension())
|
||||
== Some(OsStr::new("wit"))
|
||||
{
|
||||
test.with_extension("md.result")
|
||||
} else {
|
||||
test.with_extension("wit.result")
|
||||
};
|
||||
if env::var_os("BLESS").is_some() {
|
||||
fs::write(&result_file, result)?;
|
||||
} else {
|
||||
let expected = fs::read_to_string(&result_file).context(format!(
|
||||
"failed to read test expectation file {:?}\nthis can be fixed with BLESS=1",
|
||||
result_file
|
||||
))?;
|
||||
let expected = normalize(test, &expected);
|
||||
if expected != result {
|
||||
bail!(
|
||||
"failed test: expected `{:?}` but found `{:?}`",
|
||||
expected,
|
||||
result
|
||||
);
|
||||
}
|
||||
}
|
||||
self.bump_ntests();
|
||||
return Ok(());
|
||||
|
||||
fn normalize(test: &Path, s: &str) -> String {
|
||||
s.replace(
|
||||
&test.display().to_string(),
|
||||
&test.display().to_string().replace("\\", "/"),
|
||||
)
|
||||
.replace("\\parse-fail\\", "/parse-fail/")
|
||||
.replace("\r\n", "\n")
|
||||
}
|
||||
}
|
||||
|
||||
fn bump_ntests(&self) {
|
||||
self.ntests.fetch_add(1, SeqCst);
|
||||
}
|
||||
}
|
||||
|
||||
fn to_json(i: &Interface) -> String {
|
||||
#[derive(Serialize)]
|
||||
struct Interface {
|
||||
#[serde(skip_serializing_if = "Vec::is_empty")]
|
||||
resources: Vec<Resource>,
|
||||
#[serde(skip_serializing_if = "Vec::is_empty")]
|
||||
types: Vec<TypeDef>,
|
||||
#[serde(skip_serializing_if = "Vec::is_empty")]
|
||||
functions: Vec<Function>,
|
||||
#[serde(skip_serializing_if = "Vec::is_empty")]
|
||||
globals: Vec<Global>,
|
||||
}
|
||||
|
||||
#[derive(Serialize)]
|
||||
struct Resource {
|
||||
name: String,
|
||||
#[serde(skip_serializing_if = "Option::is_none")]
|
||||
supertype: Option<String>,
|
||||
#[serde(skip_serializing_if = "Option::is_none")]
|
||||
foreign_module: Option<String>,
|
||||
}
|
||||
|
||||
#[derive(Serialize)]
|
||||
struct TypeDef {
|
||||
idx: usize,
|
||||
#[serde(skip_serializing_if = "Option::is_none")]
|
||||
name: Option<String>,
|
||||
#[serde(flatten)]
|
||||
ty: Type,
|
||||
#[serde(skip_serializing_if = "Option::is_none")]
|
||||
foreign_module: Option<String>,
|
||||
}
|
||||
|
||||
#[derive(Serialize)]
|
||||
#[serde(rename_all = "kebab-case")]
|
||||
enum Type {
|
||||
Primitive(String),
|
||||
Record { fields: Vec<(String, String)> },
|
||||
Flags { flags: Vec<String> },
|
||||
Enum { cases: Vec<String> },
|
||||
Variant { cases: Vec<(String, String)> },
|
||||
Tuple { types: Vec<String> },
|
||||
Option(String),
|
||||
Expected { ok: String, err: String },
|
||||
Future(String),
|
||||
Stream { element: String, end: String },
|
||||
List(String),
|
||||
Union { cases: Vec<String> },
|
||||
}
|
||||
|
||||
#[derive(Serialize)]
|
||||
struct Function {
|
||||
name: String,
|
||||
#[serde(rename = "async", skip_serializing_if = "Option::is_none")]
|
||||
is_async: Option<bool>,
|
||||
params: Vec<String>,
|
||||
result: String,
|
||||
}
|
||||
|
||||
#[derive(Serialize)]
|
||||
struct Global {
|
||||
name: String,
|
||||
ty: String,
|
||||
}
|
||||
|
||||
let resources = i
|
||||
.resources
|
||||
.iter()
|
||||
.map(|(_, r)| Resource {
|
||||
name: r.name.clone(),
|
||||
supertype: r.supertype.as_ref().map(|supertype| supertype.clone()),
|
||||
foreign_module: r.foreign_module.clone(),
|
||||
})
|
||||
.collect::<Vec<_>>();
|
||||
|
||||
let types = i
|
||||
.types
|
||||
.iter()
|
||||
.map(|(i, r)| TypeDef {
|
||||
idx: i.index(),
|
||||
name: r.name.clone(),
|
||||
ty: translate_typedef(r),
|
||||
foreign_module: r.foreign_module.clone(),
|
||||
})
|
||||
.collect::<Vec<_>>();
|
||||
let functions = i
|
||||
.functions
|
||||
.iter()
|
||||
.map(|f| Function {
|
||||
name: f.name.clone(),
|
||||
is_async: if f.is_async { Some(f.is_async) } else { None },
|
||||
params: f.params.iter().map(|(_, ty)| translate_type(ty)).collect(),
|
||||
result: translate_type(&f.result),
|
||||
})
|
||||
.collect::<Vec<_>>();
|
||||
let globals = i
|
||||
.globals
|
||||
.iter()
|
||||
.map(|g| Global {
|
||||
name: g.name.clone(),
|
||||
ty: translate_type(&g.ty),
|
||||
})
|
||||
.collect::<Vec<_>>();
|
||||
|
||||
let iface = Interface {
|
||||
resources,
|
||||
types,
|
||||
functions,
|
||||
globals,
|
||||
};
|
||||
return serde_json::to_string_pretty(&iface).unwrap();
|
||||
|
||||
fn translate_typedef(ty: &wit_parser::TypeDef) -> Type {
|
||||
match &ty.kind {
|
||||
TypeDefKind::Type(t) => Type::Primitive(translate_type(t)),
|
||||
TypeDefKind::Record(r) => Type::Record {
|
||||
fields: r
|
||||
.fields
|
||||
.iter()
|
||||
.map(|f| (f.name.clone(), translate_type(&f.ty)))
|
||||
.collect(),
|
||||
},
|
||||
TypeDefKind::Tuple(t) => Type::Tuple {
|
||||
types: t.types.iter().map(|ty| translate_type(ty)).collect(),
|
||||
},
|
||||
TypeDefKind::Flags(r) => Type::Flags {
|
||||
flags: r.flags.iter().map(|f| f.name.clone()).collect(),
|
||||
},
|
||||
TypeDefKind::Enum(r) => Type::Enum {
|
||||
cases: r.cases.iter().map(|f| f.name.clone()).collect(),
|
||||
},
|
||||
TypeDefKind::Variant(v) => Type::Variant {
|
||||
cases: v
|
||||
.cases
|
||||
.iter()
|
||||
.map(|f| (f.name.clone(), translate_type(&f.ty)))
|
||||
.collect(),
|
||||
},
|
||||
TypeDefKind::Option(t) => Type::Option(translate_type(t)),
|
||||
TypeDefKind::Expected(e) => Type::Expected {
|
||||
ok: translate_type(&e.ok),
|
||||
err: translate_type(&e.err),
|
||||
},
|
||||
TypeDefKind::Future(t) => Type::Future(translate_type(t)),
|
||||
TypeDefKind::Stream(s) => Type::Stream {
|
||||
element: translate_type(&s.element),
|
||||
end: translate_type(&s.end),
|
||||
},
|
||||
TypeDefKind::List(ty) => Type::List(translate_type(ty)),
|
||||
TypeDefKind::Union(u) => Type::Union {
|
||||
cases: u.cases.iter().map(|c| translate_type(&c.ty)).collect(),
|
||||
},
|
||||
}
|
||||
}
|
||||
|
||||
fn translate_type(ty: &wit_parser::Type) -> String {
|
||||
use wit_parser::Type;
|
||||
match ty {
|
||||
Type::Unit => format!("unit"),
|
||||
Type::Bool => format!("bool"),
|
||||
Type::U8 => format!("u8"),
|
||||
Type::U16 => format!("u16"),
|
||||
Type::U32 => format!("u32"),
|
||||
Type::U64 => format!("u64"),
|
||||
Type::S8 => format!("s8"),
|
||||
Type::S16 => format!("s16"),
|
||||
Type::S32 => format!("s32"),
|
||||
Type::S64 => format!("s64"),
|
||||
Type::Float32 => format!("float32"),
|
||||
Type::Float64 => format!("float64"),
|
||||
Type::Char => format!("char"),
|
||||
Type::String => format!("string"),
|
||||
Type::Handle(resource) => format!("handle-{}", resource.index()),
|
||||
Type::Id(id) => format!("type-{}", id.index()),
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,9 @@
|
||||
a: async func()
|
||||
b: async func(x: s32)
|
||||
c: async func() -> u32
|
||||
|
||||
resource y {
|
||||
a: async func()
|
||||
b: async func(x: s32)
|
||||
c: async func() -> u32
|
||||
}
|
||||
@@ -0,0 +1,60 @@
|
||||
{
|
||||
"resources": [
|
||||
{
|
||||
"name": "y"
|
||||
}
|
||||
],
|
||||
"types": [
|
||||
{
|
||||
"idx": 0,
|
||||
"primitive": "handle-0"
|
||||
}
|
||||
],
|
||||
"functions": [
|
||||
{
|
||||
"name": "a",
|
||||
"async": true,
|
||||
"params": [],
|
||||
"result": "unit"
|
||||
},
|
||||
{
|
||||
"name": "b",
|
||||
"async": true,
|
||||
"params": [
|
||||
"s32"
|
||||
],
|
||||
"result": "unit"
|
||||
},
|
||||
{
|
||||
"name": "c",
|
||||
"async": true,
|
||||
"params": [],
|
||||
"result": "u32"
|
||||
},
|
||||
{
|
||||
"name": "y::a",
|
||||
"async": true,
|
||||
"params": [
|
||||
"handle-0"
|
||||
],
|
||||
"result": "unit"
|
||||
},
|
||||
{
|
||||
"name": "y::b",
|
||||
"async": true,
|
||||
"params": [
|
||||
"handle-0",
|
||||
"s32"
|
||||
],
|
||||
"result": "unit"
|
||||
},
|
||||
{
|
||||
"name": "y::c",
|
||||
"async": true,
|
||||
"params": [
|
||||
"handle-0"
|
||||
],
|
||||
"result": "u32"
|
||||
}
|
||||
]
|
||||
}
|
||||
@@ -0,0 +1,19 @@
|
||||
// hello
|
||||
// world
|
||||
// why, yes
|
||||
// this is a comment
|
||||
/* this too */ /* is a comment */
|
||||
/* this /* is /* a */ nested */ comment */
|
||||
|
||||
|
||||
type /* foo */ bar /* baz */ = //
|
||||
handle //
|
||||
//
|
||||
//
|
||||
|
||||
|
||||
|
||||
|
||||
x
|
||||
|
||||
resource /* x */ x // ...
|
||||
@@ -0,0 +1,18 @@
|
||||
{
|
||||
"resources": [
|
||||
{
|
||||
"name": "x"
|
||||
}
|
||||
],
|
||||
"types": [
|
||||
{
|
||||
"idx": 0,
|
||||
"name": "bar",
|
||||
"primitive": "handle-0"
|
||||
},
|
||||
{
|
||||
"idx": 1,
|
||||
"primitive": "handle-0"
|
||||
}
|
||||
]
|
||||
}
|
||||
@@ -0,0 +1,24 @@
|
||||
# A Markdown file!
|
||||
|
||||
containing stuff, and also some code blocks, wit and other.
|
||||
|
||||
```wit
|
||||
x: func()
|
||||
```
|
||||
|
||||
Intervening content, including a non-wit codeblock:
|
||||
```js
|
||||
function func() {}
|
||||
```
|
||||
|
||||
```wit
|
||||
y: func()
|
||||
```
|
||||
|
||||
## A new section
|
||||
|
||||
In which, another wit code block!
|
||||
|
||||
```wit
|
||||
z: func()
|
||||
```
|
||||
@@ -0,0 +1,19 @@
|
||||
{
|
||||
"functions": [
|
||||
{
|
||||
"name": "x",
|
||||
"params": [],
|
||||
"result": "unit"
|
||||
},
|
||||
{
|
||||
"name": "y",
|
||||
"params": [],
|
||||
"result": "unit"
|
||||
},
|
||||
{
|
||||
"name": "z",
|
||||
"params": [],
|
||||
"result": "unit"
|
||||
}
|
||||
]
|
||||
}
|
||||
@@ -0,0 +1 @@
|
||||
{}
|
||||
@@ -0,0 +1,7 @@
|
||||
f1: func()
|
||||
f2: func(a: u32)
|
||||
f3: func(a: u32,)
|
||||
f4: func() -> u32
|
||||
f6: func() -> tuple<u32, u32>
|
||||
f7: func(a: float32, b: float32) -> tuple<u32, u32>
|
||||
f8: func(a: option<u32>) -> expected<u32, float32>
|
||||
@@ -0,0 +1,70 @@
|
||||
{
|
||||
"types": [
|
||||
{
|
||||
"idx": 0,
|
||||
"tuple": {
|
||||
"types": [
|
||||
"u32",
|
||||
"u32"
|
||||
]
|
||||
}
|
||||
},
|
||||
{
|
||||
"idx": 1,
|
||||
"option": "u32"
|
||||
},
|
||||
{
|
||||
"idx": 2,
|
||||
"expected": {
|
||||
"ok": "u32",
|
||||
"err": "float32"
|
||||
}
|
||||
}
|
||||
],
|
||||
"functions": [
|
||||
{
|
||||
"name": "f1",
|
||||
"params": [],
|
||||
"result": "unit"
|
||||
},
|
||||
{
|
||||
"name": "f2",
|
||||
"params": [
|
||||
"u32"
|
||||
],
|
||||
"result": "unit"
|
||||
},
|
||||
{
|
||||
"name": "f3",
|
||||
"params": [
|
||||
"u32"
|
||||
],
|
||||
"result": "unit"
|
||||
},
|
||||
{
|
||||
"name": "f4",
|
||||
"params": [],
|
||||
"result": "u32"
|
||||
},
|
||||
{
|
||||
"name": "f6",
|
||||
"params": [],
|
||||
"result": "type-0"
|
||||
},
|
||||
{
|
||||
"name": "f7",
|
||||
"params": [
|
||||
"float32",
|
||||
"float32"
|
||||
],
|
||||
"result": "type-0"
|
||||
},
|
||||
{
|
||||
"name": "f8",
|
||||
"params": [
|
||||
"type-1"
|
||||
],
|
||||
"result": "type-2"
|
||||
}
|
||||
]
|
||||
}
|
||||
@@ -0,0 +1,28 @@
|
||||
# Title
|
||||
|
||||
This file is like import-me.wit, but it's a Markdown file with embedded wit
|
||||
code blocks.
|
||||
|
||||
## `foo`
|
||||
```wit
|
||||
/// This is foo.
|
||||
type foo = u32
|
||||
```
|
||||
|
||||
## `x`
|
||||
```wit
|
||||
/// This is x.
|
||||
resource x
|
||||
```
|
||||
|
||||
## `handle`
|
||||
```wit
|
||||
/// This is handle.
|
||||
type %handle = handle x
|
||||
```
|
||||
|
||||
## `some-record`
|
||||
```wit
|
||||
/// This is some-record.
|
||||
type some-record = tuple<u32, u64, float32>
|
||||
```
|
||||
@@ -0,0 +1,34 @@
|
||||
{
|
||||
"resources": [
|
||||
{
|
||||
"name": "x"
|
||||
}
|
||||
],
|
||||
"types": [
|
||||
{
|
||||
"idx": 0,
|
||||
"name": "foo",
|
||||
"primitive": "u32"
|
||||
},
|
||||
{
|
||||
"idx": 1,
|
||||
"primitive": "handle-0"
|
||||
},
|
||||
{
|
||||
"idx": 2,
|
||||
"name": "handle",
|
||||
"primitive": "handle-0"
|
||||
},
|
||||
{
|
||||
"idx": 3,
|
||||
"name": "some-record",
|
||||
"tuple": {
|
||||
"types": [
|
||||
"u32",
|
||||
"u64",
|
||||
"float32"
|
||||
]
|
||||
}
|
||||
}
|
||||
]
|
||||
}
|
||||
@@ -0,0 +1,7 @@
|
||||
type foo = u32
|
||||
|
||||
resource x
|
||||
|
||||
type %handle = handle x
|
||||
|
||||
type some-record = tuple<u32, u64, float32>
|
||||
@@ -0,0 +1,34 @@
{
  "resources": [
    {
      "name": "x"
    }
  ],
  "types": [
    {
      "idx": 0,
      "name": "foo",
      "primitive": "u32"
    },
    {
      "idx": 1,
      "primitive": "handle-0"
    },
    {
      "idx": 2,
      "name": "handle",
      "primitive": "handle-0"
    },
    {
      "idx": 3,
      "name": "some-record",
      "tuple": {
        "types": [
          "u32",
          "u64",
          "float32"
        ]
      }
    }
  ]
}
@@ -0,0 +1,18 @@
// This test is like imports.wit, but uses import-me-too, which is a markdown
// file instead of a plain wit file.

use { foo } from import-me-too
use { foo as bar } from import-me-too
use { x as import-me-x } from import-me-too

type x = foo
type y = bar
type z1 = import-me-x
type z2 = handle import-me-x

use { %handle } from import-me-too
resource xyz
type my-handle = handle xyz
type my-handle2 = xyz

use { some-record } from import-me-too
@@ -0,0 +1,76 @@
{
  "resources": [
    {
      "name": "x",
      "foreign_module": "import-me-too"
    },
    {
      "name": "xyz"
    }
  ],
  "types": [
    {
      "idx": 0,
      "name": "foo",
      "primitive": "u32",
      "foreign_module": "import-me-too"
    },
    {
      "idx": 1,
      "primitive": "handle-0",
      "foreign_module": "import-me-too"
    },
    {
      "idx": 2,
      "name": "handle",
      "primitive": "handle-0",
      "foreign_module": "import-me-too"
    },
    {
      "idx": 3,
      "name": "some-record",
      "tuple": {
        "types": [
          "u32",
          "u64",
          "float32"
        ]
      },
      "foreign_module": "import-me-too"
    },
    {
      "idx": 4,
      "name": "x",
      "primitive": "type-0"
    },
    {
      "idx": 5,
      "name": "y",
      "primitive": "type-0"
    },
    {
      "idx": 6,
      "name": "z1",
      "primitive": "type-1"
    },
    {
      "idx": 7,
      "name": "z2",
      "primitive": "handle-0"
    },
    {
      "idx": 8,
      "primitive": "handle-1"
    },
    {
      "idx": 9,
      "name": "my-handle",
      "primitive": "handle-1"
    },
    {
      "idx": 10,
      "name": "my-handle2",
      "primitive": "type-8"
    }
  ]
}
@@ -0,0 +1,15 @@
use { foo } from import-me
use { foo as bar } from import-me
use { x as import-me-x } from import-me

type x = foo
type y = bar
type z1 = import-me-x
type z2 = handle import-me-x

use { %handle } from import-me
resource xyz
type my-handle = handle xyz
type my-handle2 = xyz

use { some-record } from import-me
@@ -0,0 +1,76 @@
{
  "resources": [
    {
      "name": "x",
      "foreign_module": "import-me"
    },
    {
      "name": "xyz"
    }
  ],
  "types": [
    {
      "idx": 0,
      "name": "foo",
      "primitive": "u32",
      "foreign_module": "import-me"
    },
    {
      "idx": 1,
      "primitive": "handle-0",
      "foreign_module": "import-me"
    },
    {
      "idx": 2,
      "name": "handle",
      "primitive": "handle-0",
      "foreign_module": "import-me"
    },
    {
      "idx": 3,
      "name": "some-record",
      "tuple": {
        "types": [
          "u32",
          "u64",
          "float32"
        ]
      },
      "foreign_module": "import-me"
    },
    {
      "idx": 4,
      "name": "x",
      "primitive": "type-0"
    },
    {
      "idx": 5,
      "name": "y",
      "primitive": "type-0"
    },
    {
      "idx": 6,
      "name": "z1",
      "primitive": "type-1"
    },
    {
      "idx": 7,
      "name": "z2",
      "primitive": "handle-0"
    },
    {
      "idx": 8,
      "primitive": "handle-1"
    },
    {
      "idx": 9,
      "name": "my-handle",
      "primitive": "handle-1"
    },
    {
      "idx": 10,
      "name": "my-handle2",
      "primitive": "type-8"
    }
  ]
}
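In both of these expected outputs, items pulled in with `use ... from` keep a `foreign_module` field, and local aliases collapse onto the same imported entry: `type x = foo` and `type y = bar` both resolve to `type-0`, since `bar` is only a renamed view of `foo`. A tiny sketch of that name-to-index resolution (the structures and helper are hypothetical, for illustration only):

```python
# Tiny sketch: resolve `use { foo as bar } from import-me` style clauses to
# indices in an already-built foreign types table.
from __future__ import annotations

foreign_types = {"foo": 0, "handle": 2}   # hypothetical name -> idx mapping


def resolve_use(clause: list, scope: dict) -> None:
    """Bind each imported name (optionally renamed) to its foreign index."""
    for name, alias in clause:
        scope[alias or name] = foreign_types[name]


scope: dict = {}
resolve_use([("foo", None), ("foo", "bar")], scope)
# `type x = foo` and `type y = bar` now both point at type-0.
assert scope["foo"] == scope["bar"] == 0
```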
@@ -0,0 +1,4 @@
use * from import-me

type my-handle = handle x
type my-handle2 = x
@@ -0,0 +1,49 @@
{
  "resources": [
    {
      "name": "x",
      "foreign_module": "import-me"
    }
  ],
  "types": [
    {
      "idx": 0,
      "name": "foo",
      "primitive": "u32",
      "foreign_module": "import-me"
    },
    {
      "idx": 1,
      "name": "handle",
      "primitive": "handle-0",
      "foreign_module": "import-me"
    },
    {
      "idx": 2,
      "name": "some-record",
      "tuple": {
        "types": [
          "u32",
          "u64",
          "float32"
        ]
      },
      "foreign_module": "import-me"
    },
    {
      "idx": 3,
      "primitive": "handle-0",
      "foreign_module": "import-me"
    },
    {
      "idx": 4,
      "name": "my-handle",
      "primitive": "handle-0"
    },
    {
      "idx": 5,
      "name": "my-handle2",
      "primitive": "type-3"
    }
  ]
}
@@ -0,0 +1,2 @@
// parse-fail
a: async
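This last fixture is a negative test: `a: async` is not a valid function declaration here, and the leading `// parse-fail` comment marks the file as one the parser must reject rather than translate to JSON. One plausible way a runner could honor that marker (hypothetical, not the actual harness):

```python
# Hypothetical handling of the `// parse-fail` marker: fixtures that start
# with it are expected to fail parsing instead of producing a golden JSON.
def expect_parse_failure(source: str) -> bool:
    """Return True if the fixture declares itself as a negative parse test."""
    return source.lstrip().startswith("// parse-fail")


assert expect_parse_failure("// parse-fail\na: async\n")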
Some files were not shown because too many files have changed in this diff.