Mirror of https://github.com/ziglang/zig.git
Compare commits: 3f2cf1c002 ... 6d543bcf94 (10 commits)
| SHA1 |
|---|
| 6d543bcf94 |
| 0a9f666ea6 |
| 1231aa9719 |
| 8284da2f3d |
| d6931b0ff5 |
| 0e3b5e6d8f |
| 9473011052 |
| dec1163fbb |
| ce0df033cf |
| c5383173a0 |
.github/ISSUE_TEMPLATE/config.yml (vendored), 12 lines changed
@@ -1,13 +1,13 @@
contact_links:
  - name: Language Proposal
    about: Propose to improve the Zig language
    url: https://github.com/ziglang/zig/wiki/Language-Proposals
    about: "Please do not submit a proposal to change the language"
    url: https://ziglang.org/code-of-conduct
  - name: Question
    about: Please use one of the community spaces for questions or general discussions.
    url: https://github.com/ziglang/zig/wiki/Community
    about: "Please use one of the community spaces instead for questions or general discussions."
    url: https://ziglang.org/community
  - name: C Translation
    about: "Issues related to `zig translate-c` and `@cImport` are tracked separately."
    url: https://github.com/ziglang/translate-c/
  - name: Copilot and Other LLMs
    about: Please do not use GitHub Copilot or any other LLM to write an issue.
    url: https://github.com/ziglang/zig/wiki/Writing-Issues-with-Copilot-and-Other-LLMs
    about: "Please do not use GitHub Copilot or any other LLM to write an issue."
    url: https://ziglang.org/code-of-conduct
README.md, 680 lines changed
@ -47,7 +47,10 @@ Ensure you have the required dependencies:
|
||||
|
||||
* CMake >= 3.15
|
||||
* System C/C++ Toolchain
|
||||
* LLVM, Clang, LLD development libraries == 21.x
|
||||
* LLVM, Clang, LLD development libraries, version 21.x, compiled with the
|
||||
same system C/C++ toolchain.
|
||||
- If the system package manager lacks these libraries, or has them misconfigured,
|
||||
see below for how to build them from source.
|
||||
|
||||
Then it is the standard CMake build process:
|
||||
|
||||
@ -58,9 +61,9 @@ cmake ..
|
||||
make install
|
||||
```
|
||||
|
||||
For more options, tips, and troubleshooting, please see the
|
||||
[Building Zig From Source](https://github.com/ziglang/zig/wiki/Building-Zig-From-Source)
|
||||
page on the wiki.
|
||||
Use `CMAKE_PREFIX_PATH` if needed to help CMake find LLVM.
|
||||
|
||||
This produces `stage3/bin/zig` which is the Zig compiler built by itself.
|
||||
|
||||
## Building from Source without LLVM
|
||||
|
||||
@ -88,15 +91,359 @@ files, which may be optimized and compiled into object files via a system Clang
|
||||
package. This can be used to produce system packages of Zig applications
|
||||
without the Zig package dependency on LLVM.
|
||||
|
||||
## Building from Source Using Prebuilt Zig
|
||||
|
||||
Dependencies:
|
||||
|
||||
* A recent prior build of Zig. The exact version required depends on how
|
||||
recently breaking changes occurred. If the language or std lib changed too
|
||||
much since this version, then this method of building from source will fail.
|
||||
* LLVM, Clang, and LLD libraries built using Zig.
|
||||
|
||||
The easiest way to obtain both of these artifacts is to use
|
||||
[zig-bootstrap](https://github.com/ziglang/zig-bootstrap), which creates the
|
||||
directories `out/zig-$target-$cpu` and `out/$target-$cpu`, to be used as
|
||||
`$ZIG_PREFIX` and `$LLVM_PREFIX`, respectively, in the following command:
|
||||
|
||||
```
|
||||
"$ZIG_PREFIX/zig" build \
|
||||
-p stage3 \
|
||||
--search-prefix "$LLVM_PREFIX" \
|
||||
--zig-lib-dir "lib" \
|
||||
-Dstatic-llvm
|
||||
```
|
||||
|
||||
Where `$LLVM_PREFIX` is the path that contains, for example,
|
||||
`include/llvm/Pass.h` and `lib/libLLVMCore.a`.
|
||||
|
||||
This produces `stage3/bin/zig`. See `zig build -h` to learn about the options
|
||||
that can be passed such as `-Drelease`.
|
||||
|
||||
## Building from Source on Windows
|
||||
|
||||
### Option 1: Use the Windows Zig Compiler Dev Kit
|
||||
|
||||
This one has the benefit that LLVM, LLD, and Clang are built in Release mode,
|
||||
while your Zig build has the option to be a Debug build. It also works
|
||||
completely independently from MSVC so you don't need it to be installed.
|
||||
|
||||
Determine the URL by
|
||||
[looking at the CI script](https://github.com/ziglang/zig/blob/master/ci/x86_64-windows-debug.ps1#L1-L4).
|
||||
It will look something like this (replace `$VERSION` with the one you see by
|
||||
following the above link):
|
||||
|
||||
```
|
||||
https://ziglang.org/deps/zig+llvm+lld+clang-x86_64-windows-gnu-$VERSION.zip
|
||||
```
|
||||
|
||||
This zip file contains:
|
||||
|
||||
* An older Zig installation.
|
||||
* LLVM, LLD, and Clang libraries (.lib and .h files), version 16.0.1, built in Release mode.
|
||||
* zlib (.lib and .h files), v1.2.13, built in Release mode
|
||||
* zstd (.lib and .h files), v1.5.2, built in Release mode
|
||||
|
||||
#### Option 1a: CMake + [Ninja](https://ninja-build.org/)
|
||||
|
||||
Unzip the dev kit and then in cmd.exe in your Zig source checkout:
|
||||
|
||||
```bat
|
||||
mkdir build
|
||||
cd build
|
||||
set DEVKIT=$DEVKIT
|
||||
```
|
||||
|
||||
Replace `$DEVKIT` with the path to the folder that you unzipped after
|
||||
downloading it from the link above. Make sure to use forward slashes (`/`) for
|
||||
all path separators (otherwise CMake will try to interpret backslashes as
|
||||
escapes and fail).
|
||||
|
||||
Then run:
|
||||
|
||||
```bat
|
||||
cmake .. -GNinja -DCMAKE_PREFIX_PATH="%DEVKIT%" -DCMAKE_C_COMPILER="%DEVKIT%/bin/zig.exe;cc" -DCMAKE_CXX_COMPILER="%DEVKIT%/bin/zig.exe;c++" -DCMAKE_AR="%DEVKIT%/bin/zig.exe" -DZIG_AR_WORKAROUND=ON -DZIG_STATIC=ON -DZIG_USE_LLVM_CONFIG=OFF
|
||||
```
|
||||
|
||||
* Append `-DCMAKE_BUILD_TYPE=Release` for a Release build.
|
||||
* Append `-DZIG_NO_LIB=ON` to avoid having multiple copies of the lib/ folder.
|
||||
|
||||
Finally, run:
|
||||
|
||||
```bat
|
||||
ninja install
|
||||
```
|
||||
|
||||
You now have the `zig.exe` binary at `stage3\bin\zig.exe`.
|
||||
|
||||
#### Option 1b: zig build
|
||||
|
||||
Unzip the dev kit and then in cmd.exe in your Zig source checkout:
|
||||
|
||||
```bat
|
||||
$DEVKIT\bin\zig.exe build -p stage3 --search-prefix $DEVKIT --zig-lib-dir lib -Dstatic-llvm -Duse-zig-libcxx -Dtarget=x86_64-windows-gnu
|
||||
```
|
||||
|
||||
Replace `$DEVKIT` with the path to the folder that you unzipped after
|
||||
downloading it from the link above.
|
||||
|
||||
Append `-Doptimize=ReleaseSafe` for a Release build.
|
||||
|
||||
**If you get an error building at this step**, it is most likely that the Zig
|
||||
installation inside the dev kit is too old, and the dev kit needs to be
|
||||
updated. In this case one more step is required:
|
||||
|
||||
1. [Download the latest master branch zip file](https://ziglang.org/download/#release-master).
|
||||
2. Unzip, and try the above command again, replacing the path to zig.exe with
|
||||
the path to the zig.exe you just extracted, and also replace the lib\zig
|
||||
folder with the new contents.
|
||||
|
||||
You now have the `zig.exe` binary at `stage3\bin\zig.exe`.
|
||||
|
||||
### Option 2: Using CMake and Microsoft Visual Studio
|
||||
|
||||
This one has the benefit that changes to the language or build system won't
|
||||
break your dev kit. This option can be used to upgrade a dev kit.
|
||||
|
||||
First, [build LLVM, LLD, and Clang from source using CMake and Microsoft Visual Studio](https://github.com/ziglang/zig/wiki/How-to-build-LLVM,-libclang,-and-liblld-from-source#windows). Or, skip this step by using a pre-built binary tarball, which unfortunately is not provided here.
|
||||
|
||||
Install [Build Tools for Visual Studio 2019](https://visualstudio.microsoft.com/downloads/#build-tools-for-visual-studio-2019). Be sure to select "Desktop development with C++" when prompted.
|
||||
* You must additionally check the optional component labeled **C++ ATL for v142 build tools**.
|
||||
|
||||
Install [CMake](http://cmake.org).
|
||||
|
||||
Use [git](https://git-scm.com/) to clone the zig repository to a path with no spaces, e.g. `C:\Users\Andy\zig`.
|
||||
|
||||
Using the start menu, run **x64 Native Tools Command Prompt for VS 2019** and execute these commands, replacing `C:\Users\Andy` with the correct value.
|
||||
|
||||
```bat
|
||||
mkdir C:\Users\Andy\zig\build-release
|
||||
cd C:\Users\Andy\zig\build-release
|
||||
"c:\Program Files\CMake\bin\cmake.exe" .. -Thost=x64 -G "Visual Studio 16 2019" -A x64 -DCMAKE_PREFIX_PATH=C:\Users\Andy\llvm+clang+lld-20.0.0-x86_64-windows-msvc-release-mt -DCMAKE_BUILD_TYPE=Release
|
||||
msbuild -p:Configuration=Release INSTALL.vcxproj
|
||||
```
|
||||
|
||||
You now have the `zig.exe` binary at `bin\zig.exe` and you can run the tests:
|
||||
|
||||
```bat
|
||||
bin\zig.exe build test
|
||||
```
|
||||
|
||||
This can take a long time. For tips & tricks on using the test suite, see [Contributing](https://github.com/ziglang/zig/blob/master/.github/CONTRIBUTING.md#editing-source-code).
|
||||
|
||||
Note: In case you get the error "llvm-config not found" (or similar), make sure that you have **no** trailing slash (`/` or `\`) at the end of the `-DCMAKE_PREFIX_PATH` value.
|
||||
|
||||
## Building LLVM, LLD, and Clang from Source
|
||||
|
||||
### Windows
|
||||
|
||||
Install [CMake](https://cmake.org/), version 3.20.0 or newer.
|
||||
|
||||
[Download LLVM, Clang, and LLD sources](http://releases.llvm.org/download.html#21.0.0).
The download links lead to the GitHub release pages, where the sources are
listed as `llvm-21.X.X.src.tar.xz`, `clang-21.X.X.src.tar.xz`, and
`lld-21.X.X.src.tar.xz`. Unzip each into its own directory, and ensure that no
directory path contains spaces. For example:
|
||||
|
||||
* `C:\Users\Andy\llvm-21.0.0.src`
|
||||
* `C:\Users\Andy\clang-21.0.0.src`
|
||||
* `C:\Users\Andy\lld-21.0.0.src`
|
||||
|
||||
Install [Build Tools for Visual Studio
|
||||
2019](https://visualstudio.microsoft.com/downloads/#build-tools-for-visual-studio-2019).
|
||||
Be sure to select "C++ build tools" when prompted.
|
||||
* You **must** additionally check the optional component labeled **C++ ATL for
  v142 build tools**, as this is not included in a default installation of
  Visual Studio.
|
||||
* Full list of supported MSVC versions:
|
||||
- 2017 (version 15.8) (unverified)
|
||||
- 2019 (version 16.7)
|
||||
|
||||
Install [Python 3.9.4](https://www.python.org). Tick the box to add python to
|
||||
your PATH environment variable.
|
||||
|
||||
#### LLVM
|
||||
|
||||
Using the start menu, run **x64 Native Tools Command Prompt for VS 2019** and execute these commands, replacing `C:\Users\Andy` with the correct value. Below is a brief explanation of each CMake parameter passed when configuring the build:
|
||||
|
||||
- `-Thost=x64` : Sets the Windows toolset to 64-bit mode.
- `-A x64` : Makes the build target 64-bit.
- `-G "Visual Studio 16 2019"` : Generates a Visual Studio 2019 project, the best supported version.
- `-DCMAKE_INSTALL_PREFIX=""` : Path that the LLVM components will be installed into by the install project.
- `-DCMAKE_PREFIX_PATH=""` : Path that CMake searches first when trying to locate dependencies; it should be the same as the install prefix. This ensures that clang and lld use your newly built LLVM libraries.
- `-DLLVM_ENABLE_ZLIB=OFF` : Don't build LLVM with zlib support; it is not required and would disrupt the target dependencies of components linking against LLVM. This only has to be passed when building LLVM, since the option is saved into the config headers.
- `-DCMAKE_BUILD_TYPE=Release` : Build LLVM and its components in release mode.
- `-DCMAKE_BUILD_TYPE=Debug` : Build LLVM and its components in debug mode.
- `-DLLVM_USE_CRT_RELEASE=MT` : Which C runtime LLVM should use during release builds.
- `-DLLVM_USE_CRT_DEBUG=MTd` : Makes LLVM use the debug version of the C runtime in debug builds.
|
||||
|
||||
##### Release Mode
|
||||
|
||||
```bat
mkdir C:\Users\Andy\llvm-21.0.0.src\build-release
cd C:\Users\Andy\llvm-21.0.0.src\build-release
"c:\Program Files\CMake\bin\cmake.exe" .. -Thost=x64 -G "Visual Studio 16 2019" -A x64 -DCMAKE_INSTALL_PREFIX=C:\Users\Andy\llvm+clang+lld-21.0.0-x86_64-windows-msvc-release-mt -DCMAKE_PREFIX_PATH=C:\Users\Andy\llvm+clang+lld-21.0.0-x86_64-windows-msvc-release-mt -DLLVM_ENABLE_ZLIB=OFF -DCMAKE_BUILD_TYPE=Release -DLLVM_ENABLE_LIBXML2=OFF -DLLVM_USE_CRT_RELEASE=MT
msbuild /m -p:Configuration=Release INSTALL.vcxproj
```
|
||||
|
||||
##### Debug Mode
|
||||
|
||||
```bat
mkdir C:\Users\Andy\llvm-21.0.0.src\build-debug
cd C:\Users\Andy\llvm-21.0.0.src\build-debug
"c:\Program Files\CMake\bin\cmake.exe" .. -Thost=x64 -G "Visual Studio 16 2019" -A x64 -DCMAKE_INSTALL_PREFIX=C:\Users\andy\llvm+clang+lld-21.0.0-x86_64-windows-msvc-debug -DLLVM_ENABLE_ZLIB=OFF -DCMAKE_PREFIX_PATH=C:\Users\andy\llvm+clang+lld-21.0.0-x86_64-windows-msvc-debug -DCMAKE_BUILD_TYPE=Debug -DLLVM_EXPERIMENTAL_TARGETS_TO_BUILD="AVR" -DLLVM_ENABLE_LIBXML2=OFF -DLLVM_USE_CRT_DEBUG=MTd
msbuild /m INSTALL.vcxproj
```
|
||||
|
||||
#### LLD
|
||||
|
||||
Using the start menu, run **x64 Native Tools Command Prompt for VS 2019** and execute these commands, replacing `C:\Users\Andy` with the correct value.
|
||||
|
||||
##### Release Mode
|
||||
|
||||
```bat
|
||||
mkdir C:\Users\Andy\lld-21.0.0.src\build-release
|
||||
cd C:\Users\Andy\lld-21.0.0.src\build-release
|
||||
"c:\Program Files\CMake\bin\cmake.exe" .. -Thost=x64 -G "Visual Studio 16 2019" -A x64 -DCMAKE_INSTALL_PREFIX=C:\Users\Andy\llvm+clang+lld-14.0.6-x86_64-windows-msvc-release-mt -DCMAKE_PREFIX_PATH=C:\Users\Andy\llvm+clang+lld-21.0.0-x86_64-windows-msvc-release-mt -DCMAKE_BUILD_TYPE=Release -DLLVM_USE_CRT_RELEASE=MT
|
||||
msbuild /m -p:Configuration=Release INSTALL.vcxproj
|
||||
```
|
||||
|
||||
##### Debug Mode
|
||||
|
||||
```bat
|
||||
mkdir C:\Users\Andy\lld-21.0.0.src\build-debug
|
||||
cd C:\Users\Andy\lld-21.0.0.src\build-debug
|
||||
"c:\Program Files\CMake\bin\cmake.exe" .. -Thost=x64 -G "Visual Studio 16 2019" -A x64 -DCMAKE_INSTALL_PREFIX=C:\Users\andy\llvm+clang+lld-21.0.0-x86_64-windows-msvc-debug -DCMAKE_PREFIX_PATH=C:\Users\andy\llvm+clang+lld-21.0.0-x86_64-windows-msvc-debug -DCMAKE_BUILD_TYPE=Debug -DLLVM_USE_CRT_DEBUG=MTd
|
||||
msbuild /m INSTALL.vcxproj
|
||||
```
|
||||
|
||||
#### Clang
|
||||
|
||||
Using the start menu, run **x64 Native Tools Command Prompt for VS 2019** and execute these commands, replacing `C:\Users\Andy` with the correct value.
|
||||
|
||||
##### Release Mode
|
||||
|
||||
```bat
|
||||
mkdir C:\Users\Andy\clang-21.0.0.src\build-release
|
||||
cd C:\Users\Andy\clang-21.0.0.src\build-release
|
||||
"c:\Program Files\CMake\bin\cmake.exe" .. -Thost=x64 -G "Visual Studio 16 2019" -A x64 -DCMAKE_INSTALL_PREFIX=C:\Users\Andy\llvm+clang+lld-21.0.0-x86_64-windows-msvc-release-mt -DCMAKE_PREFIX_PATH=C:\Users\Andy\llvm+clang+lld-21.0.0-x86_64-windows-msvc-release-mt -DCMAKE_BUILD_TYPE=Release -DLLVM_USE_CRT_RELEASE=MT
|
||||
msbuild /m -p:Configuration=Release INSTALL.vcxproj
|
||||
```
|
||||
|
||||
##### Debug Mode
|
||||
|
||||
```bat
|
||||
mkdir C:\Users\Andy\clang-21.0.0.src\build-debug
|
||||
cd C:\Users\Andy\clang-21.0.0.src\build-debug
|
||||
"c:\Program Files\CMake\bin\cmake.exe" .. -Thost=x64 -G "Visual Studio 16 2019" -A x64 -DCMAKE_INSTALL_PREFIX=C:\Users\andy\llvm+clang+lld-21.0.0-x86_64-windows-msvc-debug -DCMAKE_PREFIX_PATH=C:\Users\andy\llvm+clang+lld-21.0.0-x86_64-windows-msvc-debug -DCMAKE_BUILD_TYPE=Debug -DLLVM_USE_CRT_DEBUG=MTd
|
||||
msbuild /m INSTALL.vcxproj
|
||||
```
|
||||
|
||||
### POSIX Systems
|
||||
|
||||
This guide will get you a Release build of LLVM, a Debug build of LLVM, or both.
|
||||
It intentionally does not require privileged access, using a prefix inside your home
|
||||
directory instead of a global installation.
|
||||
|
||||
#### Release
|
||||
|
||||
This is the generally recommended approach.
|
||||
|
||||
```
|
||||
cd ~/Downloads
|
||||
git clone --depth 1 --branch release/21.x https://github.com/llvm/llvm-project llvm-project-21
|
||||
cd llvm-project-21
|
||||
git checkout release/21.x
|
||||
|
||||
mkdir build-release
|
||||
cd build-release
|
||||
cmake ../llvm \
|
||||
-DCMAKE_INSTALL_PREFIX=$HOME/local/llvm21-assert \
|
||||
-DCMAKE_BUILD_TYPE=Release \
|
||||
-DLLVM_ENABLE_PROJECTS="lld;clang" \
|
||||
-DLLVM_ENABLE_LIBXML2=OFF \
|
||||
-DLLVM_ENABLE_TERMINFO=OFF \
|
||||
-DLLVM_ENABLE_LIBEDIT=OFF \
|
||||
-DLLVM_ENABLE_ASSERTIONS=ON \
|
||||
-DLLVM_PARALLEL_LINK_JOBS=1 \
|
||||
-G Ninja
|
||||
ninja install
|
||||
```
|
||||
|
||||
#### Debug
|
||||
|
||||
This is occasionally needed when debugging Zig's LLVM backend. Here we build
|
||||
the three projects separately so that LLVM can be in Debug mode while the
|
||||
others are in Release mode.
|
||||
|
||||
```
|
||||
cd ~/Downloads
|
||||
git clone --depth 1 --branch release/21.x https://github.com/llvm/llvm-project llvm-project-21
|
||||
cd llvm-project-21
|
||||
git checkout release/21.x
|
||||
|
||||
# LLVM
|
||||
mkdir llvm/build-debug
|
||||
cd llvm/build-debug
|
||||
cmake .. \
|
||||
-DCMAKE_INSTALL_PREFIX=$HOME/local/llvm21-debug \
|
||||
-DCMAKE_PREFIX_PATH=$HOME/local/llvm21-debug \
|
||||
-DCMAKE_BUILD_TYPE=Debug \
|
||||
-DLLVM_ENABLE_LIBXML2=OFF \
|
||||
-DLLVM_ENABLE_TERMINFO=OFF \
|
||||
-DLLVM_ENABLE_LIBEDIT=OFF \
|
||||
-DLLVM_PARALLEL_LINK_JOBS=1 \
|
||||
-G Ninja
|
||||
ninja install
|
||||
cd ../..
|
||||
|
||||
# LLD
|
||||
mkdir lld/build-debug
|
||||
cd lld/build-debug
|
||||
cmake .. \
|
||||
-DCMAKE_INSTALL_PREFIX=$HOME/local/llvm21-debug \
|
||||
-DCMAKE_PREFIX_PATH=$HOME/local/llvm21-debug \
|
||||
-DCMAKE_BUILD_TYPE=Release \
|
||||
-DLLVM_PARALLEL_LINK_JOBS=1 \
|
||||
-DCMAKE_CXX_STANDARD=17 \
|
||||
-G Ninja
|
||||
ninja install
|
||||
cd ../..
|
||||
|
||||
# Clang
|
||||
mkdir clang/build-debug
|
||||
cd clang/build-debug
|
||||
cmake .. \
|
||||
-DCMAKE_INSTALL_PREFIX=$HOME/local/llvm21-debug \
|
||||
-DCMAKE_PREFIX_PATH=$HOME/local/llvm21-debug \
|
||||
-DCMAKE_BUILD_TYPE=Release \
|
||||
-DLLVM_PARALLEL_LINK_JOBS=1 \
|
||||
-DLLVM_INCLUDE_TESTS=OFF \
|
||||
-G Ninja
|
||||
ninja install
|
||||
cd ../..
|
||||
```
|
||||
|
||||
Then add to your Zig CMake line that you got from the README.md:
|
||||
`-DCMAKE_PREFIX_PATH=$HOME/local/llvm21-debug` or
|
||||
`-DCMAKE_PREFIX_PATH=$HOME/local/llvm21-assert` depending on whether you want
|
||||
Debug or Release LLVM.
|
||||
|
||||
|
||||
## Contributing
|
||||
|
||||
[Donate monthly](https://ziglang.org/zsf/).
|
||||
|
||||
[Join a community](https://ziglang.org/community/).
|
||||
|
||||
Zig is Free and Open Source Software. We welcome bug reports and patches from
|
||||
everyone. However, keep in mind that Zig governance is BDFN (Benevolent
|
||||
Dictator For Now) which means that Andrew Kelley has final say on the design
|
||||
and implementation of everything.
|
||||
|
||||
### Make Software With Zig
|
||||
|
||||
One of the best ways you can contribute to Zig is to start using it for an
|
||||
open-source personal project.
|
||||
|
||||
@ -105,13 +452,36 @@ further design iterations of Zig. Importantly, each issue found this way comes
|
||||
with real world motivations, making it straightforward to explain the reasoning
|
||||
behind proposals and feature requests.
|
||||
|
||||
You will be taken much more seriously on the issue tracker if you have a
|
||||
personal project that uses Zig.
|
||||
Ideally, such a project will help you to learn new skills and add something
|
||||
to your personal portfolio at the same time.
|
||||
|
||||
### Talk About Zig
|
||||
|
||||
Another way to contribute is to write about Zig, speak about Zig at a
|
||||
conference, or do either of those things for your project which uses Zig.
|
||||
|
||||
Programming languages live and die based on the pulse of their ecosystems. The
|
||||
more people involved, the more we can build great things upon each other's
|
||||
abstractions.
|
||||
|
||||
### Strict No LLM / No AI Policy
|
||||
|
||||
No LLMs for issues.
|
||||
|
||||
No LLMs for patches / pull requests.
|
||||
|
||||
No LLMs for comments on the bug tracker, including translation.
|
||||
|
||||
English is encouraged, but not required. You are welcome to post in your native
|
||||
language and rely on others to have their own translation tools of choice to
|
||||
interpret your words.
|
||||
|
||||
### Find a Contributor Friendly Issue
|
||||
|
||||
The issue label
|
||||
[Contributor Friendly](https://github.com/ziglang/zig/issues?q=is%3Aissue+is%3Aopen+label%3A%22contributor+friendly%22)
|
||||
exists to help you find issues that are **limited in scope and/or knowledge of
|
||||
Zig internals.**
|
||||
exists to help you find issues that are **limited in scope and/or
|
||||
knowledge of Zig internals.**
|
||||
|
||||
Please note that issues labeled
|
||||
[Proposal](https://github.com/ziglang/zig/issues?q=is%3Aissue+is%3Aopen+label%3Aproposal)
|
||||
@ -123,17 +493,289 @@ still under consideration, please express your interest in the issue tracker,
|
||||
providing extra insights and considerations that others have not yet expressed.
|
||||
The most highly regarded argument in such a discussion is a real world use case.
|
||||
|
||||
For more tips, please see the
|
||||
[Contributing](https://github.com/ziglang/zig/wiki/Contributing) page on the
|
||||
wiki.
|
||||
Language proposals are not accepted. Please do not open an issue proposing to
|
||||
change the Zig language or syntax.
|
||||
|
||||
## Community
|
||||
### Editing Source Code
|
||||
|
||||
The Zig community is decentralized. Anyone is free to start and maintain their
|
||||
own space for Zig users to gather. There is no concept of "official" or
|
||||
"unofficial". Each gathering place has its own moderators and rules. Users are
|
||||
encouraged to be aware of the social structures of the spaces they inhabit, and
|
||||
work purposefully to facilitate spaces that align with their values.
|
||||
For a smooth workflow, when building from source, it is recommended to use
|
||||
CMake with the following settings:
|
||||
|
||||
* `-DCMAKE_BUILD_TYPE=Release` - to recompile zig faster.
|
||||
* `-GNinja` - Ninja is faster and simpler to use than Make.
|
||||
* `-DZIG_NO_LIB=ON` - Prevents the build system from copying the lib/
|
||||
directory to the installation prefix, causing zig to use lib/ directly from the
|
||||
source tree instead. Effectively, this makes it so that changes to lib/ do
|
||||
not require re-running the install command to become active.
|
||||
|
||||
After configuration, there are two scenarios:
|
||||
|
||||
1. Pulling upstream changes and rebuilding.
|
||||
- In this case use `git pull` and then `ninja install`. Expected wait:
|
||||
about 10 minutes.
|
||||
2. Building from source after making local changes.
|
||||
- In this case use `stage3/bin/zig build -p stage4 -Denable-llvm -Dno-lib`.
|
||||
Expected wait: about 20 seconds.
|
||||
|
||||
This leaves you with two builds of Zig:
|
||||
|
||||
* `stage3/bin/zig` - an optimized master branch build. Useful for
|
||||
miscellaneous activities such as `zig fmt`, as well as for building the
|
||||
compiler itself after changing the source code.
|
||||
* `stage4/bin/zig` - a debug build that includes your local changes; useful
|
||||
for testing and eliminating bugs before submitting a patch.
|
||||
|
||||
To reduce time spent waiting for the compiler to build, try these techniques:
|
||||
|
||||
* Omit `-Denable-llvm` if you don't need the LLVM backend.
|
||||
* Use `-Ddev=foo` to build with a reduced feature set for development of
|
||||
specific features. See `zig build -h` for a list of options.
|
||||
* Use `--watch -fincremental` to enable incremental compilation. This offers
|
||||
**near instant rebuilds**.
|
||||
|
||||
### Testing
|
||||
|
||||
```
|
||||
stage4/bin/zig build test
|
||||
```
|
||||
|
||||
This command runs the whole test suite, which does a lot of extra testing that
|
||||
you likely won't always need, and can take upwards of 1 hour. This is what the
|
||||
CI server runs when you make a pull request.
|
||||
|
||||
To save time, you can add the `--help` option to the `zig build` command and
|
||||
see what options are available. One of the most helpful ones is
|
||||
`-Dskip-release`. Adding this option to the command above, along with
|
||||
`-Dskip-non-native`, will take the time down from around 2 hours to about 30
|
||||
minutes, and this is a good enough amount of testing before making a pull
|
||||
request.
|
||||
|
||||
Another example is choosing a different set of things to test. For example,
|
||||
`test-std` instead of `test` will only run the standard library tests, and
|
||||
not the other ones. Combining this suggestion with the previous one, you could
|
||||
do this:
|
||||
|
||||
```
|
||||
stage4/bin/zig build test-std -Dskip-release
|
||||
```
|
||||
|
||||
This will run only the standard library tests in debug mode for all targets.
|
||||
It will cross-compile the tests for non-native targets but not run them.
|
||||
|
||||
When making changes to the compiler source code, the most helpful test step to
|
||||
run is `test-behavior`. When editing documentation it is `docs`. You can find
|
||||
this information and more in the `zig build --help` menu.
|
||||
|
||||
#### Directly Testing the Standard Library with `zig test`
|
||||
|
||||
This command will run the standard library tests with only the native target
|
||||
configuration and is estimated to complete in 3 minutes:
|
||||
|
||||
```
|
||||
zig build test-std -Dno-matrix
|
||||
```
|
||||
|
||||
However, one may also use `zig test` directly. From inside the `ziglang/zig` repo root:
|
||||
|
||||
```
|
||||
zig test lib/std/std.zig --zig-lib-dir lib
|
||||
```
|
||||
|
||||
You can add `--test-filter "some test name"` to run a specific test or a subset of tests.
|
||||
(Running exactly 1 test is not reliably possible, because the test filter does not
|
||||
exclude anonymous test blocks, but that shouldn't interfere with whatever
|
||||
you're trying to test in practice.)
|
||||
|
||||
Note that `--test-filter` filters on fully qualified names, so e.g. it's possible to run only the `std.json` tests with:
|
||||
|
||||
```
|
||||
zig test lib/std/std.zig --zig-lib-dir lib --test-filter "json."
|
||||
```
|
||||
|
||||
If you used `-Dno-lib` and you are in a `build/` subdirectory, you can omit the
|
||||
`--zig-lib-dir` argument:
|
||||
|
||||
```
|
||||
stage3/bin/zig test ../lib/std/std.zig
|
||||
```
|
||||
|
||||
#### Testing Non-Native Architectures with QEMU
|
||||
|
||||
The Linux CI server additionally has qemu installed and sets `-fqemu`.
|
||||
This provides test coverage for, e.g., aarch64 even on x86_64 machines. It's
|
||||
recommended for Linux users to install qemu and enable this testing option
|
||||
when editing the standard library or anything related to a non-native
|
||||
architecture.
|
||||
|
||||
QEMU packages provided by some system package managers (such as Debian) may be
|
||||
a few releases old, or may be missing newer targets such as aarch64 and RISC-V.
|
||||
[ziglang/qemu-static](https://github.com/ziglang/qemu-static) offers static
|
||||
binaries of the latest QEMU version.
|
||||
|
||||
##### Testing Non-Native glibc Targets
|
||||
|
||||
Testing foreign architectures with dynamically linked glibc is one step trickier.
|
||||
This requires enabling `--glibc-runtimes /path/to/glibc/multi/install/glibcs`.
|
||||
This path is obtained by building glibc for multiple architectures. This
|
||||
process for me took an entire day to complete and takes up 65 GiB on my hard
|
||||
drive. The CI server does not provide this test coverage.
|
||||
|
||||
[Instructions for producing this path](https://codeberg.org/ziglang/infra/src/branch/master/building-libcs.md#linux-glibc) (just the part with `build-many-glibcs.py`).
|
||||
|
||||
It is understood that most contributors will not have these tests enabled.
|
||||
|
||||
#### Testing Windows from a Linux Machine with Wine
|
||||
|
||||
When developing on Linux, another option is available to you: `-fwine`.
|
||||
This will enable running behavior tests and std lib tests with Wine. It's
|
||||
recommended for Linux users to install Wine and enable this testing option
|
||||
when editing the standard library or anything Windows-related.
|
||||
|
||||
#### Testing WebAssembly using wasmtime
|
||||
|
||||
If you have [wasmtime](https://wasmtime.dev/) installed, take advantage of the
|
||||
`-fwasmtime` flag which will enable running WASI behavior tests and std
|
||||
lib tests. It's recommended for all users to install wasmtime and enable this
|
||||
testing option when editing the standard library and especially anything
|
||||
WebAssembly-related.
|
||||
|
||||
### Improving Translate-C
|
||||
|
||||
`translate-c` is a feature provided by Zig that converts C source code into
|
||||
Zig source code. It powers the `zig translate-c` command as well as
|
||||
[@cImport](https://ziglang.org/documentation/master/#cImport), allowing Zig
|
||||
code to not only take advantage of function prototypes defined in .h files,
|
||||
but also `static inline` functions written in C, and even some macros.
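
For example, a minimal `@cImport` usage looks like this (an illustrative sketch; the header and function used here are arbitrary, and libc must be linked, e.g. `zig build-exe example.zig -lc`):

```zig
const c = @cImport({
    @cInclude("stdio.h");
});

pub fn main() void {
    // Calls the C printf made available through the translated header.
    _ = c.printf("hello from C via @cImport\n");
}
```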
|
||||
|
||||
This feature used to work by using the libclang API to parse and semantically
analyze C/C++ files, and then, based on the provided AST and type information,
generating a Zig AST, and finally using the mechanisms of `zig fmt` to render
the Zig AST to a file.
|
||||
|
||||
However, C translation is in a transitional period right now. It used to be
|
||||
based on Clang, but is now based on Aro:
|
||||
|
||||
[Pull Request: update aro and translate-c to latest; delete clang translate-c](https://github.com/ziglang/zig/pull/24497)
|
||||
|
||||
Test coverage as well as bug reports have been moved to this repository:
|
||||
|
||||
[ziglang/translate-c](https://github.com/ziglang/translate-c/)
|
||||
|
||||
In the future, [@cImport will move to the build system](https://github.com/ziglang/zig/issues/20630),
|
||||
but for now, the translate-c logic is copy-pasted from that project into
|
||||
[ziglang/zig](https://github.com/ziglang/zig/), powering both `zig translate-c`
|
||||
and `@cImport`.
|
||||
|
||||
Please see the readme of the translate-c project for how to contribute. Once an
|
||||
issue is resolved (and test coverage added) there, the changes can be
|
||||
immediately backported to the zig compiler.
|
||||
|
||||
Once we fix the problems people are facing from this transition from Clang to
|
||||
Aro, we can move on to enhancing the translate-c package such that `@cImport`
|
||||
becomes redundant and can therefore be eliminated from the language.
|
||||
|
||||
### Autodoc
|
||||
|
||||
Autodoc is an interactive, searchable, single-page web application for browsing
|
||||
Zig codebases.
|
||||
|
||||
An autodoc deployment looks like this:
|
||||
|
||||
```
|
||||
index.html
|
||||
main.js
|
||||
main.wasm
|
||||
sources.tar
|
||||
```
|
||||
|
||||
* `main.js` and `index.html` are static files which live in a Zig installation
|
||||
at `lib/docs/`.
|
||||
* `main.wasm` is compiled from the Zig files inside `lib/docs/wasm/`.
|
||||
* `sources.tar` is all the zig source files of the project.
|
||||
|
||||
These artifacts are produced by the compiler when `-femit-docs` is passed.
|
||||
|
||||
#### Making Changes
|
||||
|
||||
The command `zig std` spawns an HTTP server that provides all the assets
|
||||
mentioned above specifically for the standard library.
|
||||
|
||||
The server creates the requested files on the fly, including rebuilding
|
||||
`main.wasm` if any of its source files changed, and constructing `sources.tar`,
|
||||
meaning that any source changes to the documented files, or to the autodoc
|
||||
system itself are immediately reflected when viewing docs.
|
||||
|
||||
This means you can test changes to Zig standard library documentation, as well
|
||||
as autodocs functionality, by pressing refresh in the browser.
|
||||
|
||||
Prefixing the URL with `/debug` results in a debug build of `main.wasm`.
|
||||
|
||||
#### Debugging the Zig Code
|
||||
|
||||
While Firefox and Safari support are obviously required, I recommend Chromium
|
||||
for development for one reason in particular:
|
||||
|
||||
[C/C++ DevTools Support (DWARF)](https://chromewebstore.google.com/detail/cc++-devtools-support-dwa/pdcpmagijalfljmkmjngeonclgbbannb)
|
||||
|
||||
This makes debugging Zig WebAssembly code a breeze.
|
||||
|
||||
#### The Sources Tarball
|
||||
|
||||
The system expects the top level of `sources.tar` to be the set of modules
|
||||
documented. So for the Zig standard library you would do this:
|
||||
`tar cf std.tar std/`. Don't compress it; the idea is to rely on HTTP
|
||||
compression.
|
||||
|
||||
Any files that are not `.zig` source files will be ignored by `main.wasm`;
however, those files will take up wasted space in the tar file. For the
standard library, use the set of files that zig installs when running `zig
|
||||
build`, which is the same as the set of files that are provided on
|
||||
ziglang.org/download.
|
||||
|
||||
If the system doesn't find a file named "foo/root.zig" or "foo/foo.zig", it
|
||||
will use the first file in the tar as the module root.
|
||||
|
||||
You don't typically need to create `sources.tar` yourself, since it is lazily
|
||||
provided by the `zig std` HTTP server as well as produced by `-femit-docs`.
|
||||
|
||||
|
||||
## Testing Zig Code With LLDB
|
||||
|
||||
[@jacobly0](https://github.com/jacobly0) maintains a fork of LLDB with Zig support: https://github.com/jacobly0/llvm-project/tree/lldb-zig
|
||||
|
||||
This fork only contains changes for debugging programs compiled by Zig's self-hosted backends, i.e. `zig build-exe -fno-llvm ...`.
|
||||
|
||||
### Building
|
||||
|
||||
To build the LLDB fork, make sure you have [prerequisites](https://lldb.llvm.org/resources/build.html#preliminaries) installed, and then do something like:
|
||||
|
||||
```console
|
||||
$ cmake llvm -G Ninja -B build -DLLVM_ENABLE_PROJECTS="clang;lldb" -DCMAKE_BUILD_TYPE=RelWithDebInfo -DLLVM_ENABLE_ASSERTIONS=ON -DLLDB_ENABLE_LIBEDIT=ON -DLLDB_ENABLE_PYTHON=ON
|
||||
$ cmake --build build --target lldb --target lldb-server
|
||||
```
|
||||
|
||||
(You may need to manually [configure dependencies](https://lldb.llvm.org/resources/build.html#optional-dependencies) if CMake can't find them.)
|
||||
|
||||
Once built, you can run `./build/bin/lldb` and so on.
|
||||
|
||||
### Pretty Printers
|
||||
|
||||
If you will be debugging the Zig compiler itself, or if you will be debugging any project compiled with Zig's LLVM backend (not recommended with the LLDB fork, prefer vanilla LLDB with a version that matches the version of LLVM that Zig is using), you can get a better debugging experience by using [`lldb_pretty_printers.py`](https://github.com/ziglang/zig/blob/master/tools/lldb_pretty_printers.py).
|
||||
|
||||
Put this line in `~/.lldbinit`:
|
||||
|
||||
```
|
||||
command script import /path/to/zig/tools/lldb_pretty_printers.py
|
||||
```
|
||||
|
||||
If you will be using Zig's LLVM backend (again, not recommended with the LLDB fork), you will also want these lines:
|
||||
|
||||
```
|
||||
type category enable zig.lang
|
||||
type category enable zig.std
|
||||
```
|
||||
If you will be debugging a Zig compiler built using Zig's LLVM backend (again, not recommended with the LLDB fork), you will also want this line:
|
||||
```
|
||||
type category enable zig.stage2
|
||||
```
|
||||
|
||||
Please see the [Community](https://github.com/ziglang/zig/wiki/Community) wiki
|
||||
page for a public listing of social spaces.
|
||||
|
||||
@ -638,7 +638,7 @@
|
||||
{#syntax#}i7{#endsyntax#} refers to a signed 7-bit integer. The maximum allowed bit-width of an
|
||||
integer type is {#syntax#}65535{#endsyntax#}.
|
||||
</p>
|
||||
{#see_also|Integers|Floats|void|Errors|@Type#}
|
||||
{#see_also|Integers|Floats|void|Errors|@Int#}
|
||||
{#header_close#}
|
||||
{#header_open|Primitive Values#}
|
||||
<div class="table-wrapper">
|
||||
@ -3723,9 +3723,9 @@ void do_a_thing(struct Foo *foo) {
|
||||
<td>{#syntax#}x{#endsyntax#} is a {#syntax#}@FieldType(T, "a"){#endsyntax#}</td>
|
||||
</tr>
|
||||
<tr>
|
||||
<th scope="row">{#syntax#}@Type(x){#endsyntax#}</th>
|
||||
<th scope="row">{#syntax#}@Int(x, y){#endsyntax#}</th>
|
||||
<td>-</td>
|
||||
<td>{#syntax#}x{#endsyntax#} is a {#syntax#}std.builtin.Type{#endsyntax#}</td>
|
||||
<td>{#syntax#}x{#endsyntax#} is a {#syntax#}std.builtin.Signedness{#endsyntax#}, {#syntax#}y{#endsyntax#} is a {#syntax#}u16{#endsyntax#}</td>
|
||||
</tr>
|
||||
<tr>
|
||||
<th scope="row">{#syntax#}@typeInfo(x){#endsyntax#}</th>
|
||||
@ -3839,9 +3839,9 @@ void do_a_thing(struct Foo *foo) {
|
||||
<td>{#syntax#}x{#endsyntax#} has no result location (typed initializers do not propagate result locations)</td>
|
||||
</tr>
|
||||
<tr>
|
||||
<th scope="row">{#syntax#}@Type(x){#endsyntax#}</th>
|
||||
<td>{#syntax#}ptr{#endsyntax#}</td>
|
||||
<td>{#syntax#}x{#endsyntax#} has no result location</td>
|
||||
<th scope="row">{#syntax#}@Int(x, y){#endsyntax#}</th>
|
||||
<td>-</td>
|
||||
<td>{#syntax#}x{#endsyntax#} and {#syntax#}y{#endsyntax#} do not have result locations</td>
|
||||
</tr>
|
||||
<tr>
|
||||
<th scope="row">{#syntax#}@typeInfo(x){#endsyntax#}</th>
|
||||
@ -5755,41 +5755,75 @@ fn cmpxchgWeakButNotAtomic(comptime T: type, ptr: *T, expected_value: T, new_val
|
||||
</p>
|
||||
{#header_close#}
|
||||
|
||||
{#header_open|@Type#}
|
||||
<pre>{#syntax#}@Type(comptime info: std.builtin.Type) type{#endsyntax#}</pre>
|
||||
<p>
|
||||
This function is the inverse of {#link|@typeInfo#}. It reifies type information
|
||||
into a {#syntax#}type{#endsyntax#}.
|
||||
</p>
|
||||
<p>
|
||||
It is available for the following types:
|
||||
</p>
|
||||
<ul>
|
||||
<li>{#syntax#}type{#endsyntax#}</li>
|
||||
<li>{#syntax#}noreturn{#endsyntax#}</li>
|
||||
<li>{#syntax#}void{#endsyntax#}</li>
|
||||
<li>{#syntax#}bool{#endsyntax#}</li>
|
||||
<li>{#link|Integers#} - The maximum bit count for an integer type is {#syntax#}65535{#endsyntax#}.</li>
|
||||
<li>{#link|Floats#}</li>
|
||||
<li>{#link|Pointers#}</li>
|
||||
<li>{#syntax#}comptime_int{#endsyntax#}</li>
|
||||
<li>{#syntax#}comptime_float{#endsyntax#}</li>
|
||||
<li>{#syntax#}@TypeOf(undefined){#endsyntax#}</li>
|
||||
<li>{#syntax#}@TypeOf(null){#endsyntax#}</li>
|
||||
<li>{#link|Arrays#}</li>
|
||||
<li>{#link|Optionals#}</li>
|
||||
<li>{#link|Error Set Type#}</li>
|
||||
<li>{#link|Error Union Type#}</li>
|
||||
<li>{#link|Vectors#}</li>
|
||||
<li>{#link|opaque#}</li>
|
||||
<li>{#syntax#}anyframe{#endsyntax#}</li>
|
||||
<li>{#link|struct#}</li>
|
||||
<li>{#link|enum#}</li>
|
||||
<li>{#link|Enum Literals#}</li>
|
||||
<li>{#link|union#}</li>
|
||||
<li>{#link|Functions#}</li>
|
||||
</ul>
|
||||
{#header_open|@EnumLiteral#}
|
||||
<pre>{#syntax#}@EnumLiteral() type{#endsyntax#}</pre>
|
||||
<p>Returns the comptime-only "enum literal" type. This is the type of uncoerced {#link|Enum Literals#}. Values of this type can coerce to any {#link|enum#} with a matching field.</p>
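<p>For illustration (hypothetical names, assuming the coercion rule described above):</p>
<pre>{#syntax#}const Color = enum { red, green, blue };

fn asColor(comptime literal: @EnumLiteral()) Color {
    // An enum literal coerces to any enum with a matching field name.
    return literal;
}{#endsyntax#}</pre>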
|
||||
{#header_close#}
|
||||
|
||||
{#header_open|@Int#}
|
||||
<pre>{#syntax#}@Int(comptime signedness: std.builtin.Signedness, comptime bits: u16) type{#endsyntax#}</pre>
|
||||
<p>Returns an integer type with the given signedness and bit width.</p>
|
||||
<p>For instance, {#syntax#}@Int(.unsigned, 18){#endsyntax#} returns the type {#syntax#}u18{#endsyntax#}.</p>
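<p>A minimal comptime check of this behavior (illustrative sketch):</p>
<pre>{#syntax#}const std = @import("std");
const assert = std.debug.assert;

comptime {
    assert(@Int(.unsigned, 18) == u18);
    assert(@Int(.signed, 7) == i7);
}{#endsyntax#}</pre>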
|
||||
{#header_close#}
|
||||
|
||||
{#header_open|@Tuple#}
|
||||
<pre>{#syntax#}@Tuple(comptime field_types: []const type) type{#endsyntax#}</pre>
|
||||
<p>Returns a {#link|tuple|Tuples#} type with the given field types.</p>
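<p>For example (an illustrative sketch, assuming the semantics above):</p>
<pre>{#syntax#}const Pair = @Tuple(&.{ u32, bool });
// Pair is a tuple type with two fields, of type u32 and bool.
const p: Pair = .{ 123, true };{#endsyntax#}</pre>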
|
||||
{#header_close#}
|
||||
|
||||
{#header_open|@Pointer#}
|
||||
<pre>{#syntax#}@Pointer(
|
||||
comptime size: std.builtin.Type.Pointer.Size,
|
||||
comptime attrs: std.builtin.Type.Pointer.Attributes,
|
||||
comptime Element: type,
|
||||
comptime sentinel: ?Element,
|
||||
) type{#endsyntax#}</pre>
|
||||
<p>Returns a {#link|pointer|Pointers#} type with the properties specified by the arguments.</p>
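<p>A sketch mirroring a usage elsewhere in this change (assuming default values for the remaining attributes):</p>
<pre>{#syntax#}comptime {
    // A const slice of u8 with no sentinel:
    if (@Pointer(.slice, .{ .@"const" = true }, u8, null) != []const u8) unreachable;
}{#endsyntax#}</pre>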
|
||||
{#header_close#}
|
||||
|
||||
{#header_open|@Fn#}
|
||||
<pre>{#syntax#}@Fn(
|
||||
comptime param_types: []const type,
|
||||
comptime param_attrs: *const [param_types.len]std.builtin.Type.Fn.Param.Attributes,
|
||||
comptime ReturnType: type,
|
||||
comptime attrs: std.builtin.Type.Fn.Attributes,
|
||||
) type{#endsyntax#}</pre>
|
||||
<p>Returns a {#link|function|Functions#} type with the properties specified by the arguments.</p>
|
||||
{#header_close#}
|
||||
|
||||
{#header_open|@Struct#}
|
||||
<pre>{#syntax#}@Struct(
|
||||
comptime layout: std.builtin.Type.ContainerLayout,
|
||||
comptime BackingInt: ?type,
|
||||
comptime field_names: []const []const u8,
|
||||
comptime field_types: *const [field_names.len]type,
|
||||
comptime field_attrs: *const [field_names.len]std.builtin.Type.StructField.Attributes,
|
||||
) type{#endsyntax#}</pre>
|
||||
<p>Returns a {#link|struct#} type with the properties specified by the arguments.</p>
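<p>An illustrative sketch, mirroring the argument pattern used elsewhere in this change (field attributes left at their defaults):</p>
<pre>{#syntax#}const Point = @Struct(.auto, null, &.{ "x", "y" }, &.{ i32, i32 }, &@splat(.{}));
const origin: Point = .{ .x = 0, .y = 0 };{#endsyntax#}</pre>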
|
||||
{#header_close#}
|
||||
|
||||
{#header_open|@Union#}
|
||||
<pre>{#syntax#}@Union(
|
||||
comptime layout: std.builtin.Type.ContainerLayout,
|
||||
/// Either the integer tag type, or the integer backing type, depending on `layout`.
|
||||
comptime ArgType: ?type,
|
||||
comptime field_names: []const []const u8,
|
||||
comptime field_types: *const [field_names.len]type,
|
||||
comptime field_attrs: *const [field_names.len]std.builtin.Type.UnionField.Attributes,
|
||||
) type{#endsyntax#}</pre>
|
||||
<p>Returns a {#link|union#} type with the properties specified by the arguments.</p>
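<p>An illustrative sketch of an untagged union, mirroring the argument pattern used elsewhere in this change:</p>
<pre>{#syntax#}const Value = @Union(.auto, null, &.{ "int", "float" }, &.{ i64, f64 }, &@splat(.{}));
const v: Value = .{ .float = 3.14 };{#endsyntax#}</pre>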
|
||||
{#header_close#}
|
||||
|
||||
{#header_open|@Enum#}
|
||||
<pre>{#syntax#}@Enum(
|
||||
comptime TagInt: type,
|
||||
comptime mode: std.builtin.Type.Enum.Mode,
|
||||
comptime field_names: []const []const u8,
|
||||
comptime field_values: *const [field_names.len]TagInt,
|
||||
) type{#endsyntax#}</pre>
|
||||
<p>Returns an {#link|enum#} type with the properties specified by the arguments.</p>
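<p>An illustrative sketch, mirroring the usage elsewhere in this change:</p>
<pre>{#syntax#}const Color = @Enum(u8, .exhaustive, &.{ "red", "green", "blue" }, &.{ 0, 1, 2 });
const c: Color = .green;{#endsyntax#}</pre>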
|
||||
{#header_close#}
|
||||
|
||||
{#header_open|@typeInfo#}
|
||||
<pre>{#syntax#}@typeInfo(comptime T: type) std.builtin.Type{#endsyntax#}</pre>
|
||||
<p>
|
||||
|
||||
@ -11,7 +11,7 @@ pub const std_options: std.Options = .{
|
||||
|
||||
fn myLogFn(
|
||||
comptime level: std.log.Level,
|
||||
comptime scope: @Type(.enum_literal),
|
||||
comptime scope: @EnumLiteral(),
|
||||
comptime format: []const u8,
|
||||
args: anytype,
|
||||
) void {
|
||||
|
||||
@ -41,7 +41,7 @@ test "coercion between unions and enums" {
|
||||
try expect(u_4.tag() == 1);
|
||||
|
||||
// The following example is invalid.
|
||||
// error: coercion from enum '@TypeOf(.enum_literal)' to union 'test_coerce_unions_enum.U2' must initialize 'f32' field 'b'
|
||||
// error: coercion from enum '@EnumLiteral()' to union 'test_coerce_unions_enum.U2' must initialize 'f32' field 'b'
|
||||
//var u_5: U2 = .b;
|
||||
//try expect(u_5.tag() == 2);
|
||||
}
|
||||
|
||||
@ -49,7 +49,7 @@ pub fn panic(msg: []const u8, st: ?*std.builtin.StackTrace, addr: ?usize) noretu
|
||||
|
||||
fn logFn(
|
||||
comptime message_level: log.Level,
|
||||
comptime scope: @TypeOf(.enum_literal),
|
||||
comptime scope: @EnumLiteral(),
|
||||
comptime format: []const u8,
|
||||
args: anytype,
|
||||
) void {
|
||||
|
||||
lib/compiler/aro/aro/Attribute.zig (vendored), 22 lines changed
@ -717,23 +717,13 @@ pub const Tag = std.meta.DeclEnum(attributes);
|
||||
|
||||
pub const Arguments = blk: {
|
||||
const decls = @typeInfo(attributes).@"struct".decls;
|
||||
var union_fields: [decls.len]ZigType.UnionField = undefined;
|
||||
for (decls, &union_fields) |decl, *field| {
|
||||
field.* = .{
|
||||
.name = decl.name,
|
||||
.type = @field(attributes, decl.name),
|
||||
.alignment = @alignOf(@field(attributes, decl.name)),
|
||||
};
|
||||
var names: [decls.len][]const u8 = undefined;
|
||||
var types: [decls.len]type = undefined;
|
||||
for (decls, &names, &types) |decl, *name, *T| {
|
||||
name.* = decl.name;
|
||||
T.* = @field(attributes, decl.name);
|
||||
}
|
||||
|
||||
break :blk @Type(.{
|
||||
.@"union" = .{
|
||||
.layout = .auto,
|
||||
.tag_type = null,
|
||||
.fields = &union_fields,
|
||||
.decls = &.{},
|
||||
},
|
||||
});
|
||||
break :blk @Union(.auto, null, &names, &types, &@splat(.{}));
|
||||
};
|
||||
|
||||
pub fn ArgumentsForTag(comptime tag: Tag) type {
|
||||
|
||||
lib/compiler/aro/assembly_backend/x86_64.zig (vendored), 2 lines changed
@ -59,7 +59,7 @@ fn serializeFloat(comptime T: type, value: T, w: *std.Io.Writer) !void {
|
||||
else => {
|
||||
const size = @bitSizeOf(T);
|
||||
const storage_unit = std.meta.intToEnum(StorageUnit, size) catch unreachable;
|
||||
const IntTy = @Type(.{ .int = .{ .signedness = .unsigned, .bits = size } });
|
||||
const IntTy = @Int(.unsigned, size);
|
||||
const int_val: IntTy = @bitCast(value);
|
||||
return serializeInt(int_val, storage_unit, w);
|
||||
},
|
||||
|
||||
lib/compiler/resinator/code_pages.zig (vendored), 13 lines changed
@ -179,12 +179,13 @@ pub const UnsupportedCodePage = enum(u16) {
|
||||
|
||||
pub const CodePage = blk: {
|
||||
const fields = @typeInfo(SupportedCodePage).@"enum".fields ++ @typeInfo(UnsupportedCodePage).@"enum".fields;
|
||||
break :blk @Type(.{ .@"enum" = .{
|
||||
.tag_type = u16,
|
||||
.decls = &.{},
|
||||
.fields = fields,
|
||||
.is_exhaustive = true,
|
||||
} });
|
||||
var field_names: [fields.len][]const u8 = undefined;
|
||||
var field_values: [fields.len]u16 = undefined;
|
||||
for (fields, &field_names, &field_values) |field, *name, *val| {
|
||||
name.* = field.name;
|
||||
val.* = field.value;
|
||||
}
|
||||
break :blk @Enum(u16, .exhaustive, &field_names, &field_values);
|
||||
};
|
||||
|
||||
pub fn isSupported(code_page: CodePage) bool {
|
||||
|
||||
lib/compiler/resinator/errors.zig (vendored), 21 lines changed
@ -862,20 +862,23 @@ pub const ErrorDetails = struct {
|
||||
pub const ErrorDetailsWithoutCodePage = blk: {
|
||||
const details_info = @typeInfo(ErrorDetails);
|
||||
const fields = details_info.@"struct".fields;
|
||||
var fields_without_codepage: [fields.len - 1]std.builtin.Type.StructField = undefined;
|
||||
var field_names: [fields.len - 1][]const u8 = undefined;
|
||||
var field_types: [fields.len - 1]type = undefined;
|
||||
var field_attrs: [fields.len - 1]std.builtin.Type.StructField.Attributes = undefined;
|
||||
var i: usize = 0;
|
||||
for (fields) |field| {
|
||||
if (std.mem.eql(u8, field.name, "code_page")) continue;
|
||||
fields_without_codepage[i] = field;
|
||||
field_names[i] = field.name;
|
||||
field_types[i] = field.type;
|
||||
field_attrs[i] = .{
|
||||
.@"comptime" = field.is_comptime,
|
||||
.@"align" = field.alignment,
|
||||
.default_value_ptr = field.default_value_ptr,
|
||||
};
|
||||
i += 1;
|
||||
}
|
||||
std.debug.assert(i == fields_without_codepage.len);
|
||||
break :blk @Type(.{ .@"struct" = .{
|
||||
.layout = .auto,
|
||||
.fields = &fields_without_codepage,
|
||||
.decls = &.{},
|
||||
.is_tuple = false,
|
||||
} });
|
||||
std.debug.assert(i == fields.len - 1);
|
||||
break :blk @Struct(.auto, null, &field_names, &field_types, &field_attrs);
|
||||
};
|
||||
|
||||
fn cellCount(code_page: SupportedCodePage, source: []const u8, start_index: usize, end_index: usize) usize {
|
||||
|
||||
@ -298,7 +298,7 @@ fn mainTerminal() void {
|
||||
|
||||
pub fn log(
|
||||
comptime message_level: std.log.Level,
|
||||
comptime scope: @Type(.enum_literal),
|
||||
comptime scope: @EnumLiteral(),
|
||||
comptime format: []const u8,
|
||||
args: anytype,
|
||||
) void {
|
||||
|
||||
@ -290,10 +290,7 @@ pub fn normalize(comptime T: type, significand: *std.meta.Int(.unsigned, @typeIn
|
||||
pub inline fn fneg(a: anytype) @TypeOf(a) {
|
||||
const F = @TypeOf(a);
|
||||
const bits = @typeInfo(F).float.bits;
|
||||
const U = @Type(.{ .int = .{
|
||||
.signedness = .unsigned,
|
||||
.bits = bits,
|
||||
} });
|
||||
const U = @Int(.unsigned, bits);
|
||||
const sign_bit_mask = @as(U, 1) << (bits - 1);
|
||||
const negated = @as(U, @bitCast(a)) ^ sign_bit_mask;
|
||||
return @bitCast(negated);
|
||||
|
||||
@ -66,17 +66,17 @@ pub inline fn floatFromBigInt(comptime T: type, comptime signedness: std.builtin
|
||||
switch (x.len) {
|
||||
0 => return 0,
|
||||
inline 1...4 => |limbs_len| return @floatFromInt(@as(
|
||||
@Type(.{ .int = .{ .signedness = signedness, .bits = 32 * limbs_len } }),
|
||||
@Int(signedness, 32 * limbs_len),
|
||||
@bitCast(x[0..limbs_len].*),
|
||||
)),
|
||||
else => {},
|
||||
}
|
||||
|
||||
// sign implicit fraction round sticky
|
||||
const I = comptime @Type(.{ .int = .{
|
||||
.signedness = signedness,
|
||||
.bits = @as(u16, @intFromBool(signedness == .signed)) + 1 + math.floatFractionalBits(T) + 1 + 1,
|
||||
} });
|
||||
const I = comptime @Int(
|
||||
signedness,
|
||||
@as(u16, @intFromBool(signedness == .signed)) + 1 + math.floatFractionalBits(T) + 1 + 1,
|
||||
);
|
||||
|
||||
const clrsb = clrsb: {
|
||||
var clsb: usize = 0;
|
||||
|
||||
@ -56,7 +56,7 @@ pub inline fn bigIntFromFloat(comptime signedness: std.builtin.Signedness, resul
|
||||
0 => return,
|
||||
inline 1...4 => |limbs_len| {
|
||||
result[0..limbs_len].* = @bitCast(@as(
|
||||
@Type(.{ .int = .{ .signedness = signedness, .bits = 32 * limbs_len } }),
|
||||
@Int(signedness, 32 * limbs_len),
|
||||
@intFromFloat(a),
|
||||
));
|
||||
return;
|
||||
@ -66,10 +66,7 @@ pub inline fn bigIntFromFloat(comptime signedness: std.builtin.Signedness, resul
|
||||
|
||||
// sign implicit fraction
|
||||
const significand_bits = 1 + math.floatFractionalBits(@TypeOf(a));
|
||||
const I = @Type(comptime .{ .int = .{
|
||||
.signedness = signedness,
|
||||
.bits = @as(u16, @intFromBool(signedness == .signed)) + significand_bits,
|
||||
} });
|
||||
const I = @Int(signedness, @as(u16, @intFromBool(signedness == .signed)) + significand_bits);
|
||||
|
||||
const parts = math.frexp(a);
|
||||
const significand_bits_adjusted_to_handle_smin = @as(i32, significand_bits) +
|
||||
|
||||
@ -159,7 +159,7 @@ inline fn copyFixedLength(
|
||||
else if (len > @sizeOf(usize))
|
||||
@Vector(len, u8)
|
||||
else
|
||||
@Type(.{ .int = .{ .signedness = .unsigned, .bits = len * 8 } });
|
||||
@Int(.unsigned, len * 8);
|
||||
|
||||
const loop_count = @divExact(len, @sizeOf(T));
|
||||
|
||||
|
||||
@ -41,7 +41,7 @@ pub fn panic(msg: []const u8, st: ?*std.builtin.StackTrace, addr: ?usize) noretu
|
||||
|
||||
fn logFn(
|
||||
comptime message_level: log.Level,
|
||||
comptime scope: @TypeOf(.enum_literal),
|
||||
comptime scope: @EnumLiteral(),
|
||||
comptime format: []const u8,
|
||||
args: anytype,
|
||||
) void {
|
||||
|
||||
@ -15,7 +15,7 @@ pub const std_options = std.Options{
|
||||
|
||||
fn logOverride(
|
||||
comptime level: std.log.Level,
|
||||
comptime scope: @Type(.enum_literal),
|
||||
comptime scope: @EnumLiteral(),
|
||||
comptime format: []const u8,
|
||||
args: anytype,
|
||||
) void {
|
||||
|
||||
@ -416,7 +416,7 @@ fn createChildOnly(
|
||||
fn userInputOptionsFromArgs(arena: Allocator, args: anytype) UserInputOptionsMap {
|
||||
var map = UserInputOptionsMap.init(arena);
|
||||
inline for (@typeInfo(@TypeOf(args)).@"struct".fields) |field| {
|
||||
if (field.type == @Type(.null)) continue;
|
||||
if (field.type == @TypeOf(null)) continue;
|
||||
addUserInputOptionFromArg(arena, &map, field, field.type, @field(args, field.name));
|
||||
}
|
||||
return map;
|
||||
@ -526,16 +526,11 @@ fn addUserInputOptionFromArg(
|
||||
.pointer => |ptr_info| switch (ptr_info.size) {
|
||||
.one => switch (@typeInfo(ptr_info.child)) {
|
||||
.array => |array_info| {
|
||||
comptime var slice_info = ptr_info;
|
||||
slice_info.size = .slice;
|
||||
slice_info.is_const = true;
|
||||
slice_info.child = array_info.child;
|
||||
slice_info.sentinel_ptr = null;
|
||||
addUserInputOptionFromArg(
|
||||
arena,
|
||||
map,
|
||||
field,
|
||||
@Type(.{ .pointer = slice_info }),
|
||||
@Pointer(.slice, .{ .@"const" = true }, array_info.child, null),
|
||||
maybe_value orelse null,
|
||||
);
|
||||
return;
|
||||
@ -553,14 +548,11 @@ fn addUserInputOptionFromArg(
|
||||
}) catch @panic("OOM");
|
||||
},
|
||||
else => {
|
||||
comptime var slice_info = ptr_info;
|
||||
slice_info.is_const = true;
|
||||
slice_info.sentinel_ptr = null;
|
||||
addUserInputOptionFromArg(
|
||||
arena,
|
||||
map,
|
||||
field,
|
||||
@Type(.{ .pointer = slice_info }),
|
||||
@Pointer(ptr_info.size, .{ .@"const" = true }, ptr_info.child, null),
|
||||
maybe_value orelse null,
|
||||
);
|
||||
return;
|
||||
|
||||
@ -528,23 +528,7 @@ pub fn Poller(comptime StreamEnum: type) type {
|
||||
/// Given an enum, returns a struct with fields of that enum, each field
|
||||
/// representing an I/O stream for polling.
|
||||
pub fn PollFiles(comptime StreamEnum: type) type {
|
||||
const enum_fields = @typeInfo(StreamEnum).@"enum".fields;
|
||||
var struct_fields: [enum_fields.len]std.builtin.Type.StructField = undefined;
|
||||
for (&struct_fields, enum_fields) |*struct_field, enum_field| {
|
||||
struct_field.* = .{
|
||||
.name = enum_field.name,
|
||||
.type = std.fs.File,
|
||||
.default_value_ptr = null,
|
||||
.is_comptime = false,
|
||||
.alignment = @alignOf(std.fs.File),
|
||||
};
|
||||
}
|
||||
return @Type(.{ .@"struct" = .{
|
||||
.layout = .auto,
|
||||
.fields = &struct_fields,
|
||||
.decls = &.{},
|
||||
.is_tuple = false,
|
||||
} });
|
||||
return @Struct(.auto, null, std.meta.fieldNames(StreamEnum), &@splat(std.fs.File), &@splat(.{}));
|
||||
}
|
||||
|
||||
test {
|
||||
@ -1625,22 +1609,14 @@ pub fn sleep(io: Io, duration: Duration, clock: Clock) SleepError!void {
|
||||
/// fields, each field type the future's result.
|
||||
pub fn SelectUnion(S: type) type {
|
||||
const struct_fields = @typeInfo(S).@"struct".fields;
|
||||
var fields: [struct_fields.len]std.builtin.Type.UnionField = undefined;
|
||||
for (&fields, struct_fields) |*union_field, struct_field| {
|
||||
const F = @typeInfo(struct_field.type).pointer.child;
|
||||
const Result = @TypeOf(@as(F, undefined).result);
|
||||
union_field.* = .{
|
||||
.name = struct_field.name,
|
||||
.type = Result,
|
||||
.alignment = struct_field.alignment,
|
||||
};
|
||||
var names: [struct_fields.len][]const u8 = undefined;
|
||||
var types: [struct_fields.len]type = undefined;
|
||||
for (struct_fields, &names, &types) |struct_field, *union_field_name, *UnionFieldType| {
|
||||
const FieldFuture = @typeInfo(struct_field.type).pointer.child;
|
||||
union_field_name.* = struct_field.name;
|
||||
UnionFieldType.* = @FieldType(FieldFuture, "result");
|
||||
}
|
||||
return @Type(.{ .@"union" = .{
|
||||
.layout = .auto,
|
||||
.tag_type = std.meta.FieldEnum(S),
|
||||
.fields = &fields,
|
||||
.decls = &.{},
|
||||
} });
|
||||
return @Union(.auto, std.meta.FieldEnum(S), &names, &types, &@splat(.{}));
|
||||
}
|
||||
|
||||
/// `s` is a struct with every field a `*Future(T)`, where `T` can be any type,
|
||||
|
||||
@ -1273,20 +1273,17 @@ pub const TakeLeb128Error = Error || error{Overflow};
/// Read a single LEB128 value as type T, or `error.Overflow` if the value cannot fit.
pub fn takeLeb128(r: *Reader, comptime Result: type) TakeLeb128Error!Result {
const result_info = @typeInfo(Result).int;
return std.math.cast(Result, try r.takeMultipleOf7Leb128(@Type(.{ .int = .{
.signedness = result_info.signedness,
.bits = std.mem.alignForwardAnyAlign(u16, result_info.bits, 7),
} }))) orelse error.Overflow;
return std.math.cast(Result, try r.takeMultipleOf7Leb128(@Int(
result_info.signedness,
std.mem.alignForwardAnyAlign(u16, result_info.bits, 7),
))) orelse error.Overflow;
}

fn takeMultipleOf7Leb128(r: *Reader, comptime Result: type) TakeLeb128Error!Result {
const result_info = @typeInfo(Result).int;
comptime assert(result_info.bits % 7 == 0);
var remaining_bits: std.math.Log2IntCeil(Result) = result_info.bits;
const UnsignedResult = @Type(.{ .int = .{
.signedness = .unsigned,
.bits = result_info.bits,
} });
const UnsignedResult = @Int(.unsigned, result_info.bits);
var result: UnsignedResult = 0;
var fits = true;
while (true) {

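The LEB128 reader shows the most common conversion in this diff: `@Type(.{ .int = .{ ... } })` becomes `@Int(signedness, bits)`. A minimal sketch of the assumed equivalence, based only on the call sites above:

```zig
const std = @import("std");

test "@Int sketch" {
    // Assumed equivalences based on the call sites above: `@Int(signedness, bits)`
    // names the same type as the corresponding primitive integer type.
    try std.testing.expectEqual(u24, @Int(.unsigned, 24));
    try std.testing.expectEqual(i7, @Int(.signed, 7));
}
```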
@ -1890,7 +1890,7 @@ pub fn writeUleb128(w: *Writer, value: anytype) Error!void {
|
||||
try w.writeLeb128(switch (@typeInfo(@TypeOf(value))) {
|
||||
.comptime_int => @as(std.math.IntFittingRange(0, @abs(value)), value),
|
||||
.int => |value_info| switch (value_info.signedness) {
|
||||
.signed => @as(@Type(.{ .int = .{ .signedness = .unsigned, .bits = value_info.bits -| 1 } }), @intCast(value)),
|
||||
.signed => @as(@Int(.unsigned, value_info.bits -| 1), @intCast(value)),
|
||||
.unsigned => value,
|
||||
},
|
||||
else => comptime unreachable,
|
||||
@ -1903,7 +1903,7 @@ pub fn writeSleb128(w: *Writer, value: anytype) Error!void {
|
||||
.comptime_int => @as(std.math.IntFittingRange(@min(value, -1), @max(0, value)), value),
|
||||
.int => |value_info| switch (value_info.signedness) {
|
||||
.signed => value,
|
||||
.unsigned => @as(@Type(.{ .int = .{ .signedness = .signed, .bits = value_info.bits + 1 } }), value),
|
||||
.unsigned => @as(@Int(.signed, value_info.bits + 1), value),
|
||||
},
|
||||
else => comptime unreachable,
|
||||
});
|
||||
@ -1912,10 +1912,10 @@ pub fn writeSleb128(w: *Writer, value: anytype) Error!void {
|
||||
/// Write a single integer as LEB128 to the given writer.
|
||||
pub fn writeLeb128(w: *Writer, value: anytype) Error!void {
|
||||
const value_info = @typeInfo(@TypeOf(value)).int;
|
||||
try w.writeMultipleOf7Leb128(@as(@Type(.{ .int = .{
|
||||
.signedness = value_info.signedness,
|
||||
.bits = @max(std.mem.alignForwardAnyAlign(u16, value_info.bits, 7), 7),
|
||||
} }), value));
|
||||
try w.writeMultipleOf7Leb128(@as(@Int(
|
||||
value_info.signedness,
|
||||
@max(std.mem.alignForwardAnyAlign(u16, value_info.bits, 7), 7),
|
||||
), value));
|
||||
}
|
||||
|
||||
fn writeMultipleOf7Leb128(w: *Writer, value: anytype) Error!void {
|
||||
@ -1929,10 +1929,10 @@ fn writeMultipleOf7Leb128(w: *Writer, value: anytype) Error!void {
|
||||
.unsigned => remaining > std.math.maxInt(u7),
|
||||
};
|
||||
byte.* = .{
|
||||
.bits = @bitCast(@as(@Type(.{ .int = .{
|
||||
.signedness = value_info.signedness,
|
||||
.bits = 7,
|
||||
} }), @truncate(remaining))),
|
||||
.bits = @bitCast(@as(
|
||||
@Int(value_info.signedness, 7),
|
||||
@truncate(remaining),
|
||||
)),
|
||||
.more = more,
|
||||
};
|
||||
if (value_info.bits > 7) remaining >>= 7;
|
||||
|
||||
@ -548,19 +548,19 @@ pub const TypeId = std.meta.Tag(Type);
|
||||
/// This data structure is used by the Zig language code generation and
|
||||
/// therefore must be kept in sync with the compiler implementation.
|
||||
pub const Type = union(enum) {
|
||||
type: void,
|
||||
void: void,
|
||||
bool: void,
|
||||
noreturn: void,
|
||||
type,
|
||||
void,
|
||||
bool,
|
||||
noreturn,
|
||||
int: Int,
|
||||
float: Float,
|
||||
pointer: Pointer,
|
||||
array: Array,
|
||||
@"struct": Struct,
|
||||
comptime_float: void,
|
||||
comptime_int: void,
|
||||
undefined: void,
|
||||
null: void,
|
||||
comptime_float,
|
||||
comptime_int,
|
||||
undefined,
|
||||
null,
|
||||
optional: Optional,
|
||||
error_union: ErrorUnion,
|
||||
error_set: ErrorSet,
|
||||
@ -571,7 +571,7 @@ pub const Type = union(enum) {
|
||||
frame: Frame,
|
||||
@"anyframe": AnyFrame,
|
||||
vector: Vector,
|
||||
enum_literal: void,
|
||||
enum_literal,
|
||||
|
||||
/// This data structure is used by the Zig language code generation and
|
||||
/// therefore must be kept in sync with the compiler implementation.
|
||||
@ -619,6 +619,16 @@ pub const Type = union(enum) {
|
||||
slice,
|
||||
c,
|
||||
};
|
||||
|
||||
/// This data structure is used by the Zig language code generation and
|
||||
/// therefore must be kept in sync with the compiler implementation.
|
||||
pub const Attributes = struct {
|
||||
@"const": bool = false,
|
||||
@"volatile": bool = false,
|
||||
@"allowzero": bool = false,
|
||||
@"addrspace": ?AddressSpace = null,
|
||||
@"align": ?usize = null,
|
||||
};
|
||||
};
|
||||
|
||||
/// This data structure is used by the Zig language code generation and
|
||||
@ -668,6 +678,14 @@ pub const Type = union(enum) {
|
||||
const dp: *const sf.type = @ptrCast(@alignCast(sf.default_value_ptr orelse return null));
|
||||
return dp.*;
|
||||
}
|
||||
|
||||
/// This data structure is used by the Zig language code generation and
|
||||
/// therefore must be kept in sync with the compiler implementation.
|
||||
pub const Attributes = struct {
|
||||
@"comptime": bool = false,
|
||||
@"align": ?usize = null,
|
||||
default_value_ptr: ?*const anyopaque = null,
|
||||
};
|
||||
};
|
||||
|
||||
/// This data structure is used by the Zig language code generation and
|
||||
@ -718,6 +736,10 @@ pub const Type = union(enum) {
|
||||
fields: []const EnumField,
|
||||
decls: []const Declaration,
|
||||
is_exhaustive: bool,
|
||||
|
||||
/// This data structure is used by the Zig language code generation and
|
||||
/// therefore must be kept in sync with the compiler implementation.
|
||||
pub const Mode = enum { exhaustive, nonexhaustive };
|
||||
};
|
||||
|
||||
/// This data structure is used by the Zig language code generation and
|
||||
@ -726,6 +748,12 @@ pub const Type = union(enum) {
|
||||
name: [:0]const u8,
|
||||
type: type,
|
||||
alignment: comptime_int,
|
||||
|
||||
/// This data structure is used by the Zig language code generation and
|
||||
/// therefore must be kept in sync with the compiler implementation.
|
||||
pub const Attributes = struct {
|
||||
@"align": ?usize = null,
|
||||
};
|
||||
};
|
||||
|
||||
/// This data structure is used by the Zig language code generation and
|
||||
@ -753,6 +781,19 @@ pub const Type = union(enum) {
|
||||
is_generic: bool,
|
||||
is_noalias: bool,
|
||||
type: ?type,
|
||||
|
||||
/// This data structure is used by the Zig language code generation and
|
||||
/// therefore must be kept in sync with the compiler implementation.
|
||||
pub const Attributes = struct {
|
||||
@"noalias": bool = false,
|
||||
};
|
||||
};
|
||||
|
||||
/// This data structure is used by the Zig language code generation and
|
||||
/// therefore must be kept in sync with the compiler implementation.
|
||||
pub const Attributes = struct {
|
||||
@"callconv": CallingConvention = .auto,
|
||||
varargs: bool = false,
|
||||
};
|
||||
};
|
||||
|
||||
|
||||
@ -993,14 +993,15 @@ const huffman = struct {
const max_leafs = 286;
const max_nodes = max_leafs * 2;

const Node = struct {
freq: u16,
const Node = packed struct(u32) {
depth: u16,
freq: u16,

pub const Index = u16;

/// `freq` is more significant than `depth`
pub fn smaller(a: Node, b: Node) bool {
return if (a.freq != b.freq) a.freq < b.freq else a.depth < b.depth;
return @as(u32, @bitCast(a)) < @as(u32, @bitCast(b));
}
};

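The `Node` change relies on Zig's packed-struct bit layout: in a `packed struct(u32)` the first declared field occupies the least significant bits, so declaring `depth` before `freq` makes `freq` the more significant half, and a single `u32` comparison reproduces the old two-step ordering. A self-contained restatement of that reasoning (the test name is illustrative only):

```zig
const std = @import("std");

const Node = packed struct(u32) {
    depth: u16, // low bits
    freq: u16, // high bits, so it dominates the integer comparison

    fn smaller(a: Node, b: Node) bool {
        return @as(u32, @bitCast(a)) < @as(u32, @bitCast(b));
    }
};

test "freq dominates depth" {
    const a: Node = .{ .freq = 1, .depth = 9 };
    const b: Node = .{ .freq = 2, .depth = 0 };
    // Lower freq wins even though its depth is larger.
    try std.testing.expect(Node.smaller(a, b));
}
```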
@ -12,8 +12,8 @@ const Vec16 = @Vector(16, u32);
|
||||
const chunk_length = 1024;
|
||||
const max_depth = 54;
|
||||
|
||||
pub const simd_degree = std.simd.suggestVectorLength(u32) orelse 1;
|
||||
pub const max_simd_degree = simd_degree;
|
||||
const simd_degree = std.simd.suggestVectorLength(u32) orelse 1;
|
||||
const max_simd_degree = simd_degree;
|
||||
const max_simd_degree_or_2 = if (max_simd_degree > 2) max_simd_degree else 2;
|
||||
|
||||
/// Threshold for switching to parallel processing.
|
||||
@ -502,9 +502,7 @@ fn hashManySimd(
|
||||
var out_ptr = out.ptr;
|
||||
var cnt = counter;
|
||||
|
||||
const simd_deg = comptime simd_degree;
|
||||
|
||||
if (comptime simd_deg >= 16) {
|
||||
if (simd_degree >= 16) {
|
||||
while (remaining >= 16) {
|
||||
const sixteen_inputs = [16][*]const u8{
|
||||
inp[0], inp[1], inp[2], inp[3],
|
||||
@ -525,7 +523,7 @@ fn hashManySimd(
|
||||
}
|
||||
}
|
||||
|
||||
if (comptime simd_deg >= 8) {
|
||||
if (simd_degree >= 8) {
|
||||
while (remaining >= 8) {
|
||||
const eight_inputs = [8][*]const u8{
|
||||
inp[0], inp[1], inp[2], inp[3],
|
||||
@ -544,7 +542,7 @@ fn hashManySimd(
|
||||
}
|
||||
}
|
||||
|
||||
if (comptime simd_deg >= 4) {
|
||||
if (simd_degree >= 4) {
|
||||
while (remaining >= 4) {
|
||||
const four_inputs = [4][*]const u8{
|
||||
inp[0],
|
||||
@ -571,7 +569,7 @@ fn hashManySimd(
|
||||
}
|
||||
|
||||
fn hashMany(inputs: [][*]const u8, num_inputs: usize, blocks: usize, key: [8]u32, counter: u64, increment_counter: bool, flags: Flags, flags_start: Flags, flags_end: Flags, out: []u8) void {
|
||||
if (comptime max_simd_degree >= 4) {
|
||||
if (max_simd_degree >= 4) {
|
||||
hashManySimd(inputs, num_inputs, blocks, key, counter, increment_counter, flags, flags_start, flags_end, out);
|
||||
} else {
|
||||
hashManyPortable(inputs, num_inputs, blocks, key, counter, increment_counter, flags, flags_start, flags_end, out);
|
||||
@ -909,7 +907,7 @@ pub const Blake3 = struct {
|
||||
pub const digest_length = 32;
|
||||
pub const key_length = 32;
|
||||
|
||||
pub const Options = struct { key: ?[digest_length]u8 = null };
|
||||
pub const Options = struct { key: ?[key_length]u8 = null };
|
||||
pub const KdfOptions = struct {};
|
||||
|
||||
key: [8]u32,
|
||||
|
||||
@ -94,12 +94,12 @@ pub fn deserialize(comptime HashResult: type, str: []const u8) Error!HashResult
if (kvSplit(field)) |opt_version| {
if (mem.eql(u8, opt_version.key, version_param_name)) {
if (@hasField(HashResult, "alg_version")) {
const value_type_info = switch (@typeInfo(@TypeOf(out.alg_version))) {
.optional => |opt| @typeInfo(opt.child),
else => |t| t,
const ValueType = switch (@typeInfo(@TypeOf(out.alg_version))) {
.optional => |opt| opt.child,
else => @TypeOf(out.alg_version),
};
out.alg_version = fmt.parseUnsigned(
@Type(value_type_info),
ValueType,
opt_version.value,
10,
) catch return Error.InvalidEncoding;

@ -606,7 +606,7 @@ pub fn array(
|
||||
const elem_size = @divExact(@bitSizeOf(Elem), 8);
|
||||
var arr: [len_size + elem_size * elems.len]u8 = undefined;
|
||||
std.mem.writeInt(Len, arr[0..len_size], @intCast(elem_size * elems.len), .big);
|
||||
const ElemInt = @Type(.{ .int = .{ .signedness = .unsigned, .bits = @bitSizeOf(Elem) } });
|
||||
const ElemInt = @Int(.unsigned, @bitSizeOf(Elem));
|
||||
for (0.., @as([elems.len]Elem, elems)) |index, elem| {
|
||||
std.mem.writeInt(
|
||||
ElemInt,
|
||||
|
||||
@ -33,22 +33,8 @@ pub fn fromInt(comptime E: type, integer: anytype) ?E {
/// default, which may be undefined.
pub fn EnumFieldStruct(comptime E: type, comptime Data: type, comptime field_default: ?Data) type {
@setEvalBranchQuota(@typeInfo(E).@"enum".fields.len + eval_branch_quota_cushion);
var struct_fields: [@typeInfo(E).@"enum".fields.len]std.builtin.Type.StructField = undefined;
for (&struct_fields, @typeInfo(E).@"enum".fields) |*struct_field, enum_field| {
struct_field.* = .{
.name = enum_field.name,
.type = Data,
.default_value_ptr = if (field_default) |d| @as(?*const anyopaque, @ptrCast(&d)) else null,
.is_comptime = false,
.alignment = if (@sizeOf(Data) > 0) @alignOf(Data) else 0,
};
}
return @Type(.{ .@"struct" = .{
.layout = .auto,
.fields = &struct_fields,
.decls = &.{},
.is_tuple = false,
} });
const default_ptr: ?*const anyopaque = if (field_default) |d| @ptrCast(&d) else null;
return @Struct(.auto, null, std.meta.fieldNames(E), &@splat(Data), &@splat(.{ .default_value_ptr = default_ptr }));
}

/// Looks up the supplied fields in the given enum type.
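`EnumFieldStruct` also shows how per-field defaults move into the new `std.builtin.Type.StructField.Attributes`: instead of building one `StructField` per field, a single `.{ .default_value_ptr = ... }` attribute is splatted across all fields. A minimal sketch of that pattern, assuming the `@Struct` signature used above; `Flags` and `default_false` are hypothetical names:

```zig
const std = @import("std");

// Hypothetical example: every field is a `bool` defaulting to `false`,
// following the splatted-attribute pattern from EnumFieldStruct above.
const default_false: bool = false;
const Flags = @Struct(.auto, null, &[_][]const u8{ "read", "write" }, &@splat(bool), &@splat(.{
    .default_value_ptr = &default_false,
}));

test Flags {
    const f: Flags = .{};
    try std.testing.expect(!f.read and !f.write);
}
```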
@ -1532,19 +1518,15 @@ test "EnumIndexer empty" {
test "EnumIndexer large dense unsorted" {
@setEvalBranchQuota(500_000); // many `comptimePrint`s
// Make an enum with 500 fields with values in *descending* order.
const E = @Type(.{ .@"enum" = .{
.tag_type = u32,
.fields = comptime fields: {
var fields: [500]EnumField = undefined;
for (&fields, 0..) |*f, i| f.* = .{
.name = std.fmt.comptimePrint("f{d}", .{i}),
.value = 500 - i,
};
break :fields &fields;
},
.decls = &.{},
.is_exhaustive = true,
} });
const E = @Enum(u32, .exhaustive, names: {
var names: [500][]const u8 = undefined;
for (&names, 0..) |*name, i| name.* = std.fmt.comptimePrint("f{d}", .{i});
break :names &names;
}, vals: {
var vals: [500]u32 = undefined;
for (&vals, 0..) |*val, i| val.* = 500 - i;
break :vals &vals;
});
const Indexer = EnumIndexer(E);
try testing.expectEqual(E.f0, Indexer.keyForIndex(499));
try testing.expectEqual(E.f499, Indexer.keyForIndex(0));

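The `EnumIndexer` test shows the corresponding enum builtin: `@Enum(tag_type, mode, field_names, field_values)` replaces `@Type(.{ .@"enum" = ... })`. A minimal sketch, assuming that four-argument shape as used above; `Color` is a hypothetical example type:

```zig
const std = @import("std");

// Hypothetical example: reify `enum(u8) { red = 0, green = 1, blue = 2 }`.
const Color = @Enum(u8, .exhaustive, &[_][]const u8{ "red", "green", "blue" }, &[_]u8{ 0, 1, 2 });

test Color {
    try std.testing.expectEqual(@as(u8, 1), @intFromEnum(Color.green));
}
```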
@ -279,7 +279,7 @@ pub fn Alt(
/// Helper for calling alternate format methods besides one named "format".
pub fn alt(
context: anytype,
comptime func_name: @TypeOf(.enum_literal),
comptime func_name: @EnumLiteral(),
) Alt(@TypeOf(context), @field(@TypeOf(context), @tagName(func_name))) {
return .{ .data = context };
}

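`@EnumLiteral()` gives a direct name for the enum-literal type previously spelled `@TypeOf(.enum_literal)`, a substitution repeated in `std.log` and `std.Options` later in this diff. A minimal sketch of a comptime parameter of that type, mirroring `alt` above; the `pick` helper is hypothetical:

```zig
const std = @import("std");

// Hypothetical helper: accept a comptime enum literal, as `alt` does above.
fn pick(comptime which: @EnumLiteral()) u32 {
    if (which == .left) return 1;
    if (which == .right) return 2;
    return 0;
}

test pick {
    try std.testing.expectEqual(@as(u32, 1), pick(.left));
    try std.testing.expectEqual(@as(u32, 2), pick(.right));
}
```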
@ -61,7 +61,7 @@ pub fn render(buf: []u8, value: anytype, options: Options) Error![]const u8 {
|
||||
|
||||
const T = @TypeOf(v);
|
||||
comptime std.debug.assert(@typeInfo(T) == .float);
|
||||
const I = @Type(.{ .int = .{ .signedness = .unsigned, .bits = @bitSizeOf(T) } });
|
||||
const I = @Int(.unsigned, @bitSizeOf(T));
|
||||
|
||||
const DT = if (@bitSizeOf(T) <= 64) u64 else u128;
|
||||
const tables = switch (DT) {
|
||||
@ -1516,7 +1516,7 @@ const FLOAT128_POW5_INV_ERRORS: [154]u64 = .{
|
||||
const builtin = @import("builtin");
|
||||
|
||||
fn check(comptime T: type, value: T, comptime expected: []const u8) !void {
|
||||
const I = @Type(.{ .int = .{ .signedness = .unsigned, .bits = @bitSizeOf(T) } });
|
||||
const I = @Int(.unsigned, @bitSizeOf(T));
|
||||
|
||||
var buf: [6000]u8 = undefined;
|
||||
const value_bits: I = @bitCast(value);
|
||||
|
||||
@ -42,7 +42,7 @@ pub fn int(input: anytype) @TypeOf(input) {
|
||||
const info = @typeInfo(@TypeOf(input)).int;
|
||||
const bits = info.bits;
|
||||
// Convert input to unsigned integer (easier to deal with)
|
||||
const Uint = @Type(.{ .int = .{ .bits = bits, .signedness = .unsigned } });
|
||||
const Uint = @Int(.unsigned, bits);
|
||||
const u_input: Uint = @bitCast(input);
|
||||
if (bits > 256) @compileError("bit widths > 256 are unsupported, use std.hash.autoHash functionality.");
|
||||
// For bit widths that don't have a dedicated function, use a heuristic
|
||||
|
||||
@ -91,10 +91,7 @@ pub fn hash(hasher: anytype, key: anytype, comptime strat: HashStrategy) void {
|
||||
// Help the optimizer see that hashing an int is easy by inlining!
|
||||
// TODO Check if the situation is better after #561 is resolved.
|
||||
.int => |int| switch (int.signedness) {
|
||||
.signed => hash(hasher, @as(@Type(.{ .int = .{
|
||||
.bits = int.bits,
|
||||
.signedness = .unsigned,
|
||||
} }), @bitCast(key)), strat),
|
||||
.signed => hash(hasher, @as(@Int(.unsigned, int.bits), @bitCast(key)), strat),
|
||||
.unsigned => {
|
||||
if (std.meta.hasUniqueRepresentation(Key)) {
|
||||
@call(.always_inline, Hasher.update, .{ hasher, std.mem.asBytes(&key) });
|
||||
|
||||
@ -57,13 +57,13 @@ pub const default_level: Level = switch (builtin.mode) {
|
||||
};
|
||||
|
||||
pub const ScopeLevel = struct {
|
||||
scope: @Type(.enum_literal),
|
||||
scope: @EnumLiteral(),
|
||||
level: Level,
|
||||
};
|
||||
|
||||
fn log(
|
||||
comptime level: Level,
|
||||
comptime scope: @Type(.enum_literal),
|
||||
comptime scope: @EnumLiteral(),
|
||||
comptime format: []const u8,
|
||||
args: anytype,
|
||||
) void {
|
||||
@ -73,7 +73,7 @@ fn log(
|
||||
}
|
||||
|
||||
/// Determine if a specific log message level and scope combination are enabled for logging.
|
||||
pub fn logEnabled(comptime level: Level, comptime scope: @Type(.enum_literal)) bool {
|
||||
pub fn logEnabled(comptime level: Level, comptime scope: @EnumLiteral()) bool {
|
||||
inline for (std.options.log_scope_levels) |scope_level| {
|
||||
if (scope_level.scope == scope) return @intFromEnum(level) <= @intFromEnum(scope_level.level);
|
||||
}
|
||||
@ -87,7 +87,7 @@ pub fn logEnabled(comptime level: Level, comptime scope: @Type(.enum_literal)) b
|
||||
/// function returns.
|
||||
pub fn defaultLog(
|
||||
comptime level: Level,
|
||||
comptime scope: @Type(.enum_literal),
|
||||
comptime scope: @EnumLiteral(),
|
||||
comptime format: []const u8,
|
||||
args: anytype,
|
||||
) void {
|
||||
@ -115,7 +115,7 @@ pub fn defaultLog(
|
||||
|
||||
/// Returns a scoped logging namespace that logs all messages using the scope
|
||||
/// provided here.
|
||||
pub fn scoped(comptime scope: @Type(.enum_literal)) type {
|
||||
pub fn scoped(comptime scope: @EnumLiteral()) type {
|
||||
return struct {
|
||||
/// Log an error message. This log level is intended to be used
|
||||
/// when something has gone wrong. This might be recoverable or might
|
||||
|
||||
@ -450,12 +450,7 @@ pub fn wrap(x: anytype, r: anytype) @TypeOf(x) {
|
||||
// in the rare usecase of r not being comptime_int or float,
|
||||
// take the penalty of having an intermediary type conversion,
|
||||
// otherwise the alternative is to unwind iteratively to avoid overflow
|
||||
const R = comptime do: {
|
||||
var info = info_r;
|
||||
info.int.bits += 1;
|
||||
info.int.signedness = .signed;
|
||||
break :do @Type(info);
|
||||
};
|
||||
const R = @Int(.signed, info_r.int.bits + 1);
|
||||
const radius: if (info_r.int.signedness == .signed) @TypeOf(r) else R = r;
|
||||
return @intCast(@mod(x - radius, 2 * @as(R, r)) - r); // provably impossible to overflow
|
||||
},
|
||||
@ -799,14 +794,14 @@ pub fn Log2IntCeil(comptime T: type) type {
|
||||
pub fn IntFittingRange(comptime from: comptime_int, comptime to: comptime_int) type {
|
||||
assert(from <= to);
|
||||
const signedness: std.builtin.Signedness = if (from < 0) .signed else .unsigned;
|
||||
return @Type(.{ .int = .{
|
||||
.signedness = signedness,
|
||||
.bits = @as(u16, @intFromBool(signedness == .signed)) +
|
||||
return @Int(
|
||||
signedness,
|
||||
@as(u16, @intFromBool(signedness == .signed)) +
|
||||
switch (if (from < 0) @max(@abs(from) - 1, to) else to) {
|
||||
0 => 0,
|
||||
else => |pos_max| 1 + log2(pos_max),
|
||||
},
|
||||
} });
|
||||
);
|
||||
}
|
||||
|
||||
test IntFittingRange {
|
||||
@ -1107,9 +1102,14 @@ test cast {
pub const AlignCastError = error{UnalignedMemory};

fn AlignCastResult(comptime alignment: Alignment, comptime Ptr: type) type {
var ptr_info = @typeInfo(Ptr);
ptr_info.pointer.alignment = alignment.toByteUnits();
return @Type(ptr_info);
const orig = @typeInfo(Ptr).pointer;
return @Pointer(orig.size, .{
.@"const" = orig.is_const,
.@"volatile" = orig.is_volatile,
.@"allowzero" = orig.is_allowzero,
.@"align" = alignment.toByteUnits(),
.@"addrspace" = orig.address_space,
}, orig.child, orig.sentinel());
}

/// Align cast a pointer but return an error if it's the wrong alignment

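`AlignCastResult` shows the new `@Pointer(size, attributes, child, sentinel)` builtin, whose attributes use the `@"const"`, `@"volatile"`, `@"allowzero"`, `@"align"`, and `@"addrspace"` fields added to `std.builtin.Type.Pointer.Attributes` earlier in this diff. A minimal sketch, assuming that signature; `AlignedBytes` is a hypothetical alias:

```zig
const std = @import("std");

// Hypothetical example: a read-only byte slice with 16-byte alignment,
// using the same call shape as AlignCastResult above.
const AlignedBytes = @Pointer(.slice, .{ .@"const" = true, .@"align" = 16 }, u8, null);

comptime {
    std.debug.assert(AlignedBytes == []align(16) const u8);
}
```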
@ -2787,11 +2787,11 @@ test "bitNotWrap more than two limbs" {
|
||||
const bits = @bitSizeOf(Limb) * 4 + 2;
|
||||
|
||||
try res.bitNotWrap(&a, .unsigned, bits);
|
||||
const Unsigned = @Type(.{ .int = .{ .signedness = .unsigned, .bits = bits } });
|
||||
const Unsigned = @Int(.unsigned, bits);
|
||||
try testing.expectEqual((try res.toInt(Unsigned)), ~@as(Unsigned, maxInt(Limb)));
|
||||
|
||||
try res.bitNotWrap(&a, .signed, bits);
|
||||
const Signed = @Type(.{ .int = .{ .signedness = .signed, .bits = bits } });
|
||||
const Signed = @Int(.signed, bits);
|
||||
try testing.expectEqual((try res.toInt(Signed)), ~@as(Signed, maxInt(Limb)));
|
||||
}
|
||||
|
||||
|
||||
@ -14,22 +14,10 @@ pub fn FloatRepr(comptime Float: type) type {
|
||||
exponent: BiasedExponent,
|
||||
sign: std.math.Sign,
|
||||
|
||||
pub const StoredMantissa = @Type(.{ .int = .{
|
||||
.signedness = .unsigned,
|
||||
.bits = floatMantissaBits(Float),
|
||||
} });
|
||||
pub const Mantissa = @Type(.{ .int = .{
|
||||
.signedness = .unsigned,
|
||||
.bits = 1 + fractional_bits,
|
||||
} });
|
||||
pub const Exponent = @Type(.{ .int = .{
|
||||
.signedness = .signed,
|
||||
.bits = exponent_bits,
|
||||
} });
|
||||
pub const BiasedExponent = enum(@Type(.{ .int = .{
|
||||
.signedness = .unsigned,
|
||||
.bits = exponent_bits,
|
||||
} })) {
|
||||
pub const StoredMantissa = @Int(.unsigned, floatMantissaBits(Float));
|
||||
pub const Mantissa = @Int(.unsigned, 1 + fractional_bits);
|
||||
pub const Exponent = @Int(.signed, exponent_bits);
|
||||
pub const BiasedExponent = enum(@Int(.unsigned, exponent_bits)) {
|
||||
denormal = 0,
|
||||
min_normal = 1,
|
||||
zero = (1 << (exponent_bits - 1)) - 1,
|
||||
@ -56,14 +44,8 @@ pub fn FloatRepr(comptime Float: type) type {
|
||||
fraction: Fraction,
|
||||
exponent: Normalized.Exponent,
|
||||
|
||||
pub const Fraction = @Type(.{ .int = .{
|
||||
.signedness = .unsigned,
|
||||
.bits = fractional_bits,
|
||||
} });
|
||||
pub const Exponent = @Type(.{ .int = .{
|
||||
.signedness = .signed,
|
||||
.bits = 1 + exponent_bits,
|
||||
} });
|
||||
pub const Fraction = @Int(.unsigned, fractional_bits);
|
||||
pub const Exponent = @Int(.signed, 1 + exponent_bits);
|
||||
|
||||
/// This currently truncates denormal values, which needs to be fixed before this can be used to
|
||||
/// produce a rounded value.
|
||||
@ -122,7 +104,7 @@ inline fn mantissaOne(comptime T: type) comptime_int {
|
||||
|
||||
/// Creates floating point type T from an unbiased exponent and raw mantissa.
|
||||
inline fn reconstructFloat(comptime T: type, comptime exponent: comptime_int, comptime mantissa: comptime_int) T {
|
||||
const TBits = @Type(.{ .int = .{ .signedness = .unsigned, .bits = @bitSizeOf(T) } });
|
||||
const TBits = @Int(.unsigned, @bitSizeOf(T));
|
||||
const biased_exponent = @as(TBits, exponent + floatExponentMax(T));
|
||||
return @as(T, @bitCast((biased_exponent << floatMantissaBits(T)) | @as(TBits, mantissa)));
|
||||
}
|
||||
@ -209,7 +191,7 @@ pub inline fn floatEps(comptime T: type) T {
|
||||
pub inline fn floatEpsAt(comptime T: type, x: T) T {
|
||||
switch (@typeInfo(T)) {
|
||||
.float => |F| {
|
||||
const U: type = @Type(.{ .int = .{ .signedness = .unsigned, .bits = F.bits } });
|
||||
const U: type = @Int(.unsigned, F.bits);
|
||||
const u: U = @bitCast(x);
|
||||
const y: T = @bitCast(u ^ 1);
|
||||
return @abs(x - y);
|
||||
|
||||
@ -33,10 +33,7 @@ pub fn log2(x: anytype) @TypeOf(x) {
|
||||
return result;
|
||||
},
|
||||
.int => |int_info| math.log2_int(switch (int_info.signedness) {
|
||||
.signed => @Type(.{ .int = .{
|
||||
.signedness = .unsigned,
|
||||
.bits = int_info.bits -| 1,
|
||||
} }),
|
||||
.signed => @Int(.unsigned, int_info.bits -| 1),
|
||||
.unsigned => T,
|
||||
}, @intCast(x)),
|
||||
else => @compileError("log2 not implemented for " ++ @typeName(T)),
|
||||
|
||||
@ -65,7 +65,7 @@ test "log_int" {
|
||||
// Test all unsigned integers with 2, 3, ..., 64 bits.
|
||||
// We cannot test 0 or 1 bits since base must be > 1.
|
||||
inline for (2..64 + 1) |bits| {
|
||||
const T = @Type(.{ .int = .{ .signedness = .unsigned, .bits = @intCast(bits) } });
|
||||
const T = @Int(.unsigned, @intCast(bits));
|
||||
|
||||
// for base = 2, 3, ..., min(maxInt(T),1024)
|
||||
var base: T = 1;
|
||||
|
||||
@ -6,10 +6,7 @@ const expect = std.testing.expect;
|
||||
pub fn signbit(x: anytype) bool {
|
||||
return switch (@typeInfo(@TypeOf(x))) {
|
||||
.int, .comptime_int => x,
|
||||
.float => |float| @as(@Type(.{ .int = .{
|
||||
.signedness = .signed,
|
||||
.bits = float.bits,
|
||||
} }), @bitCast(x)),
|
||||
.float => |float| @as(@Int(.signed, float.bits), @bitCast(x)),
|
||||
.comptime_float => @as(i128, @bitCast(@as(f128, x))), // any float type will do
|
||||
else => @compileError("std.math.signbit does not support " ++ @typeName(@TypeOf(x))),
|
||||
} < 0;
|
||||
|
||||
@ -80,7 +80,7 @@ test sqrt_int {
|
||||
/// Returns the return type `sqrt` will return given an operand of type `T`.
|
||||
pub fn Sqrt(comptime T: type) type {
|
||||
return switch (@typeInfo(T)) {
|
||||
.int => |int| @Type(.{ .int = .{ .signedness = .unsigned, .bits = (int.bits + 1) / 2 } }),
|
||||
.int => |int| @Int(.unsigned, (int.bits + 1) / 2),
|
||||
else => T,
|
||||
};
|
||||
}
|
||||
|
||||
163
lib/std/mem.zig
@ -846,17 +846,18 @@ fn Span(comptime T: type) type {
|
||||
return ?Span(optional_info.child);
|
||||
},
|
||||
.pointer => |ptr_info| {
|
||||
var new_ptr_info = ptr_info;
|
||||
switch (ptr_info.size) {
|
||||
.c => {
|
||||
new_ptr_info.sentinel_ptr = &@as(ptr_info.child, 0);
|
||||
new_ptr_info.is_allowzero = false;
|
||||
},
|
||||
.many => if (ptr_info.sentinel() == null) @compileError("invalid type given to std.mem.span: " ++ @typeName(T)),
|
||||
const new_sentinel: ?ptr_info.child = switch (ptr_info.size) {
|
||||
.one, .slice => @compileError("invalid type given to std.mem.span: " ++ @typeName(T)),
|
||||
}
|
||||
new_ptr_info.size = .slice;
|
||||
return @Type(.{ .pointer = new_ptr_info });
|
||||
.many => ptr_info.sentinel() orelse @compileError("invalid type given to std.mem.span: " ++ @typeName(T)),
|
||||
.c => 0,
|
||||
};
|
||||
return @Pointer(.slice, .{
|
||||
.@"const" = ptr_info.is_const,
|
||||
.@"volatile" = ptr_info.is_volatile,
|
||||
.@"allowzero" = ptr_info.is_allowzero and ptr_info.size != .c,
|
||||
.@"align" = ptr_info.alignment,
|
||||
.@"addrspace" = ptr_info.address_space,
|
||||
}, ptr_info.child, new_sentinel);
|
||||
},
|
||||
else => {},
|
||||
}
|
||||
@ -910,45 +911,18 @@ fn SliceTo(comptime T: type, comptime end: std.meta.Elem(T)) type {
|
||||
return ?SliceTo(optional_info.child, end);
|
||||
},
|
||||
.pointer => |ptr_info| {
|
||||
var new_ptr_info = ptr_info;
|
||||
new_ptr_info.size = .slice;
|
||||
switch (ptr_info.size) {
|
||||
.one => switch (@typeInfo(ptr_info.child)) {
|
||||
.array => |array_info| {
|
||||
new_ptr_info.child = array_info.child;
|
||||
// The return type must only be sentinel terminated if we are guaranteed
|
||||
// to find the value searched for, which is only the case if it matches
|
||||
// the sentinel of the type passed.
|
||||
if (array_info.sentinel()) |s| {
|
||||
if (end == s) {
|
||||
new_ptr_info.sentinel_ptr = &end;
|
||||
} else {
|
||||
new_ptr_info.sentinel_ptr = null;
|
||||
}
|
||||
}
|
||||
},
|
||||
else => {},
|
||||
},
|
||||
.many, .slice => {
|
||||
// The return type must only be sentinel terminated if we are guaranteed
|
||||
// to find the value searched for, which is only the case if it matches
|
||||
// the sentinel of the type passed.
|
||||
if (ptr_info.sentinel()) |s| {
|
||||
if (end == s) {
|
||||
new_ptr_info.sentinel_ptr = &end;
|
||||
} else {
|
||||
new_ptr_info.sentinel_ptr = null;
|
||||
}
|
||||
}
|
||||
},
|
||||
.c => {
|
||||
new_ptr_info.sentinel_ptr = &end;
|
||||
// C pointers are always allowzero, but we don't want the return type to be.
|
||||
assert(new_ptr_info.is_allowzero);
|
||||
new_ptr_info.is_allowzero = false;
|
||||
},
|
||||
}
|
||||
return @Type(.{ .pointer = new_ptr_info });
|
||||
const Elem = std.meta.Elem(T);
|
||||
const have_sentinel: bool = switch (ptr_info.size) {
|
||||
.one, .slice, .many => if (std.meta.sentinel(T)) |s| s == end else false,
|
||||
.c => false,
|
||||
};
|
||||
return @Pointer(.slice, .{
|
||||
.@"const" = ptr_info.is_const,
|
||||
.@"volatile" = ptr_info.is_volatile,
|
||||
.@"allowzero" = ptr_info.is_allowzero and ptr_info.size != .c,
|
||||
.@"align" = ptr_info.alignment,
|
||||
.@"addrspace" = ptr_info.address_space,
|
||||
}, Elem, if (have_sentinel) end else null);
|
||||
},
|
||||
else => {},
|
||||
}
|
||||
@ -3951,38 +3925,25 @@ test reverse {
|
||||
}
|
||||
}
|
||||
fn ReverseIterator(comptime T: type) type {
|
||||
const Pointer = blk: {
|
||||
switch (@typeInfo(T)) {
|
||||
.pointer => |ptr_info| switch (ptr_info.size) {
|
||||
.one => switch (@typeInfo(ptr_info.child)) {
|
||||
.array => |array_info| {
|
||||
var new_ptr_info = ptr_info;
|
||||
new_ptr_info.size = .many;
|
||||
new_ptr_info.child = array_info.child;
|
||||
new_ptr_info.sentinel_ptr = array_info.sentinel_ptr;
|
||||
break :blk @Type(.{ .pointer = new_ptr_info });
|
||||
},
|
||||
else => {},
|
||||
},
|
||||
.slice => {
|
||||
var new_ptr_info = ptr_info;
|
||||
new_ptr_info.size = .many;
|
||||
break :blk @Type(.{ .pointer = new_ptr_info });
|
||||
},
|
||||
else => {},
|
||||
},
|
||||
else => {},
|
||||
}
|
||||
@compileError("expected slice or pointer to array, found '" ++ @typeName(T) ++ "'");
|
||||
const ptr = switch (@typeInfo(T)) {
|
||||
.pointer => |ptr| ptr,
|
||||
else => @compileError("expected slice or pointer to array, found '" ++ @typeName(T) ++ "'"),
|
||||
};
|
||||
const Element = std.meta.Elem(Pointer);
|
||||
const ElementPointer = @Type(.{ .pointer = ptr: {
|
||||
var ptr = @typeInfo(Pointer).pointer;
|
||||
ptr.size = .one;
|
||||
ptr.child = Element;
|
||||
ptr.sentinel_ptr = null;
|
||||
break :ptr ptr;
|
||||
} });
|
||||
switch (ptr.size) {
|
||||
.slice => {},
|
||||
.one => if (@typeInfo(ptr.child) != .array) @compileError("expected slice or pointer to array, found '" ++ @typeName(T) ++ "'"),
|
||||
.many, .c => @compileError("expected slice or pointer to array, found '" ++ @typeName(T) ++ "'"),
|
||||
}
|
||||
const Element = std.meta.Elem(T);
|
||||
const attrs: std.builtin.Type.Pointer.Attributes = .{
|
||||
.@"const" = ptr.is_const,
|
||||
.@"volatile" = ptr.is_volatile,
|
||||
.@"allowzero" = ptr.is_allowzero,
|
||||
.@"align" = ptr.alignment,
|
||||
.@"addrspace" = ptr.address_space,
|
||||
};
|
||||
const Pointer = @Pointer(.many, attrs, Element, std.meta.sentinel(T));
|
||||
const ElementPointer = @Pointer(.one, attrs, Element, null);
|
||||
return struct {
|
||||
ptr: Pointer,
|
||||
index: usize,
|
||||
@ -4342,19 +4303,14 @@ fn CopyPtrAttrs(
|
||||
comptime size: std.builtin.Type.Pointer.Size,
|
||||
comptime child: type,
|
||||
) type {
|
||||
const info = @typeInfo(source).pointer;
|
||||
return @Type(.{
|
||||
.pointer = .{
|
||||
.size = size,
|
||||
.is_const = info.is_const,
|
||||
.is_volatile = info.is_volatile,
|
||||
.is_allowzero = info.is_allowzero,
|
||||
.alignment = info.alignment,
|
||||
.address_space = info.address_space,
|
||||
.child = child,
|
||||
.sentinel_ptr = null,
|
||||
},
|
||||
});
|
||||
const ptr = @typeInfo(source).pointer;
|
||||
return @Pointer(size, .{
|
||||
.@"const" = ptr.is_const,
|
||||
.@"volatile" = ptr.is_volatile,
|
||||
.@"allowzero" = ptr.is_allowzero,
|
||||
.@"align" = ptr.alignment,
|
||||
.@"addrspace" = ptr.address_space,
|
||||
}, child, null);
|
||||
}
|
||||
|
||||
fn AsBytesReturnType(comptime P: type) type {
|
||||
@ -4936,19 +4892,14 @@ test "freeing empty string with null-terminated sentinel" {
|
||||
/// Returns a slice with the given new alignment,
|
||||
/// all other pointer attributes copied from `AttributeSource`.
|
||||
fn AlignedSlice(comptime AttributeSource: type, comptime new_alignment: usize) type {
|
||||
const info = @typeInfo(AttributeSource).pointer;
|
||||
return @Type(.{
|
||||
.pointer = .{
|
||||
.size = .slice,
|
||||
.is_const = info.is_const,
|
||||
.is_volatile = info.is_volatile,
|
||||
.is_allowzero = info.is_allowzero,
|
||||
.alignment = new_alignment,
|
||||
.address_space = info.address_space,
|
||||
.child = info.child,
|
||||
.sentinel_ptr = null,
|
||||
},
|
||||
});
|
||||
const ptr = @typeInfo(AttributeSource).pointer;
|
||||
return @Pointer(.slice, .{
|
||||
.@"const" = ptr.is_const,
|
||||
.@"volatile" = ptr.is_volatile,
|
||||
.@"allowzero" = ptr.is_allowzero,
|
||||
.@"align" = new_alignment,
|
||||
.@"addrspace" = ptr.address_space,
|
||||
}, ptr.child, null);
|
||||
}
|
||||
|
||||
/// Returns the largest slice in the given bytes that conforms to the new alignment,
|
||||
|
||||
186
lib/std/meta.zig
@ -171,58 +171,34 @@ pub fn Sentinel(comptime T: type, comptime sentinel_val: Elem(T)) type {
|
||||
switch (@typeInfo(T)) {
|
||||
.pointer => |info| switch (info.size) {
|
||||
.one => switch (@typeInfo(info.child)) {
|
||||
.array => |array_info| return @Type(.{
|
||||
.pointer = .{
|
||||
.size = info.size,
|
||||
.is_const = info.is_const,
|
||||
.is_volatile = info.is_volatile,
|
||||
.alignment = info.alignment,
|
||||
.address_space = info.address_space,
|
||||
.child = @Type(.{
|
||||
.array = .{
|
||||
.len = array_info.len,
|
||||
.child = array_info.child,
|
||||
.sentinel_ptr = @as(?*const anyopaque, @ptrCast(&sentinel_val)),
|
||||
},
|
||||
}),
|
||||
.is_allowzero = info.is_allowzero,
|
||||
.sentinel_ptr = info.sentinel_ptr,
|
||||
},
|
||||
}),
|
||||
.array => |array_info| return @Pointer(.one, .{
|
||||
.@"const" = info.is_const,
|
||||
.@"volatile" = info.is_volatile,
|
||||
.@"allowzero" = info.is_allowzero,
|
||||
.@"align" = info.alignment,
|
||||
.@"addrspace" = info.address_space,
|
||||
}, [array_info.len:sentinel_val]array_info.child, null),
|
||||
else => {},
|
||||
},
|
||||
.many, .slice => return @Type(.{
|
||||
.pointer = .{
|
||||
.size = info.size,
|
||||
.is_const = info.is_const,
|
||||
.is_volatile = info.is_volatile,
|
||||
.alignment = info.alignment,
|
||||
.address_space = info.address_space,
|
||||
.child = info.child,
|
||||
.is_allowzero = info.is_allowzero,
|
||||
.sentinel_ptr = @as(?*const anyopaque, @ptrCast(&sentinel_val)),
|
||||
},
|
||||
}),
|
||||
.many, .slice => |size| return @Pointer(size, .{
|
||||
.@"const" = info.is_const,
|
||||
.@"volatile" = info.is_volatile,
|
||||
.@"allowzero" = info.is_allowzero,
|
||||
.@"align" = info.alignment,
|
||||
.@"addrspace" = info.address_space,
|
||||
}, info.child, sentinel_val),
|
||||
else => {},
|
||||
},
|
||||
.optional => |info| switch (@typeInfo(info.child)) {
|
||||
.pointer => |ptr_info| switch (ptr_info.size) {
|
||||
.many => return @Type(.{
|
||||
.optional = .{
|
||||
.child = @Type(.{
|
||||
.pointer = .{
|
||||
.size = ptr_info.size,
|
||||
.is_const = ptr_info.is_const,
|
||||
.is_volatile = ptr_info.is_volatile,
|
||||
.alignment = ptr_info.alignment,
|
||||
.address_space = ptr_info.address_space,
|
||||
.many => return ?@Pointer(.many, .{
|
||||
.@"const" = ptr_info.is_const,
|
||||
.@"volatile" = ptr_info.is_volatile,
|
||||
.@"allowzero" = ptr_info.is_allowzero,
|
||||
.@"align" = ptr_info.alignment,
|
||||
.@"addrspace" = ptr_info.address_space,
|
||||
.child = ptr_info.child,
|
||||
.is_allowzero = ptr_info.is_allowzero,
|
||||
.sentinel_ptr = @as(?*const anyopaque, @ptrCast(&sentinel_val)),
|
||||
},
|
||||
}),
|
||||
},
|
||||
}),
|
||||
}, ptr_info.child, sentinel_val),
|
||||
else => {},
|
||||
},
|
||||
else => {},
|
||||
@ -487,46 +463,22 @@ test tags {
|
||||
|
||||
/// Returns an enum with a variant named after each field of `T`.
|
||||
pub fn FieldEnum(comptime T: type) type {
|
||||
const field_infos = fields(T);
|
||||
const field_names = fieldNames(T);
|
||||
|
||||
if (field_infos.len == 0) {
|
||||
return @Type(.{
|
||||
.@"enum" = .{
|
||||
.tag_type = u0,
|
||||
.fields = &.{},
|
||||
.decls = &.{},
|
||||
.is_exhaustive = true,
|
||||
},
|
||||
});
|
||||
}
|
||||
|
||||
if (@typeInfo(T) == .@"union") {
|
||||
if (@typeInfo(T).@"union".tag_type) |tag_type| {
|
||||
for (std.enums.values(tag_type), 0..) |v, i| {
|
||||
switch (@typeInfo(T)) {
|
||||
.@"union" => |@"union"| if (@"union".tag_type) |EnumTag| {
|
||||
for (std.enums.values(EnumTag), 0..) |v, i| {
|
||||
if (@intFromEnum(v) != i) break; // enum values not consecutive
|
||||
if (!std.mem.eql(u8, @tagName(v), field_infos[i].name)) break; // fields out of order
|
||||
if (!std.mem.eql(u8, @tagName(v), field_names[i])) break; // fields out of order
|
||||
} else {
|
||||
return tag_type;
|
||||
}
|
||||
return EnumTag;
|
||||
}
|
||||
},
|
||||
else => {},
|
||||
}
|
||||
|
||||
var enumFields: [field_infos.len]std.builtin.Type.EnumField = undefined;
|
||||
var decls = [_]std.builtin.Type.Declaration{};
|
||||
inline for (field_infos, 0..) |field, i| {
|
||||
enumFields[i] = .{
|
||||
.name = field.name,
|
||||
.value = i,
|
||||
};
|
||||
}
|
||||
return @Type(.{
|
||||
.@"enum" = .{
|
||||
.tag_type = std.math.IntFittingRange(0, field_infos.len - 1),
|
||||
.fields = &enumFields,
|
||||
.decls = &decls,
|
||||
.is_exhaustive = true,
|
||||
},
|
||||
});
|
||||
const IntTag = std.math.IntFittingRange(0, field_names.len -| 1);
|
||||
return @Enum(IntTag, .exhaustive, field_names, &std.simd.iota(IntTag, field_names.len));
|
||||
}
|
||||
|
||||
fn expectEqualEnum(expected: anytype, actual: @TypeOf(expected)) !void {
|
||||
@ -583,20 +535,11 @@ test FieldEnum {
|
||||
}
|
||||
|
||||
pub fn DeclEnum(comptime T: type) type {
|
||||
const fieldInfos = std.meta.declarations(T);
|
||||
var enumDecls: [fieldInfos.len]std.builtin.Type.EnumField = undefined;
|
||||
var decls = [_]std.builtin.Type.Declaration{};
|
||||
inline for (fieldInfos, 0..) |field, i| {
|
||||
enumDecls[i] = .{ .name = field.name, .value = i };
|
||||
}
|
||||
return @Type(.{
|
||||
.@"enum" = .{
|
||||
.tag_type = std.math.IntFittingRange(0, if (fieldInfos.len == 0) 0 else fieldInfos.len - 1),
|
||||
.fields = &enumDecls,
|
||||
.decls = &decls,
|
||||
.is_exhaustive = true,
|
||||
},
|
||||
});
|
||||
const decls = declarations(T);
|
||||
var names: [decls.len][]const u8 = undefined;
|
||||
for (&names, decls) |*name, decl| name.* = decl.name;
|
||||
const IntTag = std.math.IntFittingRange(0, decls.len -| 1);
|
||||
return @Enum(IntTag, .exhaustive, &names, &std.simd.iota(IntTag, decls.len));
|
||||
}
|
||||
|
||||
test DeclEnum {
|
||||
@ -868,25 +811,26 @@ pub fn declList(comptime Namespace: type, comptime Decl: type) []const *const De
}
}

/// Deprecated: use @Int
pub fn Int(comptime signedness: std.builtin.Signedness, comptime bit_count: u16) type {
return @Type(.{
.int = .{
.signedness = signedness,
.bits = bit_count,
},
});
return @Int(signedness, bit_count);
}

pub fn Float(comptime bit_count: u8) type {
return @Type(.{
.float = .{ .bits = bit_count },
});
return switch (bit_count) {
16 => f16,
32 => f32,
64 => f64,
80 => f80,
128 => f128,
else => @compileError("invalid float bit count"),
};
}

test Float {
try testing.expectEqual(f16, Float(16));
try testing.expectEqual(f32, Float(32));
try testing.expectEqual(f64, Float(64));
try testing.expectEqual(f80, Float(80));
try testing.expectEqual(f128, Float(128));
}

@ -912,42 +856,14 @@ pub fn ArgsTuple(comptime Function: type) type {
|
||||
argument_field_list[i] = T;
|
||||
}
|
||||
|
||||
return CreateUniqueTuple(argument_field_list.len, argument_field_list);
|
||||
return Tuple(&argument_field_list);
|
||||
}
|
||||
|
||||
/// For a given anonymous list of types, returns a new tuple type
|
||||
/// with those types as fields.
|
||||
/// Deprecated; use `@Tuple` instead.
|
||||
///
|
||||
/// Examples:
|
||||
/// - `Tuple(&[_]type {})` ⇒ `tuple { }`
|
||||
/// - `Tuple(&[_]type {f32})` ⇒ `tuple { f32 }`
|
||||
/// - `Tuple(&[_]type {f32,u32})` ⇒ `tuple { f32, u32 }`
|
||||
/// To be removed after Zig 0.16.0 releases.
|
||||
pub fn Tuple(comptime types: []const type) type {
|
||||
return CreateUniqueTuple(types.len, types[0..types.len].*);
|
||||
}
|
||||
|
||||
fn CreateUniqueTuple(comptime N: comptime_int, comptime types: [N]type) type {
|
||||
var tuple_fields: [types.len]std.builtin.Type.StructField = undefined;
|
||||
inline for (types, 0..) |T, i| {
|
||||
@setEvalBranchQuota(10_000);
|
||||
var num_buf: [128]u8 = undefined;
|
||||
tuple_fields[i] = .{
|
||||
.name = std.fmt.bufPrintSentinel(&num_buf, "{d}", .{i}, 0) catch unreachable,
|
||||
.type = T,
|
||||
.default_value_ptr = null,
|
||||
.is_comptime = false,
|
||||
.alignment = @alignOf(T),
|
||||
};
|
||||
}
|
||||
|
||||
return @Type(.{
|
||||
.@"struct" = .{
|
||||
.is_tuple = true,
|
||||
.layout = .auto,
|
||||
.decls = &.{},
|
||||
.fields = &tuple_fields,
|
||||
},
|
||||
});
|
||||
return @Tuple(types);
|
||||
}
|
||||
|
||||
const TupleTester = struct {
|
||||
|
||||
@ -20,24 +20,16 @@ pub fn TrailerFlags(comptime Fields: type) type {
|
||||
|
||||
pub const ActiveFields = std.enums.EnumFieldStruct(FieldEnum, bool, false);
|
||||
pub const FieldValues = blk: {
|
||||
var fields: [bit_count]Type.StructField = undefined;
|
||||
for (@typeInfo(Fields).@"struct".fields, 0..) |struct_field, i| {
|
||||
fields[i] = Type.StructField{
|
||||
.name = struct_field.name,
|
||||
.type = ?struct_field.type,
|
||||
.default_value_ptr = &@as(?struct_field.type, null),
|
||||
.is_comptime = false,
|
||||
.alignment = @alignOf(?struct_field.type),
|
||||
};
|
||||
var field_names: [bit_count][]const u8 = undefined;
|
||||
var field_types: [bit_count]type = undefined;
|
||||
var field_attrs: [bit_count]std.builtin.Type.StructField.Attributes = undefined;
|
||||
for (@typeInfo(Fields).@"struct".fields, &field_names, &field_types, &field_attrs) |field, *new_name, *NewType, *new_attrs| {
|
||||
new_name.* = field.name;
|
||||
NewType.* = ?field.type;
|
||||
const default: ?field.type = null;
|
||||
new_attrs.* = .{ .default_value_ptr = &default };
|
||||
}
|
||||
break :blk @Type(.{
|
||||
.@"struct" = .{
|
||||
.layout = .auto,
|
||||
.fields = &fields,
|
||||
.decls = &.{},
|
||||
.is_tuple = false,
|
||||
},
|
||||
});
|
||||
break :blk @Struct(.auto, null, &field_names, &field_types, &field_attrs);
|
||||
};
|
||||
|
||||
pub const Self = @This();
|
||||
|
||||
@ -32,12 +32,17 @@ pub fn MultiArrayList(comptime T: type) type {
|
||||
const Elem = switch (@typeInfo(T)) {
|
||||
.@"struct" => T,
|
||||
.@"union" => |u| struct {
|
||||
pub const Bare = @Type(.{ .@"union" = .{
|
||||
.layout = u.layout,
|
||||
.tag_type = null,
|
||||
.fields = u.fields,
|
||||
.decls = &.{},
|
||||
} });
|
||||
pub const Bare = Bare: {
|
||||
var field_names: [u.fields.len][]const u8 = undefined;
|
||||
var field_types: [u.fields.len]type = undefined;
|
||||
var field_attrs: [u.fields.len]std.builtin.Type.UnionField.Attributes = undefined;
|
||||
for (u.fields, &field_names, &field_types, &field_attrs) |field, *name, *Type, *attrs| {
|
||||
name.* = field.name;
|
||||
Type.* = field.type;
|
||||
attrs.* = .{ .@"align" = field.alignment };
|
||||
}
|
||||
break :Bare @Union(u.layout, null, &field_names, &field_types, &field_attrs);
|
||||
};
|
||||
pub const Tag =
|
||||
u.tag_type orelse @compileError("MultiArrayList does not support untagged unions");
|
||||
tags: Tag,
|
||||
@ -609,20 +614,18 @@ pub fn MultiArrayList(comptime T: type) type {
|
||||
}
|
||||
|
||||
const Entry = entry: {
|
||||
var entry_fields: [fields.len]std.builtin.Type.StructField = undefined;
|
||||
for (&entry_fields, sizes.fields) |*entry_field, i| entry_field.* = .{
|
||||
.name = fields[i].name ++ "_ptr",
|
||||
.type = *fields[i].type,
|
||||
.default_value_ptr = null,
|
||||
.is_comptime = fields[i].is_comptime,
|
||||
.alignment = fields[i].alignment,
|
||||
var field_names: [fields.len][]const u8 = undefined;
|
||||
var field_types: [fields.len]type = undefined;
|
||||
var field_attrs: [fields.len]std.builtin.Type.StructField.Attributes = undefined;
|
||||
for (sizes.fields, &field_names, &field_types, &field_attrs) |i, *name, *Type, *attrs| {
|
||||
name.* = fields[i].name ++ "_ptr";
|
||||
Type.* = *fields[i].type;
|
||||
attrs.* = .{
|
||||
.@"comptime" = fields[i].is_comptime,
|
||||
.@"align" = fields[i].alignment,
|
||||
};
|
||||
break :entry @Type(.{ .@"struct" = .{
|
||||
.layout = .@"extern",
|
||||
.fields = &entry_fields,
|
||||
.decls = &.{},
|
||||
.is_tuple = false,
|
||||
} });
|
||||
}
|
||||
break :entry @Struct(.@"extern", null, &field_names, &field_types, &field_attrs);
|
||||
};
|
||||
/// This function is used in the debugger pretty formatters in tools/ to fetch the
|
||||
/// child field order and entry type to facilitate fancy debug printing for this type.
|
||||
@ -1023,23 +1026,9 @@ test "struct with many fields" {
|
||||
const ManyFields = struct {
|
||||
fn Type(count: comptime_int) type {
|
||||
@setEvalBranchQuota(50000);
|
||||
var fields: [count]std.builtin.Type.StructField = undefined;
|
||||
for (0..count) |i| {
|
||||
fields[i] = .{
|
||||
.name = std.fmt.comptimePrint("a{}", .{i}),
|
||||
.type = u32,
|
||||
.default_value_ptr = null,
|
||||
.is_comptime = false,
|
||||
.alignment = @alignOf(u32),
|
||||
};
|
||||
}
|
||||
const info: std.builtin.Type = .{ .@"struct" = .{
|
||||
.layout = .auto,
|
||||
.fields = &fields,
|
||||
.decls = &.{},
|
||||
.is_tuple = false,
|
||||
} };
|
||||
return @Type(info);
|
||||
var field_names: [count][]const u8 = undefined;
|
||||
for (&field_names, 0..) |*n, i| n.* = std.fmt.comptimePrint("a{d}", .{i});
|
||||
return @Struct(.@"extern", null, &field_names, &@splat(u32), &@splat(.{}));
|
||||
}
|
||||
|
||||
fn doTest(ally: std.mem.Allocator, count: comptime_int) !void {
|
||||
|
||||
@ -41,7 +41,7 @@ pub fn syscall_fork() u64 {
|
||||
\\ 2:
|
||||
: [ret] "={o0}" (-> u64),
|
||||
: [number] "{g1}" (@intFromEnum(SYS.fork)),
|
||||
: .{ .memory = true, .icc = true, .o1 = true, .o2 = true, .o3 = true, .o4 = true, .o5 = true, .o7 = true });
|
||||
: .{ .memory = true, .xcc = true, .o1 = true, .o2 = true, .o3 = true, .o4 = true, .o5 = true, .o7 = true });
|
||||
}
|
||||
|
||||
pub fn syscall0(number: SYS) u64 {
|
||||
@ -53,7 +53,7 @@ pub fn syscall0(number: SYS) u64 {
|
||||
\\ 1:
|
||||
: [ret] "={o0}" (-> u64),
|
||||
: [number] "{g1}" (@intFromEnum(number)),
|
||||
: .{ .memory = true, .icc = true, .o1 = true, .o2 = true, .o3 = true, .o4 = true, .o5 = true, .o7 = true });
|
||||
: .{ .memory = true, .xcc = true, .o1 = true, .o2 = true, .o3 = true, .o4 = true, .o5 = true, .o7 = true });
|
||||
}
|
||||
|
||||
pub fn syscall1(number: SYS, arg1: u64) u64 {
|
||||
@ -66,7 +66,7 @@ pub fn syscall1(number: SYS, arg1: u64) u64 {
|
||||
: [ret] "={o0}" (-> u64),
|
||||
: [number] "{g1}" (@intFromEnum(number)),
|
||||
[arg1] "{o0}" (arg1),
|
||||
: .{ .memory = true, .icc = true, .o1 = true, .o2 = true, .o3 = true, .o4 = true, .o5 = true, .o7 = true });
|
||||
: .{ .memory = true, .xcc = true, .o1 = true, .o2 = true, .o3 = true, .o4 = true, .o5 = true, .o7 = true });
|
||||
}
|
||||
|
||||
pub fn syscall2(number: SYS, arg1: u64, arg2: u64) u64 {
|
||||
@ -80,7 +80,7 @@ pub fn syscall2(number: SYS, arg1: u64, arg2: u64) u64 {
|
||||
: [number] "{g1}" (@intFromEnum(number)),
|
||||
[arg1] "{o0}" (arg1),
|
||||
[arg2] "{o1}" (arg2),
|
||||
: .{ .memory = true, .icc = true, .o1 = true, .o2 = true, .o3 = true, .o4 = true, .o5 = true, .o7 = true });
|
||||
: .{ .memory = true, .xcc = true, .o1 = true, .o2 = true, .o3 = true, .o4 = true, .o5 = true, .o7 = true });
|
||||
}
|
||||
|
||||
pub fn syscall3(number: SYS, arg1: u64, arg2: u64, arg3: u64) u64 {
|
||||
@ -95,7 +95,7 @@ pub fn syscall3(number: SYS, arg1: u64, arg2: u64, arg3: u64) u64 {
|
||||
[arg1] "{o0}" (arg1),
|
||||
[arg2] "{o1}" (arg2),
|
||||
[arg3] "{o2}" (arg3),
|
||||
: .{ .memory = true, .icc = true, .o1 = true, .o2 = true, .o3 = true, .o4 = true, .o5 = true, .o7 = true });
|
||||
: .{ .memory = true, .xcc = true, .o1 = true, .o2 = true, .o3 = true, .o4 = true, .o5 = true, .o7 = true });
|
||||
}
|
||||
|
||||
pub fn syscall4(number: SYS, arg1: u64, arg2: u64, arg3: u64, arg4: u64) u64 {
|
||||
@ -111,7 +111,7 @@ pub fn syscall4(number: SYS, arg1: u64, arg2: u64, arg3: u64, arg4: u64) u64 {
|
||||
[arg2] "{o1}" (arg2),
|
||||
[arg3] "{o2}" (arg3),
|
||||
[arg4] "{o3}" (arg4),
|
||||
: .{ .memory = true, .icc = true, .o1 = true, .o2 = true, .o3 = true, .o4 = true, .o5 = true, .o7 = true });
|
||||
: .{ .memory = true, .xcc = true, .o1 = true, .o2 = true, .o3 = true, .o4 = true, .o5 = true, .o7 = true });
|
||||
}
|
||||
|
||||
pub fn syscall5(number: SYS, arg1: u64, arg2: u64, arg3: u64, arg4: u64, arg5: u64) u64 {
|
||||
@ -128,7 +128,7 @@ pub fn syscall5(number: SYS, arg1: u64, arg2: u64, arg3: u64, arg4: u64, arg5: u
|
||||
[arg3] "{o2}" (arg3),
|
||||
[arg4] "{o3}" (arg4),
|
||||
[arg5] "{o4}" (arg5),
|
||||
: .{ .memory = true, .icc = true, .o1 = true, .o2 = true, .o3 = true, .o4 = true, .o5 = true, .o7 = true });
|
||||
: .{ .memory = true, .xcc = true, .o1 = true, .o2 = true, .o3 = true, .o4 = true, .o5 = true, .o7 = true });
|
||||
}
|
||||
|
||||
pub fn syscall6(
|
||||
@ -154,7 +154,7 @@ pub fn syscall6(
|
||||
[arg4] "{o3}" (arg4),
|
||||
[arg5] "{o4}" (arg5),
|
||||
[arg6] "{o5}" (arg6),
|
||||
: .{ .memory = true, .icc = true, .o1 = true, .o2 = true, .o3 = true, .o4 = true, .o5 = true, .o7 = true });
|
||||
: .{ .memory = true, .xcc = true, .o1 = true, .o2 = true, .o3 = true, .o4 = true, .o5 = true, .o7 = true });
|
||||
}
|
||||
|
||||
pub fn clone() callconv(.naked) u64 {
|
||||
@ -220,7 +220,7 @@ pub fn restore_rt() callconv(.c) void {
|
||||
return asm volatile ("t 0x6d"
|
||||
:
|
||||
: [number] "{g1}" (@intFromEnum(SYS.rt_sigreturn)),
|
||||
: .{ .memory = true, .icc = true, .o0 = true, .o1 = true, .o2 = true, .o3 = true, .o4 = true, .o5 = true, .o7 = true });
|
||||
: .{ .memory = true, .xcc = true, .o0 = true, .o1 = true, .o2 = true, .o3 = true, .o4 = true, .o5 = true, .o7 = true });
|
||||
}
|
||||
|
||||
pub const VDSO = struct {
|
||||
|
||||
@ -124,7 +124,7 @@ pub const Options = struct {

logFn: fn (
comptime message_level: log.Level,
comptime scope: @TypeOf(.enum_literal),
comptime scope: @EnumLiteral(),
comptime format: []const u8,
args: anytype,
) void = log.defaultLog,

@ -773,7 +773,6 @@ pub const EnvVar = enum {
|
||||
pub const SimpleComptimeReason = enum(u32) {
|
||||
// Evaluating at comptime because a builtin operand must be comptime-known.
|
||||
// These messages all mention a specific builtin.
|
||||
operand_Type,
|
||||
operand_setEvalBranchQuota,
|
||||
operand_setFloatMode,
|
||||
operand_branchHint,
|
||||
@ -809,25 +808,34 @@ pub const SimpleComptimeReason = enum(u32) {
|
||||
// Evaluating at comptime because types must be comptime-known.
|
||||
// Reasons other than `.type` are just more specific messages.
|
||||
type,
|
||||
int_signedness,
|
||||
int_bit_width,
|
||||
array_sentinel,
|
||||
array_length,
|
||||
pointer_size,
|
||||
pointer_attrs,
|
||||
pointer_sentinel,
|
||||
slice_sentinel,
|
||||
array_length,
|
||||
vector_length,
|
||||
error_set_contents,
|
||||
struct_fields,
|
||||
enum_fields,
|
||||
union_fields,
|
||||
function_ret_ty,
|
||||
function_parameters,
|
||||
fn_ret_ty,
|
||||
fn_param_types,
|
||||
fn_param_attrs,
|
||||
fn_attrs,
|
||||
struct_layout,
|
||||
struct_field_names,
|
||||
struct_field_types,
|
||||
struct_field_attrs,
|
||||
union_layout,
|
||||
union_field_names,
|
||||
union_field_types,
|
||||
union_field_attrs,
|
||||
tuple_field_types,
|
||||
enum_field_names,
|
||||
enum_field_values,
|
||||
|
||||
// Evaluating at comptime because decl/field name must be comptime-known.
|
||||
decl_name,
|
||||
field_name,
|
||||
struct_field_name,
|
||||
enum_field_name,
|
||||
union_field_name,
|
||||
tuple_field_name,
|
||||
tuple_field_index,
|
||||
|
||||
// Evaluating at comptime because it is an attribute of a global declaration.
|
||||
@ -856,7 +864,6 @@ pub const SimpleComptimeReason = enum(u32) {
|
||||
pub fn message(r: SimpleComptimeReason) []const u8 {
|
||||
return switch (r) {
|
||||
// zig fmt: off
|
||||
.operand_Type => "operand to '@Type' must be comptime-known",
|
||||
.operand_setEvalBranchQuota => "operand to '@setEvalBranchQuota' must be comptime-known",
|
||||
.operand_setFloatMode => "operand to '@setFloatMode' must be comptime-known",
|
||||
.operand_branchHint => "operand to '@branchHint' must be comptime-known",
|
||||
@ -888,24 +895,33 @@ pub const SimpleComptimeReason = enum(u32) {
|
||||
.clobber => "clobber must be comptime-known",
|
||||
|
||||
.type => "types must be comptime-known",
|
||||
.int_signedness => "integer signedness must be comptime-known",
|
||||
.int_bit_width => "integer bit width must be comptime-known",
|
||||
.array_sentinel => "array sentinel value must be comptime-known",
|
||||
.array_length => "array length must be comptime-known",
|
||||
.pointer_size => "pointer size must be comptime-known",
|
||||
.pointer_attrs => "pointer attributes must be comptime-known",
|
||||
.pointer_sentinel => "pointer sentinel value must be comptime-known",
|
||||
.slice_sentinel => "slice sentinel value must be comptime-known",
|
||||
.array_length => "array length must be comptime-known",
|
||||
.vector_length => "vector length must be comptime-known",
|
||||
.error_set_contents => "error set contents must be comptime-known",
|
||||
.struct_fields => "struct fields must be comptime-known",
|
||||
.enum_fields => "enum fields must be comptime-known",
|
||||
.union_fields => "union fields must be comptime-known",
|
||||
.function_ret_ty => "function return type must be comptime-known",
|
||||
.function_parameters => "function parameters must be comptime-known",
|
||||
.fn_ret_ty => "function return type must be comptime-known",
|
||||
.fn_param_types => "function parameter types must be comptime-known",
|
||||
.fn_param_attrs => "function parameter attributes must be comptime-known",
|
||||
.fn_attrs => "function attributes must be comptime-known",
|
||||
.struct_layout => "struct layout must be comptime-known",
|
||||
.struct_field_names => "struct field names must be comptime-known",
|
||||
.struct_field_types => "struct field types must be comptime-known",
|
||||
.struct_field_attrs => "struct field attributes must be comptime-known",
|
||||
.union_layout => "union layout must be comptime-known",
|
||||
.union_field_names => "union field names must be comptime-known",
|
||||
.union_field_types => "union field types must be comptime-known",
|
||||
.union_field_attrs => "union field attributes must be comptime-known",
|
||||
.tuple_field_types => "tuple field types must be comptime-known",
|
||||
.enum_field_names => "enum field names must be comptime-known",
|
||||
.enum_field_values => "enum field values must be comptime-known",
|
||||
|
||||
.decl_name => "declaration name must be comptime-known",
|
||||
.field_name => "field name must be comptime-known",
|
||||
.struct_field_name => "struct field name must be comptime-known",
|
||||
.enum_field_name => "enum field name must be comptime-known",
|
||||
.union_field_name => "union field name must be comptime-known",
|
||||
.tuple_field_name => "tuple field name must be comptime-known",
|
||||
.tuple_field_index => "tuple field index must be comptime-known",
|
||||
|
||||
.container_var_init => "initializer of container-level variable must be comptime-known",
|
||||
|
||||
@ -833,7 +833,7 @@ fn expr(gz: *GenZir, scope: *Scope, ri: ResultInfo, node: Ast.Node.Index) InnerE
|
||||
=> {
|
||||
var buf: [2]Ast.Node.Index = undefined;
|
||||
const params = tree.builtinCallParams(&buf, node).?;
|
||||
return builtinCall(gz, scope, ri, node, params, false);
|
||||
return builtinCall(gz, scope, ri, node, params, false, .anon);
|
||||
},
|
||||
|
||||
.call_one,
|
||||
@ -1194,14 +1194,20 @@ fn nameStratExpr(
|
||||
},
|
||||
.builtin_call_two,
|
||||
.builtin_call_two_comma,
|
||||
.builtin_call,
|
||||
.builtin_call_comma,
|
||||
=> {
|
||||
const builtin_token = tree.nodeMainToken(node);
|
||||
const builtin_name = tree.tokenSlice(builtin_token);
|
||||
if (!std.mem.eql(u8, builtin_name, "@Type")) return null;
|
||||
const info = BuiltinFn.list.get(builtin_name) orelse return null;
|
||||
switch (info.tag) {
|
||||
.Enum, .Struct, .Union => {
|
||||
var buf: [2]Ast.Node.Index = undefined;
|
||||
const params = tree.builtinCallParams(&buf, node).?;
|
||||
if (params.len != 1) return null; // let `builtinCall` error
|
||||
return try builtinReify(gz, scope, ri, node, params[0], name_strat);
|
||||
return try builtinCall(gz, scope, ri, node, params, false, name_strat);
|
||||
},
|
||||
else => return null,
|
||||
}
|
||||
},
|
||||
else => return null,
|
||||
}
|
||||
@ -1406,7 +1412,7 @@ fn fnProtoExprInner(
|
||||
.none;
|
||||
|
||||
const ret_ty_node = fn_proto.ast.return_type.unwrap().?;
|
||||
const ret_ty = try comptimeExpr(&block_scope, scope, coerced_type_ri, ret_ty_node, .function_ret_ty);
|
||||
const ret_ty = try comptimeExpr(&block_scope, scope, coerced_type_ri, ret_ty_node, .fn_ret_ty);
|
||||
|
||||
const result = try block_scope.addFunc(.{
|
||||
.src_node = fn_proto.ast.proto_node,
|
||||
@ -2629,7 +2635,7 @@ fn blockExprStmts(gz: *GenZir, parent_scope: *Scope, statements: []const Ast.Nod
|
||||
const params = tree.builtinCallParams(&buf, inner_node).?;
|
||||
|
||||
try emitDbgNode(gz, inner_node);
|
||||
const result = try builtinCall(gz, scope, .{ .rl = .none }, inner_node, params, allow_branch_hint);
|
||||
const result = try builtinCall(gz, scope, .{ .rl = .none }, inner_node, params, allow_branch_hint, .anon);
|
||||
noreturn_src_node = try addEnsureResult(gz, result, inner_node);
|
||||
},
|
||||
|
||||
@ -2707,6 +2713,7 @@ fn addEnsureResult(gz: *GenZir, maybe_unused_result: Zir.Inst.Ref, statement: As
|
||||
.elem_type,
|
||||
.indexable_ptr_elem_type,
|
||||
.splat_op_result_ty,
|
||||
.reify_int,
|
||||
.vector_type,
|
||||
.indexable_ptr_len,
|
||||
.anyframe_type,
|
||||
@ -8942,7 +8949,7 @@ fn unionInit(
|
||||
params: []const Ast.Node.Index,
|
||||
) InnerError!Zir.Inst.Ref {
|
||||
const union_type = try typeExpr(gz, scope, params[0]);
|
||||
const field_name = try comptimeExpr(gz, scope, .{ .rl = .{ .coerced_ty = .slice_const_u8_type } }, params[1], .union_field_name);
|
||||
const field_name = try comptimeExpr(gz, scope, .{ .rl = .{ .coerced_ty = .slice_const_u8_type } }, params[1], .union_field_names);
|
||||
const field_type = try gz.addPlNode(.field_type_ref, node, Zir.Inst.FieldTypeRef{
|
||||
.container_type = union_type,
|
||||
.field_name = field_name,
|
||||
@ -9210,6 +9217,7 @@ fn builtinCall(
|
||||
node: Ast.Node.Index,
|
||||
params: []const Ast.Node.Index,
|
||||
allow_branch_hint: bool,
|
||||
reify_name_strat: Zir.Inst.NameStrategy,
|
||||
) InnerError!Zir.Inst.Ref {
|
||||
const astgen = gz.astgen;
|
||||
const tree = astgen.tree;
|
||||
@ -9443,9 +9451,140 @@ fn builtinCall(
|
||||
return rvalue(gz, ri, try gz.addNodeExtended(.in_comptime, node), node);
|
||||
},
|
||||
|
||||
.Type => {
|
||||
return builtinReify(gz, scope, ri, node, params[0], .anon);
|
||||
.EnumLiteral => return rvalue(gz, ri, .enum_literal_type, node),
|
||||
.Int => {
|
||||
const signedness_ty = try gz.addBuiltinValue(node, .signedness);
|
||||
const result = try gz.addPlNode(.reify_int, node, Zir.Inst.Bin{
|
||||
.lhs = try comptimeExpr(gz, scope, .{ .rl = .{ .coerced_ty = signedness_ty } }, params[0], .int_signedness),
|
||||
.rhs = try comptimeExpr(gz, scope, .{ .rl = .{ .coerced_ty = .u16_type } }, params[1], .int_bit_width),
|
||||
});
|
||||
return rvalue(gz, ri, result, node);
|
||||
},
|
||||
.Tuple => {
|
||||
const result = try gz.addExtendedPayload(.reify_tuple, Zir.Inst.UnNode{
|
||||
.node = gz.nodeIndexToRelative(node),
|
||||
.operand = try comptimeExpr(gz, scope, .{ .rl = .{ .coerced_ty = .slice_const_type_type } }, params[0], .tuple_field_types),
|
||||
});
|
||||
return rvalue(gz, ri, result, node);
|
||||
},
|
||||
.Pointer => {
|
||||
const ptr_size_ty = try gz.addBuiltinValue(node, .pointer_size);
|
||||
const ptr_attrs_ty = try gz.addBuiltinValue(node, .pointer_attributes);
|
||||
const size = try comptimeExpr(gz, scope, .{ .rl = .{ .coerced_ty = ptr_size_ty } }, params[0], .pointer_size);
|
||||
const attrs = try comptimeExpr(gz, scope, .{ .rl = .{ .coerced_ty = ptr_attrs_ty } }, params[1], .pointer_attrs);
|
||||
const elem_ty = try typeExpr(gz, scope, params[2]);
|
||||
const sentinel_ty = try gz.addExtendedPayload(.reify_pointer_sentinel_ty, Zir.Inst.UnNode{
|
||||
.node = gz.nodeIndexToRelative(params[2]),
|
||||
.operand = elem_ty,
|
||||
});
|
||||
const sentinel = try comptimeExpr(gz, scope, .{ .rl = .{ .coerced_ty = sentinel_ty } }, params[3], .pointer_sentinel);
|
||||
const result = try gz.addExtendedPayload(.reify_pointer, Zir.Inst.ReifyPointer{
|
||||
.node = gz.nodeIndexToRelative(node),
|
||||
.size = size,
|
||||
.attrs = attrs,
|
||||
.elem_ty = elem_ty,
|
||||
.sentinel = sentinel,
|
||||
});
|
||||
return rvalue(gz, ri, result, node);
|
||||
},
|
||||
.Fn => {
|
||||
const fn_attrs_ty = try gz.addBuiltinValue(node, .fn_attributes);
|
||||
const param_types = try comptimeExpr(gz, scope, .{ .rl = .{ .coerced_ty = .slice_const_type_type } }, params[0], .fn_param_types);
|
||||
const param_attrs_ty = try gz.addExtendedPayloadSmall(
|
||||
.reify_slice_arg_ty,
|
||||
@intFromEnum(Zir.Inst.ReifySliceArgInfo.type_to_fn_param_attrs),
|
||||
Zir.Inst.UnNode{ .node = gz.nodeIndexToRelative(params[0]), .operand = param_types },
|
||||
);
|
||||
const param_attrs = try comptimeExpr(gz, scope, .{ .rl = .{ .coerced_ty = param_attrs_ty } }, params[1], .fn_param_attrs);
|
||||
const ret_ty = try comptimeExpr(gz, scope, coerced_type_ri, params[2], .fn_ret_ty);
|
||||
const fn_attrs = try comptimeExpr(gz, scope, .{ .rl = .{ .coerced_ty = fn_attrs_ty } }, params[3], .fn_attrs);
|
||||
const result = try gz.addExtendedPayload(.reify_fn, Zir.Inst.ReifyFn{
|
||||
.node = gz.nodeIndexToRelative(node),
|
||||
.param_types = param_types,
|
||||
.param_attrs = param_attrs,
|
||||
.ret_ty = ret_ty,
|
||||
.fn_attrs = fn_attrs,
|
||||
});
|
||||
return rvalue(gz, ri, result, node);
|
||||
},
|
||||
.Struct => {
|
||||
const container_layout_ty = try gz.addBuiltinValue(node, .container_layout);
|
||||
const layout = try comptimeExpr(gz, scope, .{ .rl = .{ .coerced_ty = container_layout_ty } }, params[0], .struct_layout);
|
||||
const backing_ty = try comptimeExpr(gz, scope, .{ .rl = .{ .coerced_ty = .optional_type_type } }, params[1], .type);
|
||||
const field_names = try comptimeExpr(gz, scope, .{ .rl = .{ .coerced_ty = .slice_const_slice_const_u8_type } }, params[2], .struct_field_names);
|
||||
const field_types_ty = try gz.addExtendedPayloadSmall(
|
||||
.reify_slice_arg_ty,
|
||||
@intFromEnum(Zir.Inst.ReifySliceArgInfo.string_to_struct_field_type),
|
||||
Zir.Inst.UnNode{ .node = gz.nodeIndexToRelative(params[2]), .operand = field_names },
|
||||
);
|
||||
const field_attrs_ty = try gz.addExtendedPayloadSmall(
|
||||
.reify_slice_arg_ty,
|
||||
@intFromEnum(Zir.Inst.ReifySliceArgInfo.string_to_struct_field_attrs),
|
||||
Zir.Inst.UnNode{ .node = gz.nodeIndexToRelative(params[2]), .operand = field_names },
|
||||
);
|
||||
const field_types = try comptimeExpr(gz, scope, .{ .rl = .{ .coerced_ty = field_types_ty } }, params[3], .struct_field_types);
|
||||
const field_attrs = try comptimeExpr(gz, scope, .{ .rl = .{ .coerced_ty = field_attrs_ty } }, params[4], .struct_field_attrs);
|
||||
const result = try gz.addExtendedPayloadSmall(.reify_struct, @intFromEnum(reify_name_strat), Zir.Inst.ReifyStruct{
|
||||
.src_line = gz.astgen.source_line,
|
||||
.node = node,
|
||||
.layout = layout,
|
||||
.backing_ty = backing_ty,
|
||||
.field_names = field_names,
|
||||
.field_types = field_types,
|
||||
.field_attrs = field_attrs,
|
||||
});
|
||||
return rvalue(gz, ri, result, node);
|
||||
},
|
||||
.Union => {
|
||||
const container_layout_ty = try gz.addBuiltinValue(node, .container_layout);
|
||||
const layout = try comptimeExpr(gz, scope, .{ .rl = .{ .coerced_ty = container_layout_ty } }, params[0], .union_layout);
|
||||
const arg_ty = try comptimeExpr(gz, scope, .{ .rl = .{ .coerced_ty = .optional_type_type } }, params[1], .type);
|
||||
const field_names = try comptimeExpr(gz, scope, .{ .rl = .{ .coerced_ty = .slice_const_slice_const_u8_type } }, params[2], .union_field_names);
|
||||
const field_types_ty = try gz.addExtendedPayloadSmall(
|
||||
.reify_slice_arg_ty,
|
||||
@intFromEnum(Zir.Inst.ReifySliceArgInfo.string_to_union_field_type),
|
||||
Zir.Inst.UnNode{ .node = gz.nodeIndexToRelative(params[2]), .operand = field_names },
|
||||
);
|
||||
const field_attrs_ty = try gz.addExtendedPayloadSmall(
|
||||
.reify_slice_arg_ty,
|
||||
@intFromEnum(Zir.Inst.ReifySliceArgInfo.string_to_union_field_attrs),
|
||||
Zir.Inst.UnNode{ .node = gz.nodeIndexToRelative(params[2]), .operand = field_names },
|
||||
);
|
||||
const field_types = try comptimeExpr(gz, scope, .{ .rl = .{ .coerced_ty = field_types_ty } }, params[3], .union_field_types);
|
||||
const field_attrs = try comptimeExpr(gz, scope, .{ .rl = .{ .coerced_ty = field_attrs_ty } }, params[4], .union_field_attrs);
|
||||
const result = try gz.addExtendedPayloadSmall(.reify_union, @intFromEnum(reify_name_strat), Zir.Inst.ReifyUnion{
|
||||
.src_line = gz.astgen.source_line,
|
||||
.node = node,
|
||||
.layout = layout,
|
||||
.arg_ty = arg_ty,
|
||||
.field_names = field_names,
|
||||
.field_types = field_types,
|
||||
.field_attrs = field_attrs,
|
||||
});
|
||||
return rvalue(gz, ri, result, node);
|
||||
},
|
||||
.Enum => {
|
||||
const enum_mode_ty = try gz.addBuiltinValue(node, .enum_mode);
|
||||
const tag_ty = try typeExpr(gz, scope, params[0]);
|
||||
const mode = try comptimeExpr(gz, scope, .{ .rl = .{ .coerced_ty = enum_mode_ty } }, params[1], .type);
|
||||
const field_names = try comptimeExpr(gz, scope, .{ .rl = .{ .coerced_ty = .slice_const_slice_const_u8_type } }, params[2], .enum_field_names);
|
||||
const field_values_ty = try gz.addExtendedPayload(.reify_enum_value_slice_ty, Zir.Inst.BinNode{
|
||||
.node = gz.nodeIndexToRelative(node),
|
||||
.lhs = tag_ty,
|
||||
.rhs = field_names,
|
||||
});
|
||||
const field_values = try comptimeExpr(gz, scope, .{ .rl = .{ .coerced_ty = field_values_ty } }, params[3], .enum_field_values);
|
||||
const result = try gz.addExtendedPayloadSmall(.reify_enum, @intFromEnum(reify_name_strat), Zir.Inst.ReifyEnum{
|
||||
.src_line = gz.astgen.source_line,
|
||||
.node = node,
|
||||
.tag_ty = tag_ty,
|
||||
.mode = mode,
|
||||
.field_names = field_names,
|
||||
.field_values = field_values,
|
||||
});
|
||||
return rvalue(gz, ri, result, node);
|
||||
},
|
||||
|
||||
.panic => {
|
||||
try emitDbgNode(gz, node);
|
||||
return simpleUnOp(gz, scope, ri, node, .{ .rl = .{ .coerced_ty = .slice_const_u8_type } }, params[0], .panic);
|
||||
@ -9764,41 +9903,6 @@ fn builtinCall(
|
||||
},
|
||||
}
|
||||
}
|
||||
fn builtinReify(
|
||||
gz: *GenZir,
|
||||
scope: *Scope,
|
||||
ri: ResultInfo,
|
||||
node: Ast.Node.Index,
|
||||
arg_node: Ast.Node.Index,
|
||||
name_strat: Zir.Inst.NameStrategy,
|
||||
) InnerError!Zir.Inst.Ref {
|
||||
const astgen = gz.astgen;
|
||||
const gpa = astgen.gpa;
|
||||
|
||||
const type_info_ty = try gz.addBuiltinValue(node, .type_info);
|
||||
const operand = try expr(gz, scope, .{ .rl = .{ .coerced_ty = type_info_ty } }, arg_node);
|
||||
|
||||
try gz.instructions.ensureUnusedCapacity(gpa, 1);
|
||||
try astgen.instructions.ensureUnusedCapacity(gpa, 1);
|
||||
|
||||
const payload_index = try astgen.addExtra(Zir.Inst.Reify{
|
||||
.node = node, // Absolute node index -- see the definition of `Reify`.
|
||||
.operand = operand,
|
||||
.src_line = astgen.source_line,
|
||||
});
|
||||
const new_index: Zir.Inst.Index = @enumFromInt(astgen.instructions.len);
|
||||
astgen.instructions.appendAssumeCapacity(.{
|
||||
.tag = .extended,
|
||||
.data = .{ .extended = .{
|
||||
.opcode = .reify,
|
||||
.small = @intFromEnum(name_strat),
|
||||
.operand = payload_index,
|
||||
} },
|
||||
});
|
||||
gz.instructions.appendAssumeCapacity(new_index);
|
||||
const result = new_index.toRef();
|
||||
return rvalue(gz, ri, result, node);
|
||||
}
|
||||
|
||||
fn hasDeclOrField(
|
||||
gz: *GenZir,
|
||||
|
||||
@ -866,6 +866,7 @@ fn builtinCall(astrl: *AstRlAnnotate, block: ?*Block, ri: ResultInfo, node: Ast.
|
||||
// These builtins take no args and do not consume the result pointer.
|
||||
.src,
|
||||
.This,
|
||||
.EnumLiteral,
|
||||
.return_address,
|
||||
.error_return_trace,
|
||||
.frame,
|
||||
@ -906,7 +907,7 @@ fn builtinCall(astrl: *AstRlAnnotate, block: ?*Block, ri: ResultInfo, node: Ast.
|
||||
.embed_file,
|
||||
.error_name,
|
||||
.set_runtime_safety,
|
||||
.Type,
|
||||
.Tuple,
|
||||
.c_undef,
|
||||
.c_include,
|
||||
.wasm_memory_size,
|
||||
@ -1058,6 +1059,48 @@ fn builtinCall(astrl: *AstRlAnnotate, block: ?*Block, ri: ResultInfo, node: Ast.
|
||||
_ = try astrl.expr(args[3], block, ResultInfo.none);
|
||||
return false;
|
||||
},
|
||||
.Int => {
|
||||
_ = try astrl.expr(args[0], block, ResultInfo.type_only);
|
||||
_ = try astrl.expr(args[1], block, ResultInfo.type_only);
|
||||
return false;
|
||||
},
|
||||
.Pointer => {
|
||||
_ = try astrl.expr(args[0], block, ResultInfo.type_only);
|
||||
_ = try astrl.expr(args[1], block, ResultInfo.type_only);
|
||||
_ = try astrl.expr(args[2], block, ResultInfo.type_only);
|
||||
_ = try astrl.expr(args[3], block, ResultInfo.type_only);
|
||||
return false;
|
||||
},
|
||||
.Fn => {
|
||||
_ = try astrl.expr(args[0], block, ResultInfo.type_only);
|
||||
_ = try astrl.expr(args[1], block, ResultInfo.type_only);
|
||||
_ = try astrl.expr(args[2], block, ResultInfo.type_only);
|
||||
_ = try astrl.expr(args[3], block, ResultInfo.type_only);
|
||||
return false;
|
||||
},
|
||||
.Struct => {
|
||||
_ = try astrl.expr(args[0], block, ResultInfo.type_only);
|
||||
_ = try astrl.expr(args[1], block, ResultInfo.type_only);
|
||||
_ = try astrl.expr(args[2], block, ResultInfo.type_only);
|
||||
_ = try astrl.expr(args[3], block, ResultInfo.type_only);
|
||||
_ = try astrl.expr(args[4], block, ResultInfo.type_only);
|
||||
return false;
|
||||
},
|
||||
.Union => {
|
||||
_ = try astrl.expr(args[0], block, ResultInfo.type_only);
|
||||
_ = try astrl.expr(args[1], block, ResultInfo.type_only);
|
||||
_ = try astrl.expr(args[2], block, ResultInfo.type_only);
|
||||
_ = try astrl.expr(args[3], block, ResultInfo.type_only);
|
||||
_ = try astrl.expr(args[4], block, ResultInfo.type_only);
|
||||
return false;
|
||||
},
|
||||
.Enum => {
|
||||
_ = try astrl.expr(args[0], block, ResultInfo.type_only);
|
||||
_ = try astrl.expr(args[1], block, ResultInfo.type_only);
|
||||
_ = try astrl.expr(args[2], block, ResultInfo.type_only);
|
||||
_ = try astrl.expr(args[3], block, ResultInfo.type_only);
|
||||
return false;
|
||||
},
|
||||
.Vector => {
|
||||
_ = try astrl.expr(args[0], block, ResultInfo.type_only);
|
||||
_ = try astrl.expr(args[1], block, ResultInfo.type_only);
|
||||
|
||||
@ -110,7 +110,14 @@ pub const Tag = enum {
|
||||
This,
|
||||
trap,
|
||||
truncate,
|
||||
Type,
|
||||
EnumLiteral,
|
||||
Int,
|
||||
Tuple,
|
||||
Pointer,
|
||||
Fn,
|
||||
Struct,
|
||||
Union,
|
||||
Enum,
|
||||
type_info,
|
||||
type_name,
|
||||
TypeOf,
|
||||
@ -937,12 +944,61 @@ pub const list = list: {
|
||||
},
|
||||
},
|
||||
.{
|
||||
"@Type",
|
||||
"@EnumLiteral",
|
||||
.{
|
||||
.tag = .Type,
|
||||
.tag = .EnumLiteral,
|
||||
.param_count = 0,
|
||||
},
|
||||
},
|
||||
.{
|
||||
"@Int",
|
||||
.{
|
||||
.tag = .Int,
|
||||
.param_count = 2,
|
||||
},
|
||||
},
|
||||
.{
|
||||
"@Tuple",
|
||||
.{
|
||||
.tag = .Tuple,
|
||||
.param_count = 1,
|
||||
},
|
||||
},
|
||||
.{
|
||||
"@Pointer",
|
||||
.{
|
||||
.tag = .Pointer,
|
||||
.param_count = 4,
|
||||
},
|
||||
},
|
||||
.{
|
||||
"@Fn",
|
||||
.{
|
||||
.tag = .Fn,
|
||||
.param_count = 4,
|
||||
},
|
||||
},
|
||||
.{
|
||||
"@Struct",
|
||||
.{
|
||||
.tag = .Struct,
|
||||
.param_count = 5,
|
||||
},
|
||||
},
|
||||
.{
|
||||
"@Union",
|
||||
.{
|
||||
.tag = .Union,
|
||||
.param_count = 5,
|
||||
},
|
||||
},
|
||||
.{
|
||||
"@Enum",
|
||||
.{
|
||||
.tag = .Enum,
|
||||
.param_count = 4,
|
||||
},
|
||||
},
|
||||
.{
|
||||
"@typeInfo",
|
||||
.{
|
||||
|
||||
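The `BuiltinFn.zig` entries above register the new reification builtins and their arities (`@EnumLiteral` 0, `@Tuple` 1, `@Int` 2, `@Enum` 4, `@Fn` 4, `@Pointer` 4, `@Struct` 5, `@Union` 5). The following is a hedged sketch only, inferred from the call sites elsewhere in this diff; the exact `std.builtin` parameter types are only partially visible in this compare view and anything beyond those call sites is an assumption:

```zig
const std = @import("std");

// @Int(signedness, bits) replaces @Type(.{ .int = ... }).
const U24 = @Int(.unsigned, 24);

// @Tuple(field_types) builds a tuple type from a slice of types.
const Pair = @Tuple(&.{ u32, bool });

// @Struct(layout, backing_int, field_names, field_types, field_attrs);
// the trailing `&@splat(.{})` of default attributes mirrors the call sites
// in the compiler sources below and assumes the attribute struct has all-default fields.
const Point = @Struct(.auto, null, &.{ "x", "y" }, &.{ f32, f32 }, &@splat(.{}));

comptime {
    std.debug.assert(@bitSizeOf(U24) == 24);
    std.debug.assert(@sizeOf(Point) == 2 * @sizeOf(f32));
}
```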
@ -260,6 +260,10 @@ pub const Inst = struct {
|
||||
/// `[N:S]T` syntax. Source location is the array type expression node.
|
||||
/// Uses the `pl_node` union field. Payload is `ArrayTypeSentinel`.
|
||||
array_type_sentinel,
|
||||
/// `@Int` builtin.
|
||||
/// Uses the `pl_node` union field with `Bin` payload.
|
||||
/// lhs is signedness, rhs is bit count.
|
||||
reify_int,
|
||||
/// `@Vector` builtin.
|
||||
/// Uses the `pl_node` union field with `Bin` payload.
|
||||
/// lhs is length, rhs is element type.
|
||||
@ -1112,6 +1116,7 @@ pub const Inst = struct {
|
||||
.array_mul,
|
||||
.array_type,
|
||||
.array_type_sentinel,
|
||||
.reify_int,
|
||||
.vector_type,
|
||||
.elem_type,
|
||||
.indexable_ptr_elem_type,
|
||||
@ -1409,6 +1414,7 @@ pub const Inst = struct {
|
||||
.array_mul,
|
||||
.array_type,
|
||||
.array_type_sentinel,
|
||||
.reify_int,
|
||||
.vector_type,
|
||||
.elem_type,
|
||||
.indexable_ptr_elem_type,
|
||||
@ -1644,6 +1650,7 @@ pub const Inst = struct {
|
||||
.array_mul = .pl_node,
|
||||
.array_type = .pl_node,
|
||||
.array_type_sentinel = .pl_node,
|
||||
.reify_int = .pl_node,
|
||||
.vector_type = .pl_node,
|
||||
.elem_type = .un_node,
|
||||
.indexable_ptr_elem_type = .un_node,
|
||||
@ -2035,10 +2042,43 @@ pub const Inst = struct {
|
||||
/// Implement builtin `@errorFromInt`.
|
||||
/// `operand` is payload index to `UnNode`.
|
||||
error_from_int,
|
||||
/// Implement builtin `@Type`.
|
||||
/// `operand` is payload index to `Reify`.
|
||||
/// Given a comptime-known operand of type `[]const A`, returns the type `*const [operand.len]B`.
|
||||
/// The types `A` and `B` are determined from `ReifySliceArgInfo`.
|
||||
/// This instruction is used to provide result types to arguments of `@Fn`, `@Struct`, etc.
|
||||
/// `operand` is payload index to `UnNode`.
|
||||
/// `small` is a bitcast `ReifySliceArgInfo`.
|
||||
reify_slice_arg_ty,
|
||||
/// Like `reify_slice_arg_ty` for the specific case of `[]const []const u8` to `[]const TagInt`,
|
||||
/// as needed for `@Enum`.
|
||||
/// `operand` is payload index to `BinNode`. lhs is the type `TagInt`. rhs is the `[]const []const u8` value.
|
||||
/// `small` is unused.
|
||||
reify_enum_value_slice_ty,
|
||||
/// Given a comptime-known operand of type `type`, returns the type `?operand` if possible, otherwise `?noreturn`.
|
||||
/// Used for the final arg of `@Pointer` to allow reifying pointers to opaque types.
|
||||
/// `operand` is payload index to `UnNode`.
|
||||
/// `small` is unused.
|
||||
reify_pointer_sentinel_ty,
|
||||
/// Implements builtin `@Tuple`.
|
||||
/// `operand` is payload index to `UnNode`.
|
||||
reify_tuple,
|
||||
/// Implements builtin `@Pointer`.
|
||||
/// `operand` is payload index to `ReifyPointer`.
|
||||
reify_pointer,
|
||||
/// Implements builtin `@Fn`.
|
||||
/// `operand` is payload index to `ReifyFn`.
|
||||
reify_fn,
|
||||
/// Implements builtin `@Struct`.
|
||||
/// `operand` is payload index to `ReifyStruct`.
|
||||
/// `small` contains `NameStrategy`.
|
||||
reify,
|
||||
reify_struct,
|
||||
/// Implements builtin `@Union`.
|
||||
/// `operand` is payload index to `ReifyUnion`.
|
||||
/// `small` contains `NameStrategy`.
|
||||
reify_union,
|
||||
/// Implements builtin `@Enum`.
|
||||
/// `operand` is payload index to `ReifyEnum`.
|
||||
/// `small` contains `NameStrategy`.
|
||||
reify_enum,
|
||||
/// Implements the `@cmpxchgStrong` and `@cmpxchgWeak` builtins.
|
||||
/// `small` 0=>weak 1=>strong
|
||||
/// `operand` is payload index to `Cmpxchg`.
|
||||
@ -2226,6 +2266,11 @@ pub const Inst = struct {
|
||||
manyptr_const_u8_sentinel_0_type,
|
||||
slice_const_u8_type,
|
||||
slice_const_u8_sentinel_0_type,
|
||||
manyptr_const_slice_const_u8_type,
|
||||
slice_const_slice_const_u8_type,
|
||||
optional_type_type,
|
||||
manyptr_const_type_type,
|
||||
slice_const_type_type,
|
||||
vector_8_i8_type,
|
||||
vector_16_i8_type,
|
||||
vector_32_i8_type,
|
||||
@ -3169,6 +3214,23 @@ pub const Inst = struct {
|
||||
rhs: Ref,
|
||||
};
|
||||
|
||||
pub const ReifySliceArgInfo = enum(u16) {
/// Input element type is `type`.
/// Output element type is `std.builtin.Type.Fn.Param.Attributes`.
type_to_fn_param_attrs,
/// Input element type is `[]const u8`.
/// Output element type is `type`.
string_to_struct_field_type,
/// Identical to `string_to_struct_field_type` aside from emitting slightly different error messages.
string_to_union_field_type,
/// Input element type is `[]const u8`.
/// Output element type is `std.builtin.Type.StructField.Attributes`.
string_to_struct_field_attrs,
/// Input element type is `[]const u8`.
/// Output element type is `std.builtin.Type.UnionField.Attributes`.
string_to_union_field_attrs,
};
|
||||
|
||||
pub const UnNode = struct {
|
||||
node: Ast.Node.Offset,
|
||||
operand: Ref,
|
||||
@ -3179,12 +3241,55 @@ pub const Inst = struct {
|
||||
index: u32,
|
||||
};
|
||||
|
||||
pub const Reify = struct {
|
||||
pub const ReifyPointer = struct {
|
||||
node: Ast.Node.Offset,
|
||||
size: Ref,
|
||||
attrs: Ref,
|
||||
elem_ty: Ref,
|
||||
sentinel: Ref,
|
||||
};
|
||||
|
||||
pub const ReifyFn = struct {
|
||||
node: Ast.Node.Offset,
|
||||
param_types: Ref,
|
||||
param_attrs: Ref,
|
||||
ret_ty: Ref,
|
||||
fn_attrs: Ref,
|
||||
};
|
||||
|
||||
pub const ReifyStruct = struct {
|
||||
src_line: u32,
|
||||
/// This node is absolute, because `reify` instructions are tracked across updates, and
|
||||
/// this simplifies the logic for getting source locations for types.
|
||||
node: Ast.Node.Index,
|
||||
operand: Ref,
|
||||
layout: Ref,
|
||||
backing_ty: Ref,
|
||||
field_names: Ref,
|
||||
field_types: Ref,
|
||||
field_attrs: Ref,
|
||||
};
|
||||
|
||||
pub const ReifyUnion = struct {
|
||||
src_line: u32,
|
||||
/// This node is absolute, because `reify` instructions are tracked across updates, and
|
||||
/// this simplifies the logic for getting source locations for types.
|
||||
node: Ast.Node.Index,
|
||||
layout: Ref,
|
||||
arg_ty: Ref,
|
||||
field_names: Ref,
|
||||
field_types: Ref,
|
||||
field_attrs: Ref,
|
||||
};
|
||||
|
||||
pub const ReifyEnum = struct {
|
||||
src_line: u32,
|
||||
/// This node is absolute, because `reify` instructions are tracked across updates, and
|
||||
/// this simplifies the logic for getting source locations for types.
|
||||
node: Ast.Node.Index,
|
||||
tag_ty: Ref,
|
||||
mode: Ref,
|
||||
field_names: Ref,
|
||||
field_values: Ref,
|
||||
};
|
||||
|
||||
/// Trailing:
|
||||
@ -3496,14 +3601,19 @@ pub const Inst = struct {
|
||||
calling_convention,
|
||||
address_space,
|
||||
float_mode,
|
||||
signedness,
|
||||
reduce_op,
|
||||
call_modifier,
|
||||
prefetch_options,
|
||||
export_options,
|
||||
extern_options,
|
||||
type_info,
|
||||
branch_hint,
|
||||
clobbers,
|
||||
pointer_size,
|
||||
pointer_attributes,
|
||||
fn_attributes,
|
||||
container_layout,
|
||||
enum_mode,
|
||||
// Values
|
||||
calling_convention_c,
|
||||
calling_convention_inline,
|
||||
@ -4190,6 +4300,7 @@ fn findTrackableInner(
|
||||
.array_mul,
|
||||
.array_type,
|
||||
.array_type_sentinel,
|
||||
.reify_int,
|
||||
.vector_type,
|
||||
.elem_type,
|
||||
.indexable_ptr_elem_type,
|
||||
@ -4432,6 +4543,12 @@ fn findTrackableInner(
|
||||
.select,
|
||||
.int_from_error,
|
||||
.error_from_int,
|
||||
.reify_slice_arg_ty,
|
||||
.reify_enum_value_slice_ty,
|
||||
.reify_pointer_sentinel_ty,
|
||||
.reify_tuple,
|
||||
.reify_pointer,
|
||||
.reify_fn,
|
||||
.cmpxchg,
|
||||
.c_va_arg,
|
||||
.c_va_copy,
|
||||
@ -4463,7 +4580,11 @@ fn findTrackableInner(
|
||||
},
|
||||
|
||||
// Reifications and opaque declarations need tracking, but have no body.
|
||||
.reify, .opaque_decl => return contents.other.append(gpa, inst),
|
||||
.reify_enum,
|
||||
.reify_struct,
|
||||
.reify_union,
|
||||
.opaque_decl,
|
||||
=> return contents.other.append(gpa, inst),
|
||||
|
||||
// Struct declarations need tracking and have bodies.
|
||||
.struct_decl => {
|
||||
@ -5246,7 +5367,9 @@ pub fn assertTrackable(zir: Zir, inst_idx: Zir.Inst.Index) void {
|
||||
.union_decl,
|
||||
.enum_decl,
|
||||
.opaque_decl,
|
||||
.reify,
|
||||
.reify_enum,
|
||||
.reify_struct,
|
||||
.reify_union,
|
||||
=> {}, // tracked in order, as the owner instructions of explicit container types
|
||||
else => unreachable, // assertion failure; not trackable
|
||||
},
|
||||
|
||||
@ -81,23 +81,15 @@ fn ToUnsigned(comptime T: type) type {
|
||||
}
|
||||
|
||||
/// Constructs a [*c] pointer with the const and volatile annotations
|
||||
/// from SelfType for pointing to a C flexible array of ElementType.
|
||||
pub fn FlexibleArrayType(comptime SelfType: type, comptime ElementType: type) type {
|
||||
switch (@typeInfo(SelfType)) {
|
||||
.pointer => |ptr| {
|
||||
return @Type(.{ .pointer = .{
|
||||
.size = .c,
|
||||
.is_const = ptr.is_const,
|
||||
.is_volatile = ptr.is_volatile,
|
||||
.alignment = @alignOf(ElementType),
|
||||
.address_space = .generic,
|
||||
.child = ElementType,
|
||||
.is_allowzero = true,
|
||||
.sentinel_ptr = null,
|
||||
} });
|
||||
},
|
||||
else => |info| @compileError("Invalid self type \"" ++ @tagName(info) ++ "\" for flexible array getter: " ++ @typeName(SelfType)),
|
||||
}
|
||||
/// from Self for pointing to a C flexible array of Element.
|
||||
pub fn FlexibleArrayType(comptime Self: type, comptime Element: type) type {
|
||||
return switch (@typeInfo(Self)) {
|
||||
.pointer => |ptr| @Pointer(.c, .{
|
||||
.@"const" = ptr.is_const,
|
||||
.@"volatile" = ptr.is_volatile,
|
||||
}, Element, null),
|
||||
else => |info| @compileError("Invalid self type \"" ++ @tagName(info) ++ "\" for flexible array getter: " ++ @typeName(Self)),
|
||||
};
|
||||
}
|
||||
|
||||
/// Promote the type of an integer literal until it fits as C would.
|
||||
@ -219,7 +211,7 @@ fn castInt(comptime DestType: type, target: anytype) DestType {
|
||||
const dest = @typeInfo(DestType).int;
|
||||
const source = @typeInfo(@TypeOf(target)).int;
|
||||
|
||||
const Int = @Type(.{ .int = .{ .bits = dest.bits, .signedness = source.signedness } });
|
||||
const Int = @Int(source.signedness, dest.bits);
|
||||
|
||||
if (dest.bits < source.bits)
|
||||
return @as(DestType, @bitCast(@as(Int, @truncate(target))))
|
||||
|
||||
@ -8614,39 +8614,18 @@ pub const Metadata = packed struct(u32) {
|
||||
nodes: anytype,
|
||||
w: *Writer,
|
||||
) !void {
|
||||
comptime var fmt_str: []const u8 = "";
|
||||
const names = comptime std.meta.fieldNames(@TypeOf(nodes));
|
||||
comptime var fields: [2 + names.len]std.builtin.Type.StructField = undefined;
|
||||
inline for (fields[0..2], .{ "distinct", "node" }) |*field, name| {
|
||||
fmt_str = fmt_str ++ "{[" ++ name ++ "]s}";
|
||||
field.* = .{
|
||||
.name = name,
|
||||
.type = []const u8,
|
||||
.default_value_ptr = null,
|
||||
.is_comptime = false,
|
||||
.alignment = @alignOf([]const u8),
|
||||
};
|
||||
}
|
||||
fmt_str = fmt_str ++ "(";
|
||||
inline for (fields[2..], names) |*field, name| {
|
||||
fmt_str = fmt_str ++ "{[" ++ name ++ "]f}";
|
||||
const T = std.fmt.Alt(FormatData, format);
|
||||
field.* = .{
|
||||
.name = name,
|
||||
.type = T,
|
||||
.default_value_ptr = null,
|
||||
.is_comptime = false,
|
||||
.alignment = @alignOf(T),
|
||||
};
|
||||
}
|
||||
|
||||
comptime var fmt_str: []const u8 = "{[distinct]s}{[node]s}(";
|
||||
inline for (names) |name| fmt_str = fmt_str ++ "{[" ++ name ++ "]f}";
|
||||
fmt_str = fmt_str ++ ")\n";
|
||||
|
||||
var fmt_args: @Type(.{ .@"struct" = .{
|
||||
.layout = .auto,
|
||||
.fields = &fields,
|
||||
.decls = &.{},
|
||||
.is_tuple = false,
|
||||
} }) = undefined;
|
||||
const field_names = @as([]const []const u8, &.{ "distinct", "node" }) ++ names;
|
||||
comptime var field_types: [2 + names.len]type = undefined;
|
||||
@memset(field_types[0..2], []const u8);
|
||||
@memset(field_types[2..], std.fmt.Alt(FormatData, format));
|
||||
|
||||
var fmt_args: @Struct(.auto, null, field_names, &field_types, &@splat(.{})) = undefined;
|
||||
fmt_args.distinct = @tagName(distinct);
|
||||
fmt_args.node = @tagName(node);
|
||||
inline for (names) |name| @field(fmt_args, name) = try formatter.fmt(
|
||||
|
||||
@ -1062,6 +1062,11 @@ pub const Inst = struct {
|
||||
manyptr_const_u8_sentinel_0_type = @intFromEnum(InternPool.Index.manyptr_const_u8_sentinel_0_type),
|
||||
slice_const_u8_type = @intFromEnum(InternPool.Index.slice_const_u8_type),
|
||||
slice_const_u8_sentinel_0_type = @intFromEnum(InternPool.Index.slice_const_u8_sentinel_0_type),
|
||||
manyptr_const_slice_const_u8_type = @intFromEnum(InternPool.Index.manyptr_const_slice_const_u8_type),
|
||||
slice_const_slice_const_u8_type = @intFromEnum(InternPool.Index.slice_const_slice_const_u8_type),
|
||||
optional_type_type = @intFromEnum(InternPool.Index.optional_type_type),
|
||||
manyptr_const_type_type = @intFromEnum(InternPool.Index.manyptr_const_type_type),
|
||||
slice_const_type_type = @intFromEnum(InternPool.Index.slice_const_type_type),
|
||||
vector_8_i8_type = @intFromEnum(InternPool.Index.vector_8_i8_type),
|
||||
vector_16_i8_type = @intFromEnum(InternPool.Index.vector_16_i8_type),
|
||||
vector_32_i8_type = @intFromEnum(InternPool.Index.vector_32_i8_type),
|
||||
|
||||
@ -1153,23 +1153,17 @@ const Local = struct {
|
||||
fn PtrArrayElem(comptime len: usize) type {
|
||||
const elem_info = @typeInfo(Elem).@"struct";
|
||||
const elem_fields = elem_info.fields;
|
||||
var new_fields: [elem_fields.len]std.builtin.Type.StructField = undefined;
|
||||
for (&new_fields, elem_fields) |*new_field, elem_field| {
|
||||
const T = *[len]elem_field.type;
|
||||
new_field.* = .{
|
||||
.name = elem_field.name,
|
||||
.type = T,
|
||||
.default_value_ptr = null,
|
||||
.is_comptime = false,
|
||||
.alignment = @alignOf(T),
|
||||
};
|
||||
var new_names: [elem_fields.len][]const u8 = undefined;
|
||||
var new_types: [elem_fields.len]type = undefined;
|
||||
for (elem_fields, &new_names, &new_types) |elem_field, *new_name, *NewType| {
|
||||
new_name.* = elem_field.name;
|
||||
NewType.* = *[len]elem_field.type;
|
||||
}
|
||||
if (elem_info.is_tuple) {
|
||||
return @Tuple(&new_types);
|
||||
} else {
|
||||
return @Struct(.auto, null, &new_names, &new_types, &@splat(.{}));
|
||||
}
|
||||
return @Type(.{ .@"struct" = .{
|
||||
.layout = .auto,
|
||||
.fields = &new_fields,
|
||||
.decls = &.{},
|
||||
.is_tuple = elem_info.is_tuple,
|
||||
} });
|
||||
}
|
||||
fn PtrElem(comptime opts: struct {
|
||||
size: std.builtin.Type.Pointer.Size,
|
||||
@ -1177,32 +1171,17 @@ const Local = struct {
|
||||
}) type {
|
||||
const elem_info = @typeInfo(Elem).@"struct";
|
||||
const elem_fields = elem_info.fields;
|
||||
var new_fields: [elem_fields.len]std.builtin.Type.StructField = undefined;
|
||||
for (&new_fields, elem_fields) |*new_field, elem_field| {
|
||||
const T = @Type(.{ .pointer = .{
|
||||
.size = opts.size,
|
||||
.is_const = opts.is_const,
|
||||
.is_volatile = false,
|
||||
.alignment = @alignOf(elem_field.type),
|
||||
.address_space = .generic,
|
||||
.child = elem_field.type,
|
||||
.is_allowzero = false,
|
||||
.sentinel_ptr = null,
|
||||
} });
|
||||
new_field.* = .{
|
||||
.name = elem_field.name,
|
||||
.type = T,
|
||||
.default_value_ptr = null,
|
||||
.is_comptime = false,
|
||||
.alignment = @alignOf(T),
|
||||
};
|
||||
var new_names: [elem_fields.len][]const u8 = undefined;
|
||||
var new_types: [elem_fields.len]type = undefined;
|
||||
for (elem_fields, &new_names, &new_types) |elem_field, *new_name, *NewType| {
|
||||
new_name.* = elem_field.name;
|
||||
NewType.* = @Pointer(opts.size, .{ .@"const" = opts.is_const }, elem_field.type, null);
|
||||
}
|
||||
if (elem_info.is_tuple) {
|
||||
return @Tuple(&new_types);
|
||||
} else {
|
||||
return @Struct(.auto, null, &new_names, &new_types, &@splat(.{}));
|
||||
}
|
||||
return @Type(.{ .@"struct" = .{
|
||||
.layout = .auto,
|
||||
.fields = &new_fields,
|
||||
.decls = &.{},
|
||||
.is_tuple = elem_info.is_tuple,
|
||||
} });
|
||||
}
|
||||
|
||||
pub fn addOne(mutable: Mutable) Allocator.Error!PtrElem(.{ .size = .one }) {
|
||||
@ -2017,8 +1996,7 @@ pub const Key = union(enum) {
|
||||
error_union_type: ErrorUnionType,
|
||||
simple_type: SimpleType,
|
||||
/// This represents a struct that has been explicitly declared in source code,
|
||||
/// or was created with `@Type`. It is unique and based on a declaration.
|
||||
/// It may be a tuple, if declared like this: `struct {A, B, C}`.
|
||||
/// or was created with `@Struct`. It is unique and based on a declaration.
|
||||
struct_type: NamespaceType,
|
||||
/// This is a tuple type. Tuples are logically similar to structs, but have some
|
||||
/// important differences in semantics; they do not undergo staged type resolution,
|
||||
@ -2175,7 +2153,7 @@ pub const Key = union(enum) {
|
||||
/// The union for which this is a tag type.
|
||||
union_type: Index,
|
||||
},
|
||||
/// This type originates from a reification via `@Type`, or from an anonymous initialization.
|
||||
/// This type originates from a reification via `@Enum`, `@Struct`, `@Union` or from an anonymous initialization.
|
||||
/// It is hashed based on its ZIR instruction index and fields, attributes, etc.
|
||||
/// To avoid making this key overly complex, the type-specific data is hashed by Sema.
|
||||
reified: struct {
|
||||
@ -4641,6 +4619,13 @@ pub const Index = enum(u32) {
|
||||
slice_const_u8_type,
|
||||
slice_const_u8_sentinel_0_type,
|
||||
|
||||
manyptr_const_slice_const_u8_type,
|
||||
slice_const_slice_const_u8_type,
|
||||
|
||||
optional_type_type,
|
||||
manyptr_const_type_type,
|
||||
slice_const_type_type,
|
||||
|
||||
vector_8_i8_type,
|
||||
vector_16_i8_type,
|
||||
vector_32_i8_type,
|
||||
@ -5201,6 +5186,45 @@ pub const static_keys: [static_len]Key = .{
|
||||
},
|
||||
} },
|
||||
|
||||
// [*]const []const u8
|
||||
.{ .ptr_type = .{
|
||||
.child = .slice_const_u8_type,
|
||||
.flags = .{
|
||||
.size = .many,
|
||||
.is_const = true,
|
||||
},
|
||||
} },
|
||||
|
||||
// []const []const u8
|
||||
.{ .ptr_type = .{
|
||||
.child = .slice_const_u8_type,
|
||||
.flags = .{
|
||||
.size = .slice,
|
||||
.is_const = true,
|
||||
},
|
||||
} },
|
||||
|
||||
// ?type
|
||||
.{ .opt_type = .type_type },
|
||||
|
||||
// [*]const type
|
||||
.{ .ptr_type = .{
|
||||
.child = .type_type,
|
||||
.flags = .{
|
||||
.size = .many,
|
||||
.is_const = true,
|
||||
},
|
||||
} },
|
||||
|
||||
// []const type
|
||||
.{ .ptr_type = .{
|
||||
.child = .type_type,
|
||||
.flags = .{
|
||||
.size = .slice,
|
||||
.is_const = true,
|
||||
},
|
||||
} },
|
||||
|
||||
// @Vector(8, i8)
|
||||
.{ .vector_type = .{ .len = 8, .child = .i8_type } },
|
||||
// @Vector(16, i8)
|
||||
@ -10225,16 +10249,8 @@ pub fn getGeneratedTagEnumType(
|
||||
}
|
||||
|
||||
pub const OpaqueTypeInit = struct {
|
||||
key: union(enum) {
|
||||
declared: struct {
|
||||
zir_index: TrackedInst.Index,
|
||||
captures: []const CaptureValue,
|
||||
},
|
||||
reified: struct {
|
||||
zir_index: TrackedInst.Index,
|
||||
// No type hash since reifid opaques have no data other than the `@Type` location
|
||||
},
|
||||
},
|
||||
};
|
||||
|
||||
pub fn getOpaqueType(
|
||||
@ -10243,16 +10259,10 @@ pub fn getOpaqueType(
|
||||
tid: Zcu.PerThread.Id,
|
||||
ini: OpaqueTypeInit,
|
||||
) Allocator.Error!WipNamespaceType.Result {
|
||||
var gop = try ip.getOrPutKey(gpa, tid, .{ .opaque_type = switch (ini.key) {
|
||||
.declared => |d| .{ .declared = .{
|
||||
.zir_index = d.zir_index,
|
||||
.captures = .{ .external = d.captures },
|
||||
} },
|
||||
.reified => |r| .{ .reified = .{
|
||||
.zir_index = r.zir_index,
|
||||
.type_hash = 0,
|
||||
} },
|
||||
} });
|
||||
var gop = try ip.getOrPutKey(gpa, tid, .{ .opaque_type = .{ .declared = .{
|
||||
.zir_index = ini.zir_index,
|
||||
.captures = .{ .external = ini.captures },
|
||||
} } });
|
||||
defer gop.deinit();
|
||||
if (gop == .existing) return .{ .existing = gop.existing };
|
||||
|
||||
@ -10261,30 +10271,19 @@ pub fn getOpaqueType(
|
||||
const extra = local.getMutableExtra(gpa);
|
||||
try items.ensureUnusedCapacity(1);
|
||||
|
||||
try extra.ensureUnusedCapacity(@typeInfo(Tag.TypeOpaque).@"struct".fields.len + switch (ini.key) {
|
||||
.declared => |d| d.captures.len,
|
||||
.reified => 0,
|
||||
});
|
||||
try extra.ensureUnusedCapacity(@typeInfo(Tag.TypeOpaque).@"struct".fields.len + ini.captures.len);
|
||||
const extra_index = addExtraAssumeCapacity(extra, Tag.TypeOpaque{
|
||||
.name = undefined, // set by `finish`
|
||||
.name_nav = undefined, // set by `finish`
|
||||
.namespace = undefined, // set by `finish`
|
||||
.zir_index = switch (ini.key) {
|
||||
inline else => |x| x.zir_index,
|
||||
},
|
||||
.captures_len = switch (ini.key) {
|
||||
.declared => |d| @intCast(d.captures.len),
|
||||
.reified => std.math.maxInt(u32),
|
||||
},
|
||||
.zir_index = ini.zir_index,
|
||||
.captures_len = @intCast(ini.captures.len),
|
||||
});
|
||||
items.appendAssumeCapacity(.{
|
||||
.tag = .type_opaque,
|
||||
.data = extra_index,
|
||||
});
|
||||
switch (ini.key) {
|
||||
.declared => |d| extra.appendSliceAssumeCapacity(.{@ptrCast(d.captures)}),
|
||||
.reified => {},
|
||||
}
|
||||
extra.appendSliceAssumeCapacity(.{@ptrCast(ini.captures)});
|
||||
return .{
|
||||
.wip = .{
|
||||
.tid = tid,
|
||||
@ -10555,6 +10554,8 @@ pub fn slicePtrType(ip: *const InternPool, index: Index) Index {
|
||||
switch (index) {
|
||||
.slice_const_u8_type => return .manyptr_const_u8_type,
|
||||
.slice_const_u8_sentinel_0_type => return .manyptr_const_u8_sentinel_0_type,
|
||||
.slice_const_slice_const_u8_type => return .manyptr_const_slice_const_u8_type,
|
||||
.slice_const_type_type => return .manyptr_const_type_type,
|
||||
else => {},
|
||||
}
|
||||
const item = index.unwrap(ip).getItem(ip);
|
||||
@ -12013,8 +12014,13 @@ pub fn typeOf(ip: *const InternPool, index: Index) Index {
|
||||
.manyptr_u8_type,
|
||||
.manyptr_const_u8_type,
|
||||
.manyptr_const_u8_sentinel_0_type,
|
||||
.manyptr_const_slice_const_u8_type,
|
||||
.slice_const_u8_type,
|
||||
.slice_const_u8_sentinel_0_type,
|
||||
.slice_const_slice_const_u8_type,
|
||||
.optional_type_type,
|
||||
.manyptr_const_type_type,
|
||||
.slice_const_type_type,
|
||||
.vector_8_i8_type,
|
||||
.vector_16_i8_type,
|
||||
.vector_32_i8_type,
|
||||
@ -12355,8 +12361,12 @@ pub fn zigTypeTag(ip: *const InternPool, index: Index) std.builtin.TypeId {
|
||||
.manyptr_u8_type,
|
||||
.manyptr_const_u8_type,
|
||||
.manyptr_const_u8_sentinel_0_type,
|
||||
.manyptr_const_slice_const_u8_type,
|
||||
.slice_const_u8_type,
|
||||
.slice_const_u8_sentinel_0_type,
|
||||
.slice_const_slice_const_u8_type,
|
||||
.manyptr_const_type_type,
|
||||
.slice_const_type_type,
|
||||
=> .pointer,
|
||||
|
||||
.vector_8_i8_type,
|
||||
@ -12408,6 +12418,7 @@ pub fn zigTypeTag(ip: *const InternPool, index: Index) std.builtin.TypeId {
|
||||
.vector_8_f64_type,
|
||||
=> .vector,
|
||||
|
||||
.optional_type_type => .optional,
|
||||
.optional_noreturn_type => .optional,
|
||||
.anyerror_void_error_union_type => .error_union,
|
||||
.empty_tuple_type => .@"struct",
|
||||
|
||||
src/Sema.zig (2505 changes; diff suppressed because it is too large)

src/Type.zig (10 changes)
@ -317,7 +317,7 @@ pub fn print(ty: Type, writer: *std.Io.Writer, pt: Zcu.PerThread, ctx: ?*Compari
.undefined,
=> try writer.print("@TypeOf({s})", .{@tagName(s)}),

.enum_literal => try writer.writeAll("@Type(.enum_literal)"),
.enum_literal => try writer.writeAll("@EnumLiteral()"),

.generic_poison => unreachable,
},
@ -3509,7 +3509,9 @@ pub fn typeDeclSrcLine(ty: Type, zcu: *Zcu) ?u32 {
|
||||
.union_decl => zir.extraData(Zir.Inst.UnionDecl, inst.data.extended.operand).data.src_line,
|
||||
.enum_decl => zir.extraData(Zir.Inst.EnumDecl, inst.data.extended.operand).data.src_line,
|
||||
.opaque_decl => zir.extraData(Zir.Inst.OpaqueDecl, inst.data.extended.operand).data.src_line,
|
||||
.reify => zir.extraData(Zir.Inst.Reify, inst.data.extended.operand).data.src_line,
|
||||
.reify_enum => zir.extraData(Zir.Inst.ReifyEnum, inst.data.extended.operand).data.src_line,
|
||||
.reify_struct => zir.extraData(Zir.Inst.ReifyStruct, inst.data.extended.operand).data.src_line,
|
||||
.reify_union => zir.extraData(Zir.Inst.ReifyUnion, inst.data.extended.operand).data.src_line,
|
||||
else => unreachable,
|
||||
},
|
||||
else => unreachable,
|
||||
@ -4280,6 +4282,10 @@ pub const manyptr_const_u8: Type = .{ .ip_index = .manyptr_const_u8_type };
|
||||
pub const manyptr_const_u8_sentinel_0: Type = .{ .ip_index = .manyptr_const_u8_sentinel_0_type };
|
||||
pub const slice_const_u8: Type = .{ .ip_index = .slice_const_u8_type };
|
||||
pub const slice_const_u8_sentinel_0: Type = .{ .ip_index = .slice_const_u8_sentinel_0_type };
|
||||
pub const slice_const_slice_const_u8: Type = .{ .ip_index = .slice_const_slice_const_u8_type };
|
||||
pub const slice_const_type: Type = .{ .ip_index = .slice_const_type_type };
|
||||
pub const optional_type: Type = .{ .ip_index = .optional_type_type };
|
||||
pub const optional_noreturn: Type = .{ .ip_index = .optional_noreturn_type };
|
||||
|
||||
pub const vector_8_i8: Type = .{ .ip_index = .vector_8_i8_type };
|
||||
pub const vector_16_i8: Type = .{ .ip_index = .vector_16_i8_type };
|
||||
|
||||
@ -2824,6 +2824,29 @@ pub fn resolveLazy(
|
||||
.val = resolved_val,
|
||||
}));
|
||||
},
|
||||
.error_union => |eu| switch (eu.val) {
|
||||
.err_name => return val,
|
||||
.payload => |payload| {
|
||||
const resolved_payload = try Value.fromInterned(payload).resolveLazy(arena, pt);
|
||||
if (resolved_payload.toIntern() == payload) return val;
|
||||
return .fromInterned(try pt.intern(.{ .error_union = .{
|
||||
.ty = eu.ty,
|
||||
.val = .{ .payload = resolved_payload.toIntern() },
|
||||
} }));
|
||||
},
|
||||
},
|
||||
.opt => |opt| switch (opt.val) {
|
||||
.none => return val,
|
||||
else => |payload| {
|
||||
const resolved_payload = try Value.fromInterned(payload).resolveLazy(arena, pt);
|
||||
if (resolved_payload.toIntern() == payload) return val;
|
||||
return .fromInterned(try pt.intern(.{ .opt = .{
|
||||
.ty = opt.ty,
|
||||
.val = resolved_payload.toIntern(),
|
||||
} }));
|
||||
},
|
||||
},
|
||||
|
||||
else => return val,
|
||||
}
|
||||
}
|
||||
|
||||
src/Zcu.zig (24 changes)
@ -416,10 +416,13 @@ pub const BuiltinDecl = enum {
|
||||
Type,
|
||||
@"Type.Fn",
|
||||
@"Type.Fn.Param",
|
||||
@"Type.Fn.Param.Attributes",
|
||||
@"Type.Fn.Attributes",
|
||||
@"Type.Int",
|
||||
@"Type.Float",
|
||||
@"Type.Pointer",
|
||||
@"Type.Pointer.Size",
|
||||
@"Type.Pointer.Attributes",
|
||||
@"Type.Array",
|
||||
@"Type.Vector",
|
||||
@"Type.Optional",
|
||||
@ -427,10 +430,13 @@ pub const BuiltinDecl = enum {
|
||||
@"Type.ErrorUnion",
|
||||
@"Type.EnumField",
|
||||
@"Type.Enum",
|
||||
@"Type.Enum.Mode",
|
||||
@"Type.Union",
|
||||
@"Type.UnionField",
|
||||
@"Type.UnionField.Attributes",
|
||||
@"Type.Struct",
|
||||
@"Type.StructField",
|
||||
@"Type.StructField.Attributes",
|
||||
@"Type.ContainerLayout",
|
||||
@"Type.Opaque",
|
||||
@"Type.Declaration",
|
||||
@ -495,10 +501,13 @@ pub const BuiltinDecl = enum {
|
||||
.Type,
|
||||
.@"Type.Fn",
|
||||
.@"Type.Fn.Param",
|
||||
.@"Type.Fn.Param.Attributes",
|
||||
.@"Type.Fn.Attributes",
|
||||
.@"Type.Int",
|
||||
.@"Type.Float",
|
||||
.@"Type.Pointer",
|
||||
.@"Type.Pointer.Size",
|
||||
.@"Type.Pointer.Attributes",
|
||||
.@"Type.Array",
|
||||
.@"Type.Vector",
|
||||
.@"Type.Optional",
|
||||
@ -506,10 +515,13 @@ pub const BuiltinDecl = enum {
|
||||
.@"Type.ErrorUnion",
|
||||
.@"Type.EnumField",
|
||||
.@"Type.Enum",
|
||||
.@"Type.Enum.Mode",
|
||||
.@"Type.Union",
|
||||
.@"Type.UnionField",
|
||||
.@"Type.UnionField.Attributes",
|
||||
.@"Type.Struct",
|
||||
.@"Type.StructField",
|
||||
.@"Type.StructField.Attributes",
|
||||
.@"Type.ContainerLayout",
|
||||
.@"Type.Opaque",
|
||||
.@"Type.Declaration",
|
||||
@ -1745,28 +1757,28 @@ pub const SrcLoc = struct {
|
||||
const node = node_off.toAbsolute(src_loc.base_node);
|
||||
var buf: [1]Ast.Node.Index = undefined;
|
||||
const full = tree.fullFnProto(&buf, node).?;
|
||||
return tree.nodeToSpan(full.ast.align_expr.unwrap().?);
|
||||
return tree.nodeToSpan(full.ast.align_expr.unwrap() orelse node);
|
||||
},
|
||||
.node_offset_fn_type_addrspace => |node_off| {
|
||||
const tree = try src_loc.file_scope.getTree(zcu);
|
||||
const node = node_off.toAbsolute(src_loc.base_node);
|
||||
var buf: [1]Ast.Node.Index = undefined;
|
||||
const full = tree.fullFnProto(&buf, node).?;
|
||||
return tree.nodeToSpan(full.ast.addrspace_expr.unwrap().?);
|
||||
return tree.nodeToSpan(full.ast.addrspace_expr.unwrap() orelse node);
|
||||
},
|
||||
.node_offset_fn_type_section => |node_off| {
|
||||
const tree = try src_loc.file_scope.getTree(zcu);
|
||||
const node = node_off.toAbsolute(src_loc.base_node);
|
||||
var buf: [1]Ast.Node.Index = undefined;
|
||||
const full = tree.fullFnProto(&buf, node).?;
|
||||
return tree.nodeToSpan(full.ast.section_expr.unwrap().?);
|
||||
return tree.nodeToSpan(full.ast.section_expr.unwrap() orelse node);
|
||||
},
|
||||
.node_offset_fn_type_cc => |node_off| {
|
||||
const tree = try src_loc.file_scope.getTree(zcu);
|
||||
const node = node_off.toAbsolute(src_loc.base_node);
|
||||
var buf: [1]Ast.Node.Index = undefined;
|
||||
const full = tree.fullFnProto(&buf, node).?;
|
||||
return tree.nodeToSpan(full.ast.callconv_expr.unwrap().?);
|
||||
return tree.nodeToSpan(full.ast.callconv_expr.unwrap() orelse node);
|
||||
},
|
||||
|
||||
.node_offset_fn_type_ret_ty => |node_off| {
|
||||
@ -2684,7 +2696,9 @@ pub const LazySrcLoc = struct {
|
||||
.union_decl => zir.extraData(Zir.Inst.UnionDecl, inst.data.extended.operand).data.src_node,
|
||||
.enum_decl => zir.extraData(Zir.Inst.EnumDecl, inst.data.extended.operand).data.src_node,
|
||||
.opaque_decl => zir.extraData(Zir.Inst.OpaqueDecl, inst.data.extended.operand).data.src_node,
|
||||
.reify => zir.extraData(Zir.Inst.Reify, inst.data.extended.operand).data.node,
|
||||
.reify_enum => zir.extraData(Zir.Inst.ReifyEnum, inst.data.extended.operand).data.node,
|
||||
.reify_struct => zir.extraData(Zir.Inst.ReifyStruct, inst.data.extended.operand).data.node,
|
||||
.reify_union => zir.extraData(Zir.Inst.ReifyUnion, inst.data.extended.operand).data.node,
|
||||
else => unreachable,
|
||||
},
|
||||
else => unreachable,
|
||||
|
||||
@ -120,23 +120,13 @@ const matchers = matchers: {
|
||||
);
|
||||
var symbols: Symbols: {
|
||||
const symbols = @typeInfo(@TypeOf(instruction.symbols)).@"struct".fields;
|
||||
var symbol_fields: [symbols.len]std.builtin.Type.StructField = undefined;
|
||||
for (&symbol_fields, symbols) |*symbol_field, symbol| {
|
||||
const Storage = zonCast(SymbolSpec, @field(instruction.symbols, symbol.name), .{}).Storage();
|
||||
symbol_field.* = .{
|
||||
.name = symbol.name,
|
||||
.type = Storage,
|
||||
.default_value_ptr = null,
|
||||
.is_comptime = false,
|
||||
.alignment = @alignOf(Storage),
|
||||
};
|
||||
var field_names: [symbols.len][]const u8 = undefined;
|
||||
var field_types: [symbols.len]type = undefined;
|
||||
for (symbols, &field_names, &field_types) |symbol, *field_name, *FieldType| {
|
||||
field_name.* = symbol.name;
|
||||
FieldType.* = zonCast(SymbolSpec, @field(instruction.symbols, symbol.name), .{}).Storage();
|
||||
}
|
||||
break :Symbols @Type(.{ .@"struct" = .{
|
||||
.layout = .auto,
|
||||
.fields = &symbol_fields,
|
||||
.decls = &.{},
|
||||
.is_tuple = false,
|
||||
} });
|
||||
break :Symbols @Struct(.auto, null, &field_names, &field_types, &@splat(.{}));
|
||||
} = undefined;
|
||||
const Symbol = std.meta.FieldEnum(@TypeOf(instruction.symbols));
|
||||
comptime var unused_symbols: std.enums.EnumSet(Symbol) = .initFull();
|
||||
@ -334,7 +324,7 @@ const SymbolSpec = union(enum) {
|
||||
.reg => aarch64.encoding.Register,
|
||||
.arrangement => aarch64.encoding.Register.Arrangement,
|
||||
.systemreg => aarch64.encoding.Register.System,
|
||||
.imm => |imm_spec| @Type(.{ .int = imm_spec.type }),
|
||||
.imm => |imm_spec| @Int(imm_spec.type.signedness, imm_spec.type.bits),
|
||||
.fimm => f16,
|
||||
.extend => Instruction.DataProcessingRegister.AddSubtractExtendedRegister.Option,
|
||||
.shift => Instruction.DataProcessingRegister.Shift.Op,
|
||||
@ -413,13 +403,13 @@ const SymbolSpec = union(enum) {
|
||||
return systemreg;
|
||||
},
|
||||
.imm => |imm_spec| {
|
||||
const imm = std.fmt.parseInt(@Type(.{ .int = .{
|
||||
.signedness = imm_spec.type.signedness,
|
||||
.bits = switch (imm_spec.adjust) {
|
||||
const imm = std.fmt.parseInt(@Int(
|
||||
imm_spec.type.signedness,
|
||||
switch (imm_spec.adjust) {
|
||||
.none, .neg_wrap => imm_spec.type.bits,
|
||||
.dec => imm_spec.type.bits + 1,
|
||||
},
|
||||
} }), token, 0) catch {
|
||||
), token, 0) catch {
|
||||
log.debug("invalid immediate: \"{f}\"", .{std.zig.fmtString(token)});
|
||||
return null;
|
||||
};
|
||||
|
||||
@ -8928,12 +8928,16 @@ pub const Value = struct {
|
||||
constant: Constant,
|
||||
|
||||
pub const Tag = @typeInfo(Parent).@"union".tag_type.?;
|
||||
pub const Payload = @Type(.{ .@"union" = .{
|
||||
.layout = .auto,
|
||||
.tag_type = null,
|
||||
.fields = @typeInfo(Parent).@"union".fields,
|
||||
.decls = &.{},
|
||||
} });
|
||||
pub const Payload = Payload: {
|
||||
const fields = @typeInfo(Parent).@"union".fields;
|
||||
var types: [fields.len]type = undefined;
|
||||
var names: [fields.len][]const u8 = undefined;
|
||||
for (fields, &types, &names) |f, *ty, *name| {
|
||||
ty.* = f.type;
|
||||
name.* = f.name;
|
||||
}
|
||||
break :Payload @Union(.auto, null, &names, &types, &@splat(.{}));
|
||||
};
|
||||
};
|
||||
|
||||
pub const Location = union(enum(u1)) {
|
||||
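For comparison, a minimal hedged sketch of the `@Union(layout, tag_type, field_names, field_types, field_attrs)` shape used in the `Payload` rewrite above; the field names and types here are illustrative, and attribute semantics beyond the empty default are assumptions:

```zig
// Mirrors the Payload construction above: a bare (untagged) union built at
// comptime from parallel name/type slices.
const Number = @Union(.auto, null, &.{ "int", "float" }, &.{ i64, f64 }, &@splat(.{}));

fn readInt(n: Number) i64 {
    // With a null tag type the caller must know which field is active.
    return n.int;
}
```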
@ -8949,12 +8953,16 @@ pub const Value = struct {
|
||||
},
|
||||
|
||||
pub const Tag = @typeInfo(Location).@"union".tag_type.?;
|
||||
pub const Payload = @Type(.{ .@"union" = .{
|
||||
.layout = .auto,
|
||||
.tag_type = null,
|
||||
.fields = @typeInfo(Location).@"union".fields,
|
||||
.decls = &.{},
|
||||
} });
|
||||
pub const Payload = Payload: {
|
||||
const fields = @typeInfo(Location).@"union".fields;
|
||||
var types: [fields.len]type = undefined;
|
||||
var names: [fields.len][]const u8 = undefined;
|
||||
for (fields, &types, &names) |f, *ty, *name| {
|
||||
ty.* = f.type;
|
||||
name.* = f.name;
|
||||
}
|
||||
break :Payload @Union(.auto, null, &names, &types, &@splat(.{}));
|
||||
};
|
||||
};
|
||||
|
||||
pub const Indirect = packed struct(u32) {
|
||||
@ -11210,7 +11218,7 @@ pub const Value = struct {
|
||||
.storage = .{ .u64 = switch (size) {
|
||||
else => unreachable,
|
||||
inline 1...8 => |ct_size| std.mem.readInt(
|
||||
@Type(.{ .int = .{ .signedness = .unsigned, .bits = 8 * ct_size } }),
|
||||
@Int(.unsigned, 8 * ct_size),
|
||||
buffer[@intCast(offset)..][0..ct_size],
|
||||
isel.target.cpu.arch.endian(),
|
||||
),
|
||||
@ -11438,7 +11446,7 @@ fn writeKeyToMemory(isel: *Select, constant_key: InternPool.Key, buffer: []u8) e
|
||||
switch (buffer.len) {
|
||||
else => unreachable,
|
||||
inline 1...4 => |size| std.mem.writeInt(
|
||||
@Type(.{ .int = .{ .signedness = .unsigned, .bits = 8 * size } }),
|
||||
@Int(.unsigned, 8 * size),
|
||||
buffer[0..size],
|
||||
@intCast(error_int),
|
||||
isel.target.cpu.arch.endian(),
|
||||
|
||||
@ -5672,6 +5672,10 @@ fn airAsm(f: *Function, inst: Air.Inst.Index) !CValue {
|
||||
c_name_buf[0] = '$';
|
||||
@memcpy((&c_name_buf)[1..][0..field_name.len], field_name);
|
||||
break :name (&c_name_buf)[0 .. 1 + field_name.len];
|
||||
} else if (target.cpu.arch.isSPARC() and
|
||||
(mem.eql(u8, field_name, "ccr") or mem.eql(u8, field_name, "icc") or mem.eql(u8, field_name, "xcc"))) name: {
|
||||
// C compilers just use `icc` to encompass all of these.
|
||||
break :name "icc";
|
||||
} else field_name;
|
||||
|
||||
try w.print(" {f}", .{fmtStringLiteral(name, null)});
|
||||
|
||||
@ -1416,6 +1416,9 @@ pub const Pool = struct {
|
||||
.null_type,
|
||||
.undefined_type,
|
||||
.enum_literal_type,
|
||||
.optional_type_type,
|
||||
.manyptr_const_type_type,
|
||||
.slice_const_type_type,
|
||||
=> return .void,
|
||||
.u1_type, .u8_type => return .u8,
|
||||
.i8_type => return .i8,
|
||||
@ -1525,6 +1528,73 @@ pub const Pool = struct {
|
||||
return pool.fromFields(allocator, .@"struct", &fields, kind);
|
||||
},
|
||||
|
||||
.manyptr_const_slice_const_u8_type => {
|
||||
const target = &mod.resolved_target.result;
|
||||
var fields: [2]Info.Field = .{
|
||||
.{
|
||||
.name = .{ .index = .ptr },
|
||||
.ctype = try pool.getPointer(allocator, .{
|
||||
.elem_ctype = .u8,
|
||||
.@"const" = true,
|
||||
.nonstring = true,
|
||||
}),
|
||||
.alignas = AlignAs.fromAbiAlignment(Type.ptrAbiAlignment(target)),
|
||||
},
|
||||
.{
|
||||
.name = .{ .index = .len },
|
||||
.ctype = .usize,
|
||||
.alignas = AlignAs.fromAbiAlignment(
|
||||
.fromByteUnits(std.zig.target.intAlignment(target, target.ptrBitWidth())),
|
||||
),
|
||||
},
|
||||
};
|
||||
const slice_const_u8 = try pool.fromFields(allocator, .@"struct", &fields, kind);
|
||||
return pool.getPointer(allocator, .{
|
||||
.elem_ctype = slice_const_u8,
|
||||
.@"const" = true,
|
||||
});
|
||||
},
|
||||
.slice_const_slice_const_u8_type => {
|
||||
const target = &mod.resolved_target.result;
|
||||
var fields: [2]Info.Field = .{
|
||||
.{
|
||||
.name = .{ .index = .ptr },
|
||||
.ctype = try pool.getPointer(allocator, .{
|
||||
.elem_ctype = .u8,
|
||||
.@"const" = true,
|
||||
.nonstring = true,
|
||||
}),
|
||||
.alignas = AlignAs.fromAbiAlignment(Type.ptrAbiAlignment(target)),
|
||||
},
|
||||
.{
|
||||
.name = .{ .index = .len },
|
||||
.ctype = .usize,
|
||||
.alignas = AlignAs.fromAbiAlignment(
|
||||
.fromByteUnits(std.zig.target.intAlignment(target, target.ptrBitWidth())),
|
||||
),
|
||||
},
|
||||
};
|
||||
const slice_const_u8 = try pool.fromFields(allocator, .@"struct", &fields, .forward);
|
||||
fields = .{
|
||||
.{
|
||||
.name = .{ .index = .ptr },
|
||||
.ctype = try pool.getPointer(allocator, .{
|
||||
.elem_ctype = slice_const_u8,
|
||||
.@"const" = true,
|
||||
}),
|
||||
.alignas = AlignAs.fromAbiAlignment(Type.ptrAbiAlignment(target)),
|
||||
},
|
||||
.{
|
||||
.name = .{ .index = .len },
|
||||
.ctype = .usize,
|
||||
.alignas = AlignAs.fromAbiAlignment(
|
||||
.fromByteUnits(std.zig.target.intAlignment(target, target.ptrBitWidth())),
|
||||
),
|
||||
},
|
||||
};
|
||||
return pool.fromFields(allocator, .@"struct", &fields, kind);
|
||||
},
|
||||
|
||||
.vector_8_i8_type => {
|
||||
const vector_ctype = try pool.getVector(allocator, .{
|
||||
.elem_ctype = .i8,
|
||||
|
||||
@ -189867,9 +189867,7 @@ const Select = struct {
|
||||
}
|
||||
|
||||
fn adjustedImm(op: Select.Operand, comptime SignedImm: type, s: *const Select) SignedImm {
|
||||
const UnsignedImm = @Type(.{
|
||||
.int = .{ .signedness = .unsigned, .bits = @typeInfo(SignedImm).int.bits },
|
||||
});
|
||||
const UnsignedImm = @Int(.unsigned, @typeInfo(SignedImm).int.bits);
|
||||
const lhs: SignedImm = lhs: switch (op.flags.adjust.lhs) {
|
||||
.none => 0,
|
||||
.ptr_size => @divExact(s.cg.target.ptrBitWidth(), 8),
|
||||
@ -189934,10 +189932,10 @@ const Select = struct {
|
||||
const RefImm = switch (size) {
|
||||
else => comptime unreachable,
|
||||
.none => Imm,
|
||||
.byte, .word, .dword, .qword => @Type(comptime .{ .int = .{
|
||||
.signedness = @typeInfo(Imm).int.signedness,
|
||||
.bits = size.bitSize(undefined),
|
||||
} }),
|
||||
.byte, .word, .dword, .qword => @Int(
|
||||
@typeInfo(Imm).int.signedness,
|
||||
size.bitSize(undefined),
|
||||
),
|
||||
};
|
||||
break :lhs @bitCast(@as(Imm, @intCast(@as(RefImm, switch (adjust) {
|
||||
else => comptime unreachable,
|
||||
|
||||
@ -708,7 +708,7 @@ pub fn emitMir(emit: *Emit) Error!void {
|
||||
switch (reloc.source_length) {
|
||||
else => unreachable,
|
||||
inline 1, 4 => |source_length| std.mem.writeInt(
|
||||
@Type(.{ .int = .{ .signedness = .signed, .bits = @as(u16, 8) * source_length } }),
|
||||
@Int(.signed, @as(u16, 8) * source_length),
|
||||
inst_bytes[reloc.source_offset..][0..source_length],
|
||||
@intCast(disp),
|
||||
.little,
|
||||
|
||||
@ -51,10 +51,7 @@ pub const Diags = struct {
|
||||
|
||||
const Int = blk: {
|
||||
const bits = @typeInfo(@This()).@"struct".fields.len;
|
||||
break :blk @Type(.{ .int = .{
|
||||
.signedness = .unsigned,
|
||||
.bits = bits,
|
||||
} });
|
||||
break :blk @Int(.unsigned, bits);
|
||||
};
|
||||
|
||||
pub fn anySet(ef: Flags) bool {
|
||||
|
||||
@ -4490,7 +4490,12 @@ fn updateContainerTypeWriterError(
|
||||
.enum_decl => @as(Zir.Inst.EnumDecl.Small, @bitCast(decl_inst.data.extended.small)).name_strategy,
|
||||
.union_decl => @as(Zir.Inst.UnionDecl.Small, @bitCast(decl_inst.data.extended.small)).name_strategy,
|
||||
.opaque_decl => @as(Zir.Inst.OpaqueDecl.Small, @bitCast(decl_inst.data.extended.small)).name_strategy,
|
||||
.reify => @as(Zir.Inst.NameStrategy, @enumFromInt(decl_inst.data.extended.small)),
|
||||
|
||||
.reify_enum,
|
||||
.reify_struct,
|
||||
.reify_union,
|
||||
=> @enumFromInt(decl_inst.data.extended.small),
|
||||
|
||||
else => unreachable,
|
||||
},
|
||||
else => unreachable,
|
||||
@ -5125,25 +5130,23 @@ pub fn resolveRelocs(dwarf: *Dwarf) RelocError!void {

fn DeclValEnum(comptime T: type) type {
const decls = @typeInfo(T).@"struct".decls;
@setEvalBranchQuota(7 * decls.len);
var fields: [decls.len]std.builtin.Type.EnumField = undefined;
@setEvalBranchQuota(10 * decls.len);
var field_names: [decls.len][]const u8 = undefined;
var fields_len = 0;
var min_value: ?comptime_int = null;
var max_value: ?comptime_int = null;
for (decls) |decl| {
if (std.mem.startsWith(u8, decl.name, "HP_") or std.mem.endsWith(u8, decl.name, "_user")) continue;
const value = @field(T, decl.name);
fields[fields_len] = .{ .name = decl.name, .value = value };
field_names[fields_len] = decl.name;
fields_len += 1;
if (min_value == null or min_value.? > value) min_value = value;
if (max_value == null or max_value.? < value) max_value = value;
}
return @Type(.{ .@"enum" = .{
.tag_type = std.math.IntFittingRange(min_value orelse 0, max_value orelse 0),
.fields = fields[0..fields_len],
.decls = &.{},
.is_exhaustive = true,
} });
const TagInt = std.math.IntFittingRange(min_value orelse 0, max_value orelse 0);
var field_vals: [fields_len]TagInt = undefined;
for (field_names[0..fields_len], &field_vals) |name, *val| val.* = @field(T, name);
return @Enum(TagInt, .exhaustive, field_names[0..fields_len], &field_vals);
}

const AbbrevCode = enum {
@ -6377,10 +6380,12 @@ fn freeCommonEntry(

fn writeInt(dwarf: *Dwarf, buf: []u8, int: u64) void {
switch (buf.len) {
inline 0...8 => |len| std.mem.writeInt(@Type(.{ .int = .{
.signedness = .unsigned,
.bits = len * 8,
} }), buf[0..len], @intCast(int), dwarf.endian),
inline 0...8 => |len| std.mem.writeInt(
@Int(.unsigned, len * 8),
buf[0..len],
@intCast(int),
dwarf.endian,
),
else => unreachable,
}
}

@ -108,10 +108,7 @@ pub const Node = extern struct {
has_content: bool,
/// Whether a moved event on this node bubbles down to children.
bubbles_moved: bool,
unused: @Type(.{ .int = .{
.signedness = .unsigned,
.bits = 32 - @bitSizeOf(std.mem.Alignment) - 6,
} }) = 0,
unused: @Int(.unsigned, 32 - @bitSizeOf(std.mem.Alignment) - 6) = 0,
};

pub const Location = union(enum(u1)) {
@ -122,19 +119,14 @@ pub const Node = extern struct {
},
large: extern struct {
index: usize,
unused: @Type(.{ .int = .{
.signedness = .unsigned,
.bits = 64 - @bitSizeOf(usize),
} }) = 0,
unused: @Int(.unsigned, 64 - @bitSizeOf(usize)) = 0,
},

pub const Tag = @typeInfo(Location).@"union".tag_type.?;
pub const Payload = @Type(.{ .@"union" = .{
.layout = .@"extern",
.tag_type = null,
.fields = @typeInfo(Location).@"union".fields,
.decls = &.{},
} });
pub const Payload = extern union {
small: @FieldType(Location, "small"),
large: @FieldType(Location, "large"),
};

pub fn resolve(loc: Location, mf: *const MappedFile) [2]u64 {
return switch (loc) {

@ -136,7 +136,7 @@ var log_scopes: std.ArrayList([]const u8) = .empty;

pub fn log(
comptime level: std.log.Level,
comptime scope: @Type(.enum_literal),
comptime scope: @EnumLiteral(),
comptime format: []const u8,
args: anytype,
) void {

@ -399,6 +399,7 @@ const Writer = struct {
|
||||
.splat,
|
||||
.reduce,
|
||||
.bitcast,
|
||||
.reify_int,
|
||||
.vector_type,
|
||||
.max,
|
||||
.min,
|
||||
@ -568,6 +569,8 @@ const Writer = struct {
|
||||
.work_group_id,
|
||||
.branch_hint,
|
||||
.float_op_result_ty,
|
||||
.reify_tuple,
|
||||
.reify_pointer_sentinel_ty,
|
||||
=> {
|
||||
const inst_data = self.code.extraData(Zir.Inst.UnNode, extended.operand).data;
|
||||
try self.writeInstRef(stream, inst_data.operand);
|
||||
@ -575,23 +578,13 @@ const Writer = struct {
|
||||
try self.writeSrcNode(stream, inst_data.node);
|
||||
},
|
||||
|
||||
.reify => {
|
||||
const inst_data = self.code.extraData(Zir.Inst.Reify, extended.operand).data;
|
||||
try stream.print("line({d}), ", .{inst_data.src_line});
|
||||
try self.writeInstRef(stream, inst_data.operand);
|
||||
try stream.writeAll(")) ");
|
||||
const prev_parent_decl_node = self.parent_decl_node;
|
||||
self.parent_decl_node = inst_data.node;
|
||||
defer self.parent_decl_node = prev_parent_decl_node;
|
||||
try self.writeSrcNode(stream, .zero);
|
||||
},
|
||||
|
||||
.builtin_extern,
|
||||
.c_define,
|
||||
.error_cast,
|
||||
.wasm_memory_grow,
|
||||
.prefetch,
|
||||
.c_va_arg,
|
||||
.reify_enum_value_slice_ty,
|
||||
=> {
|
||||
const inst_data = self.code.extraData(Zir.Inst.BinNode, extended.operand).data;
|
||||
try self.writeInstRef(stream, inst_data.lhs);
|
||||
@ -601,6 +594,95 @@ const Writer = struct {
|
||||
try self.writeSrcNode(stream, inst_data.node);
|
||||
},
|
||||
|
||||
.reify_slice_arg_ty => {
|
||||
const reify_slice_arg_info: Zir.Inst.ReifySliceArgInfo = @enumFromInt(extended.operand);
|
||||
const extra = self.code.extraData(Zir.Inst.UnNode, extended.operand).data;
|
||||
try stream.print("{t}, ", .{reify_slice_arg_info});
|
||||
try self.writeInstRef(stream, extra.operand);
|
||||
try stream.writeAll(")) ");
|
||||
try self.writeSrcNode(stream, extra.node);
|
||||
},
|
||||
|
||||
.reify_pointer => {
|
||||
const extra = self.code.extraData(Zir.Inst.ReifyPointer, extended.operand).data;
|
||||
try self.writeInstRef(stream, extra.size);
|
||||
try stream.writeAll(", ");
|
||||
try self.writeInstRef(stream, extra.attrs);
|
||||
try stream.writeAll(", ");
|
||||
try self.writeInstRef(stream, extra.elem_ty);
|
||||
try stream.writeAll(", ");
|
||||
try self.writeInstRef(stream, extra.sentinel);
|
||||
try stream.writeAll(")) ");
|
||||
try self.writeSrcNode(stream, extra.node);
|
||||
},
|
||||
.reify_fn => {
|
||||
const extra = self.code.extraData(Zir.Inst.ReifyFn, extended.operand).data;
|
||||
try self.writeInstRef(stream, extra.param_types);
|
||||
try stream.writeAll(", ");
|
||||
try self.writeInstRef(stream, extra.param_attrs);
|
||||
try stream.writeAll(", ");
|
||||
try self.writeInstRef(stream, extra.ret_ty);
|
||||
try stream.writeAll(", ");
|
||||
try self.writeInstRef(stream, extra.fn_attrs);
|
||||
try stream.writeAll(")) ");
|
||||
try self.writeSrcNode(stream, extra.node);
|
||||
},
|
||||
.reify_struct => {
|
||||
const extra = self.code.extraData(Zir.Inst.ReifyStruct, extended.operand).data;
|
||||
const name_strat: Zir.Inst.NameStrategy = @enumFromInt(extended.small);
|
||||
try stream.print("line({d}), {t}, ", .{ extra.src_line, name_strat });
|
||||
try self.writeInstRef(stream, extra.layout);
|
||||
try stream.writeAll(", ");
|
||||
try self.writeInstRef(stream, extra.backing_ty);
|
||||
try stream.writeAll(", ");
|
||||
try self.writeInstRef(stream, extra.field_names);
|
||||
try stream.writeAll(", ");
|
||||
try self.writeInstRef(stream, extra.field_types);
|
||||
try stream.writeAll(", ");
|
||||
try self.writeInstRef(stream, extra.field_attrs);
|
||||
try stream.writeAll(")) ");
|
||||
const prev_parent_decl_node = self.parent_decl_node;
|
||||
self.parent_decl_node = extra.node;
|
||||
defer self.parent_decl_node = prev_parent_decl_node;
|
||||
try self.writeSrcNode(stream, .zero);
|
||||
},
|
||||
.reify_union => {
|
||||
const extra = self.code.extraData(Zir.Inst.ReifyUnion, extended.operand).data;
|
||||
const name_strat: Zir.Inst.NameStrategy = @enumFromInt(extended.small);
|
||||
try stream.print("line({d}), {t}, ", .{ extra.src_line, name_strat });
|
||||
try self.writeInstRef(stream, extra.layout);
|
||||
try stream.writeAll(", ");
|
||||
try self.writeInstRef(stream, extra.arg_ty);
|
||||
try stream.writeAll(", ");
|
||||
try self.writeInstRef(stream, extra.field_names);
|
||||
try stream.writeAll(", ");
|
||||
try self.writeInstRef(stream, extra.field_types);
|
||||
try stream.writeAll(", ");
|
||||
try self.writeInstRef(stream, extra.field_attrs);
|
||||
try stream.writeAll(")) ");
|
||||
const prev_parent_decl_node = self.parent_decl_node;
|
||||
self.parent_decl_node = extra.node;
|
||||
defer self.parent_decl_node = prev_parent_decl_node;
|
||||
try self.writeSrcNode(stream, .zero);
|
||||
},
|
||||
.reify_enum => {
|
||||
const extra = self.code.extraData(Zir.Inst.ReifyEnum, extended.operand).data;
|
||||
const name_strat: Zir.Inst.NameStrategy = @enumFromInt(extended.small);
|
||||
try stream.print("line({d}), {t}, ", .{ extra.src_line, name_strat });
|
||||
try self.writeInstRef(stream, extra.tag_ty);
|
||||
try stream.writeAll(", ");
|
||||
try self.writeInstRef(stream, extra.mode);
|
||||
try stream.writeAll(", ");
|
||||
try self.writeInstRef(stream, extra.field_names);
|
||||
try stream.writeAll(", ");
|
||||
try self.writeInstRef(stream, extra.field_values);
|
||||
try stream.writeAll(")) ");
|
||||
const prev_parent_decl_node = self.parent_decl_node;
|
||||
self.parent_decl_node = extra.node;
|
||||
defer self.parent_decl_node = prev_parent_decl_node;
|
||||
try self.writeSrcNode(stream, .zero);
|
||||
},
|
||||
|
||||
.cmpxchg => try self.writeCmpxchg(stream, extended),
|
||||
.ptr_cast_full => try self.writePtrCastFull(stream, extended),
|
||||
.ptr_cast_no_dest => try self.writePtrCastNoDest(stream, extended),
|
||||
|
||||
BIN stage1/zig1.wasm
Binary file not shown.
@ -1264,12 +1264,9 @@ test "reference to inferred local variable works as expected" {
try expect(crasher_local.lets_crash != a.lets_crash);
}

test "@Type returned from block" {
test "@Int returned from block" {
const T = comptime b: {
break :b @Type(.{ .int = .{
.signedness = .unsigned,
.bits = 8,
} });
break :b @Int(.unsigned, 8);
};
try std.testing.expect(T == u8);
}

@ -119,21 +119,12 @@ test "Saturating Shift Left where lhs is of a computed type" {
if (builtin.zig_backend == .stage2_riscv64) return error.SkipZigTest;

const S = struct {
fn getIntShiftType(comptime T: type) type {
var unsigned_shift_type = @typeInfo(std.math.Log2Int(T)).int;
unsigned_shift_type.signedness = .signed;

return @Type(.{
.int = unsigned_shift_type,
});
}

pub fn FixedPoint(comptime ValueType: type) type {
return struct {
value: ValueType,
exponent: ShiftType,

const ShiftType: type = getIntShiftType(ValueType);
const ShiftType = @Int(.signed, @typeInfo(std.math.Log2Int(ValueType)).int.bits);

pub fn shiftExponent(self: @This(), shift: ShiftType) @This() {
const shiftAbs = @abs(shift);

@ -355,7 +355,7 @@ test "inline call doesn't re-evaluate non generic struct" {
try comptime @call(.always_inline, S.foo, ArgTuple{.{ .a = 123, .b = 45 }});
}

test "Enum constructed by @Type passed as generic argument" {
test "Enum constructed by @Enum passed as generic argument" {
const S = struct {
const E = std.meta.FieldEnum(struct {
prev_pos: bool,

@ -2446,9 +2446,14 @@ test "peer type resolution: pointer attributes are combined correctly" {
};

const NonAllowZero = comptime blk: {
var ti = @typeInfo(@TypeOf(r1, r2, r3, r4));
ti.pointer.is_allowzero = false;
break :blk @Type(ti);
const ptr = @typeInfo(@TypeOf(r1, r2, r3, r4)).pointer;
break :blk @Pointer(ptr.size, .{
.@"const" = ptr.is_const,
.@"volatile" = ptr.is_volatile,
.@"allowzero" = false,
.@"align" = ptr.alignment,
.@"addrspace" = ptr.address_space,
}, ptr.child, ptr.sentinel());
};
try expectEqualSlices(u8, std.mem.span(@volatileCast(@as(NonAllowZero, @ptrCast(r1)))), "foo");
try expectEqualSlices(u8, std.mem.span(@volatileCast(@as(NonAllowZero, @ptrCast(r2)))), "bar");

@ -1283,10 +1283,7 @@ test "Non-exhaustive enum backed by comptime_int" {
test "matching captures causes enum equivalence" {
const S = struct {
fn Nonexhaustive(comptime I: type) type {
const UTag = @Type(.{ .int = .{
.signedness = .unsigned,
.bits = @typeInfo(I).int.bits,
} });
const UTag = @Int(.unsigned, @typeInfo(I).int.bits);
return enum(UTag) { _ };
}
};

@ -556,10 +556,10 @@ test "lazy values passed to anytype parameter" {

test "pass and return comptime-only types" {
const S = struct {
fn returnNull(comptime x: @Type(.null)) @Type(.null) {
fn returnNull(comptime x: @TypeOf(null)) @TypeOf(null) {
return x;
}
fn returnUndefined(comptime x: @Type(.undefined)) @Type(.undefined) {
fn returnUndefined(comptime x: @TypeOf(undefined)) @TypeOf(undefined) {
return x;
}
};

@ -263,15 +263,7 @@ test "generic function instantiation turns into comptime call" {

pub fn FieldEnum(comptime T: type) type {
_ = T;
var enumFields: [1]std.builtin.Type.EnumField = .{.{ .name = "A", .value = 0 }};
return @Type(.{
.@"enum" = .{
.tag_type = u0,
.fields = &enumFields,
.decls = &.{},
.is_exhaustive = true,
},
});
return @Enum(u0, .exhaustive, &.{"A"}, &.{0});
}
};
try S.doTheTest();

@ -338,14 +338,14 @@ test "peer type resolution with @TypeOf doesn't trigger dependency loop check" {

test "@sizeOf reified union zero-size payload fields" {
comptime {
try std.testing.expect(0 == @sizeOf(@Type(@typeInfo(union {}))));
try std.testing.expect(0 == @sizeOf(@Type(@typeInfo(union { a: void }))));
try std.testing.expect(0 == @sizeOf(@Union(.auto, null, &.{}, &.{}, &.{})));
try std.testing.expect(0 == @sizeOf(@Union(.auto, null, &.{"a"}, &.{void}, &.{.{}})));
if (builtin.mode == .Debug or builtin.mode == .ReleaseSafe) {
try std.testing.expect(1 == @sizeOf(@Type(@typeInfo(union { a: void, b: void }))));
try std.testing.expect(1 == @sizeOf(@Type(@typeInfo(union { a: void, b: void, c: void }))));
try std.testing.expect(1 == @sizeOf(@Union(.auto, null, &.{ "a", "b" }, &.{ void, void }, &.{ .{}, .{} })));
try std.testing.expect(1 == @sizeOf(@Union(.auto, null, &.{ "a", "b", "c" }, &.{ void, void, void }, &.{ .{}, .{}, .{} })));
} else {
try std.testing.expect(0 == @sizeOf(@Type(@typeInfo(union { a: void, b: void }))));
try std.testing.expect(0 == @sizeOf(@Type(@typeInfo(union { a: void, b: void, c: void }))));
try std.testing.expect(0 == @sizeOf(@Union(.auto, null, &.{ "a", "b" }, &.{ void, void }, &.{ .{}, .{} })));
try std.testing.expect(0 == @sizeOf(@Union(.auto, null, &.{ "a", "b", "c" }, &.{ void, void, void }, &.{ .{}, .{}, .{} })));
}
}
}

@ -2034,10 +2034,7 @@ test "matching captures causes struct equivalence" {
fn UnsignedWrapper(comptime I: type) type {
const bits = @typeInfo(I).int.bits;
return struct {
x: @Type(.{ .int = .{
.signedness = .unsigned,
.bits = bits,
} }),
x: @Int(.unsigned, bits),
};
}
};

@ -843,7 +843,8 @@ test "switch capture peer type resolution for in-memory coercible payloads" {
if (builtin.zig_backend == .stage2_aarch64) return error.SkipZigTest;

const T1 = c_int;
const T2 = @Type(@typeInfo(T1));
const t1_info = @typeInfo(T1).int;
const T2 = @Int(t1_info.signedness, t1_info.bits);

comptime assert(T1 != T2);

@ -865,7 +866,8 @@ test "switch pointer capture peer type resolution" {
if (builtin.zig_backend == .stage2_aarch64) return error.SkipZigTest;

const T1 = c_int;
const T2 = @Type(@typeInfo(T1));
const t1_info = @typeInfo(T1).int;
const T2 = @Int(t1_info.signedness, t1_info.bits);

comptime assert(T1 != T2);

@ -230,10 +230,7 @@ test "switch loop on larger than pointer integer" {
if (builtin.zig_backend == .stage2_c) return error.SkipZigTest;
if (builtin.zig_backend == .stage2_spirv) return error.SkipZigTest;

var entry: @Type(.{ .int = .{
.signedness = .unsigned,
.bits = @bitSizeOf(usize) + 1,
} }) = undefined;
var entry: @Int(.unsigned, @bitSizeOf(usize) + 1) = undefined;
entry = 0;
loop: switch (entry) {
0 => {

@ -130,29 +130,7 @@ test "array-like initializer for tuple types" {
|
||||
if (builtin.zig_backend == .stage2_sparc64) return error.SkipZigTest; // TODO
|
||||
if (builtin.zig_backend == .stage2_spirv) return error.SkipZigTest;
|
||||
|
||||
const T = @Type(.{
|
||||
.@"struct" = .{
|
||||
.is_tuple = true,
|
||||
.layout = .auto,
|
||||
.decls = &.{},
|
||||
.fields = &.{
|
||||
.{
|
||||
.name = "0",
|
||||
.type = i32,
|
||||
.default_value_ptr = null,
|
||||
.is_comptime = false,
|
||||
.alignment = @alignOf(i32),
|
||||
},
|
||||
.{
|
||||
.name = "1",
|
||||
.type = u8,
|
||||
.default_value_ptr = null,
|
||||
.is_comptime = false,
|
||||
.alignment = @alignOf(u8),
|
||||
},
|
||||
},
|
||||
},
|
||||
});
|
||||
const T = @Tuple(&.{ i32, u8 });
|
||||
const S = struct {
|
||||
fn doTheTest() !void {
|
||||
var obj: T = .{ -1234, 128 };
|
||||
@ -320,20 +298,7 @@ test "zero sized struct in tuple handled correctly" {
|
||||
const Self = @This();
|
||||
const Inner = struct {};
|
||||
|
||||
data: @Type(.{
|
||||
.@"struct" = .{
|
||||
.is_tuple = true,
|
||||
.layout = .auto,
|
||||
.decls = &.{},
|
||||
.fields = &.{.{
|
||||
.name = "0",
|
||||
.type = Inner,
|
||||
.default_value_ptr = null,
|
||||
.is_comptime = false,
|
||||
.alignment = @alignOf(Inner),
|
||||
}},
|
||||
},
|
||||
}),
|
||||
data: @Tuple(&.{Inner}),
|
||||
|
||||
pub fn do(this: Self) usize {
|
||||
return @sizeOf(@TypeOf(this));
|
||||
@ -470,12 +435,7 @@ test "coerce anon tuple to tuple" {
|
||||
}
|
||||
|
||||
test "empty tuple type" {
|
||||
const S = @Type(.{ .@"struct" = .{
|
||||
.layout = .auto,
|
||||
.fields = &.{},
|
||||
.decls = &.{},
|
||||
.is_tuple = true,
|
||||
} });
|
||||
const S = @Tuple(&.{});
|
||||
|
||||
const s: S = .{};
|
||||
try expect(s.len == 0);
|
||||
@ -616,18 +576,7 @@ test "OPV tuple fields aren't comptime" {
|
||||
const t_info = @typeInfo(T);
|
||||
try expect(!t_info.@"struct".fields[0].is_comptime);
|
||||
|
||||
const T2 = @Type(.{ .@"struct" = .{
|
||||
.layout = .auto,
|
||||
.fields = &.{.{
|
||||
.name = "0",
|
||||
.type = void,
|
||||
.default_value_ptr = null,
|
||||
.is_comptime = false,
|
||||
.alignment = @alignOf(void),
|
||||
}},
|
||||
.decls = &.{},
|
||||
.is_tuple = true,
|
||||
} });
|
||||
const T2 = @Tuple(&.{void});
|
||||
const t2_info = @typeInfo(T2);
|
||||
try expect(!t2_info.@"struct".fields[0].is_comptime);
|
||||
}
|
||||
|
||||
@ -4,63 +4,17 @@ const Type = std.builtin.Type;
|
||||
const testing = std.testing;
|
||||
const assert = std.debug.assert;
|
||||
|
||||
fn testTypes(comptime types: []const type) !void {
|
||||
inline for (types) |testType| {
|
||||
try testing.expect(testType == @Type(@typeInfo(testType)));
|
||||
}
|
||||
}
|
||||
|
||||
test "Type.MetaType" {
|
||||
try testing.expect(type == @Type(.{ .type = {} }));
|
||||
try testTypes(&[_]type{type});
|
||||
}
|
||||
|
||||
test "Type.Void" {
|
||||
try testing.expect(void == @Type(.{ .void = {} }));
|
||||
try testTypes(&[_]type{void});
|
||||
}
|
||||
|
||||
test "Type.Bool" {
|
||||
try testing.expect(bool == @Type(.{ .bool = {} }));
|
||||
try testTypes(&[_]type{bool});
|
||||
}
|
||||
|
||||
test "Type.NoReturn" {
|
||||
try testing.expect(noreturn == @Type(.{ .noreturn = {} }));
|
||||
try testTypes(&[_]type{noreturn});
|
||||
}
|
||||
|
||||
test "Type.Int" {
|
||||
try testing.expect(u1 == @Type(.{ .int = .{ .signedness = .unsigned, .bits = 1 } }));
|
||||
try testing.expect(i1 == @Type(.{ .int = .{ .signedness = .signed, .bits = 1 } }));
|
||||
try testing.expect(u8 == @Type(.{ .int = .{ .signedness = .unsigned, .bits = 8 } }));
|
||||
try testing.expect(i8 == @Type(.{ .int = .{ .signedness = .signed, .bits = 8 } }));
|
||||
try testing.expect(u64 == @Type(.{ .int = .{ .signedness = .unsigned, .bits = 64 } }));
|
||||
try testing.expect(i64 == @Type(.{ .int = .{ .signedness = .signed, .bits = 64 } }));
|
||||
try testTypes(&[_]type{ u8, u32, i64 });
|
||||
}
|
||||
|
||||
test "Type.ComptimeFloat" {
|
||||
try testTypes(&[_]type{comptime_float});
|
||||
}
|
||||
test "Type.ComptimeInt" {
|
||||
try testTypes(&[_]type{comptime_int});
|
||||
}
|
||||
test "Type.Undefined" {
|
||||
try testTypes(&[_]type{@TypeOf(undefined)});
|
||||
}
|
||||
test "Type.Null" {
|
||||
try testTypes(&[_]type{@TypeOf(null)});
|
||||
}
|
||||
|
||||
test "Type.EnumLiteral" {
|
||||
try testTypes(&[_]type{
|
||||
@TypeOf(.Dummy),
|
||||
});
|
||||
try testing.expect(u1 == @Int(.unsigned, 1));
|
||||
try testing.expect(i1 == @Int(.signed, 1));
|
||||
try testing.expect(u8 == @Int(.unsigned, 8));
|
||||
try testing.expect(i8 == @Int(.signed, 8));
|
||||
try testing.expect(u64 == @Int(.unsigned, 64));
|
||||
try testing.expect(i64 == @Int(.signed, 64));
|
||||
}
|
||||
|
||||
test "Type.Pointer" {
|
||||
try testTypes(&[_]type{
|
||||
inline for (&[_]type{
|
||||
// One Value Pointer Types
|
||||
*u8, *const u8,
|
||||
*volatile u8, *const volatile u8,
|
||||
@ -101,62 +55,30 @@ test "Type.Pointer" {
|
||||
[*c]align(4) volatile u8, [*c]align(4) const volatile u8,
|
||||
[*c]align(8) u8, [*c]align(8) const u8,
|
||||
[*c]align(8) volatile u8, [*c]align(8) const volatile u8,
|
||||
});
|
||||
}) |testType| {
|
||||
const ptr = @typeInfo(testType).pointer;
|
||||
try testing.expect(testType == @Pointer(ptr.size, .{
|
||||
.@"const" = ptr.is_const,
|
||||
.@"volatile" = ptr.is_volatile,
|
||||
.@"allowzero" = ptr.is_allowzero,
|
||||
.@"align" = ptr.alignment,
|
||||
.@"addrspace" = ptr.address_space,
|
||||
}, ptr.child, ptr.sentinel()));
|
||||
}
|
||||
}
|
||||
|
||||
test "Type.Float" {
|
||||
try testing.expect(f16 == @Type(.{ .float = .{ .bits = 16 } }));
|
||||
try testing.expect(f32 == @Type(.{ .float = .{ .bits = 32 } }));
|
||||
try testing.expect(f64 == @Type(.{ .float = .{ .bits = 64 } }));
|
||||
try testing.expect(f80 == @Type(.{ .float = .{ .bits = 80 } }));
|
||||
try testing.expect(f128 == @Type(.{ .float = .{ .bits = 128 } }));
|
||||
try testTypes(&[_]type{ f16, f32, f64, f80, f128 });
|
||||
test "@Pointer create slice without sentinel" {
|
||||
const Slice = @Pointer(.slice, .{ .@"const" = true, .@"align" = 8 }, ?*i32, null);
|
||||
try testing.expect(Slice == []align(8) const ?*i32);
|
||||
}
|
||||
|
||||
test "Type.Array" {
|
||||
try testing.expect([123]u8 == @Type(.{
|
||||
.array = .{
|
||||
.len = 123,
|
||||
.child = u8,
|
||||
.sentinel_ptr = null,
|
||||
},
|
||||
}));
|
||||
try testing.expect([2]u32 == @Type(.{
|
||||
.array = .{
|
||||
.len = 2,
|
||||
.child = u32,
|
||||
.sentinel_ptr = null,
|
||||
},
|
||||
}));
|
||||
try testing.expect([2:0]u32 == @Type(.{
|
||||
.array = .{
|
||||
.len = 2,
|
||||
.child = u32,
|
||||
.sentinel_ptr = &@as(u32, 0),
|
||||
},
|
||||
}));
|
||||
try testTypes(&[_]type{ [1]u8, [30]usize, [7]bool });
|
||||
test "@Pointer create slice with null sentinel" {
|
||||
const Slice = @Pointer(.slice, .{ .@"const" = true, .@"align" = 8 }, ?*i32, @as(?*i32, null));
|
||||
try testing.expect(Slice == [:null]align(8) const ?*i32);
|
||||
}
|
||||
|
||||
test "@Type create slice with null sentinel" {
|
||||
const Slice = @Type(.{
|
||||
.pointer = .{
|
||||
.size = .slice,
|
||||
.is_const = true,
|
||||
.is_volatile = false,
|
||||
.is_allowzero = false,
|
||||
.alignment = 8,
|
||||
.address_space = .generic,
|
||||
.child = *i32,
|
||||
.sentinel_ptr = null,
|
||||
},
|
||||
});
|
||||
try testing.expect(Slice == []align(8) const *i32);
|
||||
}
|
||||
|
||||
test "@Type picks up the sentinel value from Type" {
|
||||
try testTypes(&[_]type{
|
||||
[11:0]u8, [4:10]u8,
|
||||
test "@Pointer on @typeInfo round-trips sentinels" {
|
||||
inline for (&[_]type{
|
||||
[*:0]u8, [*:0]const u8,
|
||||
[*:0]volatile u8, [*:0]const volatile u8,
|
||||
[*:0]align(4) u8, [*:0]align(4) const u8,
|
||||
@ -179,24 +101,16 @@ test "@Type picks up the sentinel value from Type" {
|
||||
[:0]allowzero align(4) u8, [:0]allowzero align(4) const u8,
|
||||
[:0]allowzero align(4) volatile u8, [:0]allowzero align(4) const volatile u8,
|
||||
[:4]allowzero align(4) volatile u8, [:4]allowzero align(4) const volatile u8,
|
||||
});
|
||||
}
|
||||
|
||||
test "Type.Optional" {
|
||||
try testTypes(&[_]type{
|
||||
?u8,
|
||||
?*u8,
|
||||
?[]u8,
|
||||
?[*]u8,
|
||||
?[*c]u8,
|
||||
});
|
||||
}
|
||||
|
||||
test "Type.ErrorUnion" {
|
||||
try testTypes(&[_]type{
|
||||
error{}!void,
|
||||
error{Error}!void,
|
||||
});
|
||||
}) |TestType| {
|
||||
const ptr = @typeInfo(TestType).pointer;
|
||||
try testing.expect(TestType == @Pointer(ptr.size, .{
|
||||
.@"const" = ptr.is_const,
|
||||
.@"volatile" = ptr.is_volatile,
|
||||
.@"allowzero" = ptr.is_allowzero,
|
||||
.@"align" = ptr.alignment,
|
||||
.@"addrspace" = ptr.address_space,
|
||||
}, ptr.child, ptr.sentinel()));
|
||||
}
|
||||
}
|
||||
|
||||
test "Type.Opaque" {
|
||||
@ -205,11 +119,7 @@ test "Type.Opaque" {
|
||||
if (builtin.zig_backend == .stage2_sparc64) return error.SkipZigTest; // TODO
|
||||
if (builtin.zig_backend == .stage2_spirv) return error.SkipZigTest;
|
||||
|
||||
const Opaque = @Type(.{
|
||||
.@"opaque" = .{
|
||||
.decls = &.{},
|
||||
},
|
||||
});
|
||||
const Opaque = opaque {};
|
||||
try testing.expect(Opaque != opaque {});
|
||||
try testing.expectEqualSlices(
|
||||
Type.Declaration,
|
||||
@ -218,52 +128,17 @@ test "Type.Opaque" {
|
||||
);
|
||||
}
|
||||
|
||||
test "Type.Vector" {
|
||||
try testTypes(&[_]type{
|
||||
@Vector(0, u8),
|
||||
@Vector(4, u8),
|
||||
@Vector(8, *u8),
|
||||
@Vector(0, u8),
|
||||
@Vector(4, u8),
|
||||
@Vector(8, *u8),
|
||||
});
|
||||
}
|
||||
|
||||
test "Type.AnyFrame" {
|
||||
if (true) {
|
||||
// https://github.com/ziglang/zig/issues/6025
|
||||
return error.SkipZigTest;
|
||||
}
|
||||
|
||||
try testTypes(&[_]type{
|
||||
anyframe,
|
||||
anyframe->u8,
|
||||
anyframe->anyframe->u8,
|
||||
});
|
||||
}
|
||||
|
||||
fn add(a: i32, b: i32) i32 {
|
||||
return a + b;
|
||||
}
|
||||
|
||||
test "Type.ErrorSet" {
|
||||
try testing.expect(@Type(.{ .error_set = null }) == anyerror);
|
||||
|
||||
// error sets don't compare equal so just check if they compile
|
||||
inline for (.{ error{}, error{A}, error{ A, B, C } }) |T| {
|
||||
const info = @typeInfo(T);
|
||||
const T2 = @Type(info);
|
||||
try testing.expect(T == T2);
|
||||
}
|
||||
}
|
||||
|
||||
test "Type.Struct" {
|
||||
if (builtin.zig_backend == .stage2_aarch64) return error.SkipZigTest;
|
||||
if (builtin.zig_backend == .stage2_arm) return error.SkipZigTest; // TODO
|
||||
if (builtin.zig_backend == .stage2_sparc64) return error.SkipZigTest; // TODO
|
||||
if (builtin.zig_backend == .stage2_spirv) return error.SkipZigTest;
|
||||
|
||||
const A = @Type(@typeInfo(struct { x: u8, y: u32 }));
|
||||
const A = @Struct(.auto, null, &.{ "x", "y" }, &.{ u8, u32 }, &@splat(.{}));
|
||||
const infoA = @typeInfo(A).@"struct";
|
||||
try testing.expectEqual(Type.ContainerLayout.auto, infoA.layout);
|
||||
try testing.expectEqualSlices(u8, "x", infoA.fields[0].name);
|
||||
@ -281,7 +156,13 @@ test "Type.Struct" {
|
||||
a.y += 1;
|
||||
try testing.expectEqual(@as(u32, 2), a.y);
|
||||
|
||||
const B = @Type(@typeInfo(extern struct { x: u8, y: u32 = 5 }));
|
||||
const B = @Struct(
|
||||
.@"extern",
|
||||
null,
|
||||
&.{ "x", "y" },
|
||||
&.{ u8, u32 },
|
||||
&.{ .{}, .{ .default_value_ptr = &@as(u32, 5) } },
|
||||
);
|
||||
const infoB = @typeInfo(B).@"struct";
|
||||
try testing.expectEqual(Type.ContainerLayout.@"extern", infoB.layout);
|
||||
try testing.expectEqualSlices(u8, "x", infoB.fields[0].name);
|
||||
@ -293,7 +174,16 @@ test "Type.Struct" {
|
||||
try testing.expectEqual(@as(usize, 0), infoB.decls.len);
|
||||
try testing.expectEqual(@as(bool, false), infoB.is_tuple);
|
||||
|
||||
const C = @Type(@typeInfo(packed struct { x: u8 = 3, y: u32 = 5 }));
|
||||
const C = @Struct(
|
||||
.@"packed",
|
||||
null,
|
||||
&.{ "x", "y" },
|
||||
&.{ u8, u32 },
|
||||
&.{
|
||||
.{ .default_value_ptr = &@as(u8, 3) },
|
||||
.{ .default_value_ptr = &@as(u32, 5) },
|
||||
},
|
||||
);
|
||||
const infoC = @typeInfo(C).@"struct";
|
||||
try testing.expectEqual(Type.ContainerLayout.@"packed", infoC.layout);
|
||||
try testing.expectEqualSlices(u8, "x", infoC.fields[0].name);
|
||||
@ -305,76 +195,23 @@ test "Type.Struct" {
|
||||
try testing.expectEqual(@as(usize, 0), infoC.decls.len);
|
||||
try testing.expectEqual(@as(bool, false), infoC.is_tuple);
|
||||
|
||||
// anon structs
|
||||
const D = @Type(@typeInfo(@TypeOf(.{ .x = 3, .y = 5 })));
|
||||
const infoD = @typeInfo(D).@"struct";
|
||||
try testing.expectEqual(Type.ContainerLayout.auto, infoD.layout);
|
||||
try testing.expectEqualSlices(u8, "x", infoD.fields[0].name);
|
||||
try testing.expectEqual(comptime_int, infoD.fields[0].type);
|
||||
try testing.expectEqual(@as(comptime_int, 3), infoD.fields[0].defaultValue().?);
|
||||
try testing.expectEqualSlices(u8, "y", infoD.fields[1].name);
|
||||
try testing.expectEqual(comptime_int, infoD.fields[1].type);
|
||||
try testing.expectEqual(@as(comptime_int, 5), infoD.fields[1].defaultValue().?);
|
||||
try testing.expectEqual(@as(usize, 0), infoD.decls.len);
|
||||
try testing.expectEqual(@as(bool, false), infoD.is_tuple);
|
||||
|
||||
// tuples
|
||||
const E = @Type(@typeInfo(@TypeOf(.{ 1, 2 })));
|
||||
const infoE = @typeInfo(E).@"struct";
|
||||
try testing.expectEqual(Type.ContainerLayout.auto, infoE.layout);
|
||||
try testing.expectEqualSlices(u8, "0", infoE.fields[0].name);
|
||||
try testing.expectEqual(comptime_int, infoE.fields[0].type);
|
||||
try testing.expectEqual(@as(comptime_int, 1), infoE.fields[0].defaultValue().?);
|
||||
try testing.expectEqualSlices(u8, "1", infoE.fields[1].name);
|
||||
try testing.expectEqual(comptime_int, infoE.fields[1].type);
|
||||
try testing.expectEqual(@as(comptime_int, 2), infoE.fields[1].defaultValue().?);
|
||||
try testing.expectEqual(@as(usize, 0), infoE.decls.len);
|
||||
try testing.expectEqual(@as(bool, true), infoE.is_tuple);
|
||||
|
||||
// empty struct
|
||||
const F = @Type(@typeInfo(struct {}));
|
||||
const F = @Struct(.auto, null, &.{}, &.{}, &.{});
|
||||
const infoF = @typeInfo(F).@"struct";
|
||||
try testing.expectEqual(Type.ContainerLayout.auto, infoF.layout);
|
||||
try testing.expect(infoF.fields.len == 0);
|
||||
try testing.expectEqual(@as(bool, false), infoF.is_tuple);
|
||||
|
||||
// empty tuple
|
||||
const G = @Type(@typeInfo(@TypeOf(.{})));
|
||||
const infoG = @typeInfo(G).@"struct";
|
||||
try testing.expectEqual(Type.ContainerLayout.auto, infoG.layout);
|
||||
try testing.expect(infoG.fields.len == 0);
|
||||
try testing.expectEqual(@as(bool, true), infoG.is_tuple);
|
||||
}
|
||||
|
||||
test "Type.Enum" {
|
||||
if (builtin.zig_backend == .stage2_arm) return error.SkipZigTest; // TODO
|
||||
if (builtin.zig_backend == .stage2_spirv) return error.SkipZigTest;
|
||||
|
||||
const Foo = @Type(.{
|
||||
.@"enum" = .{
|
||||
.tag_type = u8,
|
||||
.fields = &.{
|
||||
.{ .name = "a", .value = 1 },
|
||||
.{ .name = "b", .value = 5 },
|
||||
},
|
||||
.decls = &.{},
|
||||
.is_exhaustive = true,
|
||||
},
|
||||
});
|
||||
const Foo = @Enum(u8, .exhaustive, &.{ "a", "b" }, &.{ 1, 5 });
|
||||
try testing.expectEqual(true, @typeInfo(Foo).@"enum".is_exhaustive);
|
||||
try testing.expectEqual(@as(u8, 1), @intFromEnum(Foo.a));
|
||||
try testing.expectEqual(@as(u8, 5), @intFromEnum(Foo.b));
|
||||
const Bar = @Type(.{
|
||||
.@"enum" = .{
|
||||
.tag_type = u32,
|
||||
.fields = &.{
|
||||
.{ .name = "a", .value = 1 },
|
||||
.{ .name = "b", .value = 5 },
|
||||
},
|
||||
.decls = &.{},
|
||||
.is_exhaustive = false,
|
||||
},
|
||||
});
|
||||
const Bar = @Enum(u32, .nonexhaustive, &.{ "a", "b" }, &.{ 1, 5 });
|
||||
try testing.expectEqual(false, @typeInfo(Bar).@"enum".is_exhaustive);
|
||||
try testing.expectEqual(@as(u32, 1), @intFromEnum(Bar.a));
|
||||
try testing.expectEqual(@as(u32, 5), @intFromEnum(Bar.b));
|
||||
@ -382,12 +219,7 @@ test "Type.Enum" {
|
||||
|
||||
{ // from https://github.com/ziglang/zig/issues/19985
|
||||
{ // enum with single field can be initialized.
|
||||
const E = @Type(.{ .@"enum" = .{
|
||||
.tag_type = u0,
|
||||
.is_exhaustive = true,
|
||||
.fields = &.{.{ .name = "foo", .value = 0 }},
|
||||
.decls = &.{},
|
||||
} });
|
||||
const E = @Enum(u0, .exhaustive, &.{"foo"}, &.{0});
|
||||
const s: struct { E } = .{.foo};
|
||||
try testing.expectEqual(.foo, s[0]);
|
||||
}
|
||||
@ -411,60 +243,20 @@ test "Type.Union" {
|
||||
if (builtin.zig_backend == .stage2_arm) return error.SkipZigTest; // TODO
|
||||
if (builtin.zig_backend == .stage2_spirv) return error.SkipZigTest;
|
||||
|
||||
const Untagged = @Type(.{
|
||||
.@"union" = .{
|
||||
.layout = .@"extern",
|
||||
.tag_type = null,
|
||||
.fields = &.{
|
||||
.{ .name = "int", .type = i32, .alignment = @alignOf(f32) },
|
||||
.{ .name = "float", .type = f32, .alignment = @alignOf(f32) },
|
||||
},
|
||||
.decls = &.{},
|
||||
},
|
||||
});
|
||||
const Untagged = @Union(.@"extern", null, &.{ "int", "float" }, &.{ i32, f32 }, &.{ .{}, .{} });
|
||||
var untagged = Untagged{ .int = 1 };
|
||||
untagged.float = 2.0;
|
||||
untagged.int = 3;
|
||||
try testing.expectEqual(@as(i32, 3), untagged.int);
|
||||
|
||||
const PackedUntagged = @Type(.{
|
||||
.@"union" = .{
|
||||
.layout = .@"packed",
|
||||
.tag_type = null,
|
||||
.fields = &.{
|
||||
.{ .name = "signed", .type = i32, .alignment = 0 },
|
||||
.{ .name = "unsigned", .type = u32, .alignment = 0 },
|
||||
},
|
||||
.decls = &.{},
|
||||
},
|
||||
});
|
||||
const PackedUntagged = @Union(.@"packed", null, &.{ "signed", "unsigned" }, &.{ i32, u32 }, &.{ .{}, .{} });
|
||||
var packed_untagged: PackedUntagged = .{ .signed = -1 };
|
||||
_ = &packed_untagged;
|
||||
try testing.expectEqual(@as(i32, -1), packed_untagged.signed);
|
||||
try testing.expectEqual(~@as(u32, 0), packed_untagged.unsigned);
|
||||
|
||||
const Tag = @Type(.{
|
||||
.@"enum" = .{
|
||||
.tag_type = u1,
|
||||
.fields = &.{
|
||||
.{ .name = "signed", .value = 0 },
|
||||
.{ .name = "unsigned", .value = 1 },
|
||||
},
|
||||
.decls = &.{},
|
||||
.is_exhaustive = true,
|
||||
},
|
||||
});
|
||||
const Tagged = @Type(.{
|
||||
.@"union" = .{
|
||||
.layout = .auto,
|
||||
.tag_type = Tag,
|
||||
.fields = &.{
|
||||
.{ .name = "signed", .type = i32, .alignment = @alignOf(i32) },
|
||||
.{ .name = "unsigned", .type = u32, .alignment = @alignOf(u32) },
|
||||
},
|
||||
.decls = &.{},
|
||||
},
|
||||
});
|
||||
const Tag = @Enum(u1, .exhaustive, &.{ "signed", "unsigned" }, &.{ 0, 1 });
|
||||
const Tagged = @Union(.auto, Tag, &.{ "signed", "unsigned" }, &.{ i32, u32 }, &.{ .{}, .{} });
|
||||
var tagged = Tagged{ .signed = -1 };
|
||||
try testing.expectEqual(Tag.signed, @as(Tag, tagged));
|
||||
tagged = .{ .unsigned = 1 };
|
||||
@ -472,74 +264,26 @@ test "Type.Union" {
|
||||
}
|
||||
|
||||
test "Type.Union from Type.Enum" {
|
||||
const Tag = @Type(.{
|
||||
.@"enum" = .{
|
||||
.tag_type = u0,
|
||||
.fields = &.{
|
||||
.{ .name = "working_as_expected", .value = 0 },
|
||||
},
|
||||
.decls = &.{},
|
||||
.is_exhaustive = true,
|
||||
},
|
||||
});
|
||||
const T = @Type(.{
|
||||
.@"union" = .{
|
||||
.layout = .auto,
|
||||
.tag_type = Tag,
|
||||
.fields = &.{
|
||||
.{ .name = "working_as_expected", .type = u32, .alignment = @alignOf(u32) },
|
||||
},
|
||||
.decls = &.{},
|
||||
},
|
||||
});
|
||||
const Tag = @Enum(u0, .exhaustive, &.{"working_as_expected"}, &.{0});
|
||||
const T = @Union(.auto, Tag, &.{"working_as_expected"}, &.{u32}, &.{.{}});
|
||||
_ = @typeInfo(T).@"union";
|
||||
}
|
||||
|
||||
test "Type.Union from regular enum" {
|
||||
const E = enum { working_as_expected };
|
||||
const T = @Type(.{
|
||||
.@"union" = .{
|
||||
.layout = .auto,
|
||||
.tag_type = E,
|
||||
.fields = &.{
|
||||
.{ .name = "working_as_expected", .type = u32, .alignment = @alignOf(u32) },
|
||||
},
|
||||
.decls = &.{},
|
||||
},
|
||||
});
|
||||
const T = @Union(.auto, E, &.{"working_as_expected"}, &.{u32}, &.{.{}});
|
||||
_ = @typeInfo(T).@"union";
|
||||
}
|
||||
|
||||
test "Type.Union from empty regular enum" {
|
||||
const E = enum {};
|
||||
const U = @Type(.{
|
||||
.@"union" = .{
|
||||
.layout = .auto,
|
||||
.tag_type = E,
|
||||
.fields = &.{},
|
||||
.decls = &.{},
|
||||
},
|
||||
});
|
||||
const U = @Union(.auto, E, &.{}, &.{}, &.{});
|
||||
try testing.expectEqual(@sizeOf(U), 0);
|
||||
}
|
||||
|
||||
test "Type.Union from empty Type.Enum" {
|
||||
const E = @Type(.{
|
||||
.@"enum" = .{
|
||||
.tag_type = u0,
|
||||
.fields = &.{},
|
||||
.decls = &.{},
|
||||
.is_exhaustive = true,
|
||||
},
|
||||
});
|
||||
const U = @Type(.{
|
||||
.@"union" = .{
|
||||
.layout = .auto,
|
||||
.tag_type = E,
|
||||
.fields = &.{},
|
||||
.decls = &.{},
|
||||
},
|
||||
});
|
||||
const E = @Enum(u0, .exhaustive, &.{}, &.{});
|
||||
const U = @Union(.auto, E, &.{}, &.{}, &.{});
|
||||
try testing.expectEqual(@sizeOf(U), 0);
|
||||
}
|
||||
|
||||
@ -548,47 +292,22 @@ test "Type.Fn" {
|
||||
|
||||
const some_opaque = opaque {};
|
||||
const some_ptr = *some_opaque;
|
||||
const T = fn (c_int, some_ptr) callconv(.c) void;
|
||||
|
||||
{
|
||||
const fn_info = std.builtin.Type{ .@"fn" = .{
|
||||
.calling_convention = .c,
|
||||
.is_generic = false,
|
||||
.is_var_args = false,
|
||||
.return_type = void,
|
||||
.params = &.{
|
||||
.{ .is_generic = false, .is_noalias = false, .type = c_int },
|
||||
.{ .is_generic = false, .is_noalias = false, .type = some_ptr },
|
||||
},
|
||||
} };
|
||||
const A = @Fn(&.{ c_int, some_ptr }, &@splat(.{}), void, .{ .@"callconv" = .c });
|
||||
comptime assert(A == fn (c_int, some_ptr) callconv(.c) void);
|
||||
|
||||
const fn_type = @Type(fn_info);
|
||||
try std.testing.expectEqual(T, fn_type);
|
||||
}
|
||||
const B = @Fn(&.{ c_int, some_ptr, u32 }, &.{ .{}, .{ .@"noalias" = true }, .{} }, u64, .{});
|
||||
comptime assert(B == fn (c_int, noalias some_ptr, u32) u64);
|
||||
|
||||
{
|
||||
const fn_info = @typeInfo(T);
|
||||
const fn_type = @Type(fn_info);
|
||||
try std.testing.expectEqual(T, fn_type);
|
||||
}
|
||||
const C = @Fn(&.{?[*]u8}, &.{.{}}, *const anyopaque, .{ .@"callconv" = .c, .varargs = true });
|
||||
comptime assert(C == fn (?[*]u8, ...) callconv(.c) *const anyopaque);
|
||||
}
|
||||
|
||||
test "reified struct field name from optional payload" {
|
||||
comptime {
|
||||
const m_name: ?[1:0]u8 = "a".*;
|
||||
if (m_name) |*name| {
|
||||
const T = @Type(.{ .@"struct" = .{
|
||||
.layout = .auto,
|
||||
.fields = &.{.{
|
||||
.name = name,
|
||||
.type = u8,
|
||||
.default_value_ptr = null,
|
||||
.is_comptime = false,
|
||||
.alignment = 1,
|
||||
}},
|
||||
.decls = &.{},
|
||||
.is_tuple = false,
|
||||
} });
|
||||
const T = @Struct(.auto, null, &.{name}, &.{u8}, &.{.{}});
|
||||
const t: T = .{ .a = 123 };
|
||||
try std.testing.expect(t.a == 123);
|
||||
}
|
||||
@ -598,20 +317,7 @@ test "reified struct field name from optional payload" {
|
||||
test "reified union uses @alignOf" {
|
||||
const S = struct {
|
||||
fn CreateUnion(comptime T: type) type {
|
||||
return @Type(.{
|
||||
.@"union" = .{
|
||||
.layout = .auto,
|
||||
.tag_type = null,
|
||||
.fields = &[_]std.builtin.Type.UnionField{
|
||||
.{
|
||||
.name = "field",
|
||||
.type = T,
|
||||
.alignment = @alignOf(T),
|
||||
},
|
||||
},
|
||||
.decls = &.{},
|
||||
},
|
||||
});
|
||||
return @Union(.auto, null, &.{"field"}, &.{T}, &.{.{}});
|
||||
}
|
||||
};
|
||||
_ = S.CreateUnion(struct {});
|
||||
@ -620,22 +326,13 @@ test "reified union uses @alignOf" {
|
||||
test "reified struct uses @alignOf" {
|
||||
const S = struct {
|
||||
fn NamespacedGlobals(comptime modules: anytype) type {
|
||||
return @Type(.{
|
||||
.@"struct" = .{
|
||||
.layout = .auto,
|
||||
.is_tuple = false,
|
||||
.fields = &.{
|
||||
.{
|
||||
.name = "globals",
|
||||
.type = modules.mach.globals,
|
||||
.default_value_ptr = null,
|
||||
.is_comptime = false,
|
||||
.alignment = @alignOf(modules.mach.globals),
|
||||
},
|
||||
},
|
||||
.decls = &.{},
|
||||
},
|
||||
});
|
||||
return @Struct(
|
||||
.auto,
|
||||
null,
|
||||
&.{"globals"},
|
||||
&.{modules.mach.globals},
|
||||
&.{.{ .@"align" = @alignOf(modules.mach.globals) }},
|
||||
);
|
||||
}
|
||||
};
|
||||
_ = S.NamespacedGlobals(.{
|
||||
@ -645,56 +342,10 @@ test "reified struct uses @alignOf" {
|
||||
});
|
||||
}
|
||||
|
||||
test "reified error set initialized with field pointer" {
|
||||
const S = struct {
|
||||
const info = .{
|
||||
.args = [_]Type.Error{
|
||||
.{ .name = "bar" },
|
||||
},
|
||||
};
|
||||
const Foo = @Type(.{
|
||||
.error_set = &info.args,
|
||||
});
|
||||
};
|
||||
try testing.expect(S.Foo == error{bar});
|
||||
}
|
||||
test "reified function type params initialized with field pointer" {
|
||||
const S = struct {
|
||||
const fn_info = .{
|
||||
.params = [_]Type.Fn.Param{
|
||||
.{ .is_generic = false, .is_noalias = false, .type = u8 },
|
||||
},
|
||||
};
|
||||
const Bar = @Type(.{
|
||||
.@"fn" = .{
|
||||
.calling_convention = .auto,
|
||||
.is_generic = false,
|
||||
.is_var_args = false,
|
||||
.return_type = void,
|
||||
.params = &fn_info.params,
|
||||
},
|
||||
});
|
||||
};
|
||||
try testing.expect(@typeInfo(S.Bar) == .@"fn");
|
||||
}
|
||||
|
||||
test "empty struct assigned to reified struct field" {
|
||||
const S = struct {
|
||||
fn NamespacedComponents(comptime modules: anytype) type {
|
||||
return @Type(.{
|
||||
.@"struct" = .{
|
||||
.layout = .auto,
|
||||
.is_tuple = false,
|
||||
.fields = &.{.{
|
||||
.name = "components",
|
||||
.type = @TypeOf(modules.components),
|
||||
.default_value_ptr = null,
|
||||
.is_comptime = false,
|
||||
.alignment = @alignOf(@TypeOf(modules.components)),
|
||||
}},
|
||||
.decls = &.{},
|
||||
},
|
||||
});
|
||||
return @Struct(.auto, null, &.{"components"}, &.{@TypeOf(modules.components)}, &.{.{}});
|
||||
}
|
||||
|
||||
fn namespacedComponents(comptime modules: anytype) NamespacedComponents(modules) {
|
||||
@ -710,16 +361,6 @@ test "empty struct assigned to reified struct field" {
|
||||
});
|
||||
}
|
||||
|
||||
test "@Type should resolve its children types" {
|
||||
const sparse = enum(u2) { a, b, c };
|
||||
const dense = enum(u2) { a, b, c, d };
|
||||
|
||||
comptime var sparse_info = @typeInfo(anyerror!sparse);
|
||||
sparse_info.error_union.payload = dense;
|
||||
const B = @Type(sparse_info);
|
||||
try testing.expectEqual(anyerror!dense, B);
|
||||
}
|
||||
|
||||
test "struct field names sliced at comptime from larger string" {
|
||||
if (builtin.zig_backend == .stage2_arm) return error.SkipZigTest; // TODO
|
||||
|
||||
@ -729,28 +370,14 @@ test "struct field names sliced at comptime from larger string" {
|
||||
\\f3
|
||||
;
|
||||
comptime {
|
||||
var fields: []const Type.StructField = &[0]Type.StructField{};
|
||||
var field_names: []const []const u8 = &.{};
|
||||
|
||||
var it = std.mem.tokenizeScalar(u8, text, '\n');
|
||||
while (it.next()) |name| {
|
||||
fields = fields ++ &[_]Type.StructField{.{
|
||||
.alignment = @alignOf(usize),
|
||||
.name = name ++ "",
|
||||
.type = usize,
|
||||
.default_value_ptr = null,
|
||||
.is_comptime = false,
|
||||
}};
|
||||
field_names = field_names ++ @as([]const []const u8, &.{name});
|
||||
}
|
||||
|
||||
const T = @Type(.{
|
||||
.@"struct" = .{
|
||||
.layout = .auto,
|
||||
.is_tuple = false,
|
||||
.fields = fields,
|
||||
.decls = &.{},
|
||||
},
|
||||
});
|
||||
|
||||
const T = @Struct(.auto, null, field_names, &@splat(usize), &@splat(.{}));
|
||||
const gen_fields = @typeInfo(T).@"struct".fields;
|
||||
try testing.expectEqual(3, gen_fields.len);
|
||||
try testing.expectEqualStrings("f1", gen_fields[0].name);
|
||||
@ -762,10 +389,7 @@ test "struct field names sliced at comptime from larger string" {
|
||||
test "matching captures causes opaque equivalence" {
|
||||
const S = struct {
|
||||
fn UnsignedId(comptime I: type) type {
|
||||
const U = @Type(.{ .int = .{
|
||||
.signedness = .unsigned,
|
||||
.bits = @typeInfo(I).int.bits,
|
||||
} });
|
||||
const U = @Int(.unsigned, @typeInfo(I).int.bits);
|
||||
return opaque {
|
||||
fn id(x: U) U {
|
||||
return x;
|
||||
@ -785,17 +409,9 @@ test "matching captures causes opaque equivalence" {
|
||||
}
|
||||
|
||||
test "reify enum where fields refers to part of array" {
|
||||
const fields: [3]std.builtin.Type.EnumField = .{
|
||||
.{ .name = "foo", .value = 0 },
|
||||
.{ .name = "bar", .value = 1 },
|
||||
undefined,
|
||||
};
|
||||
const E = @Type(.{ .@"enum" = .{
|
||||
.tag_type = u8,
|
||||
.fields = fields[0..2],
|
||||
.decls = &.{},
|
||||
.is_exhaustive = true,
|
||||
} });
|
||||
const field_names: [3][]const u8 = .{ "foo", "bar", undefined };
|
||||
const field_values: [3]u8 = .{ undefined, 0, 1 };
|
||||
const E = @Enum(u8, .exhaustive, field_names[0..2], field_values[1..3]);
|
||||
var a: E = undefined;
|
||||
var b: E = undefined;
|
||||
a = .foo;
|
||||
|
||||
@ -2198,14 +2198,8 @@ test "matching captures causes union equivalence" {
|
||||
fn SignedUnsigned(comptime I: type) type {
|
||||
const bits = @typeInfo(I).int.bits;
|
||||
return union {
|
||||
u: @Type(.{ .int = .{
|
||||
.signedness = .unsigned,
|
||||
.bits = bits,
|
||||
} }),
|
||||
i: @Type(.{ .int = .{
|
||||
.signedness = .signed,
|
||||
.bits = bits,
|
||||
} }),
|
||||
u: @Int(.unsigned, bits),
|
||||
i: @Int(.signed, bits),
|
||||
};
|
||||
}
|
||||
};
|
||||
|
||||
@ -36,34 +36,28 @@ pub fn ChangeScalar(comptime Type: type, comptime NewScalar: type) type {
|
||||
}
|
||||
pub fn AsSignedness(comptime Type: type, comptime signedness: std.builtin.Signedness) type {
|
||||
return switch (@typeInfo(Scalar(Type))) {
|
||||
.int => |int| ChangeScalar(Type, @Type(.{ .int = .{
|
||||
.signedness = signedness,
|
||||
.bits = int.bits,
|
||||
} })),
|
||||
.int => |int| ChangeScalar(Type, @Int(signedness, int.bits)),
|
||||
.float => Type,
|
||||
else => @compileError(@typeName(Type)),
|
||||
};
|
||||
}
|
||||
pub fn AddOneBit(comptime Type: type) type {
|
||||
return ChangeScalar(Type, switch (@typeInfo(Scalar(Type))) {
|
||||
.int => |int| @Type(.{ .int = .{ .signedness = int.signedness, .bits = 1 + int.bits } }),
|
||||
.int => |int| @Int(int.signedness, 1 + int.bits),
|
||||
.float => Scalar(Type),
|
||||
else => @compileError(@typeName(Type)),
|
||||
});
|
||||
}
|
||||
pub fn DoubleBits(comptime Type: type) type {
|
||||
return ChangeScalar(Type, switch (@typeInfo(Scalar(Type))) {
|
||||
.int => |int| @Type(.{ .int = .{ .signedness = int.signedness, .bits = int.bits * 2 } }),
|
||||
.int => |int| @Int(int.signedness, int.bits * 2),
|
||||
.float => Scalar(Type),
|
||||
else => @compileError(@typeName(Type)),
|
||||
});
|
||||
}
|
||||
pub fn RoundBitsUp(comptime Type: type, comptime multiple: u16) type {
|
||||
return ChangeScalar(Type, switch (@typeInfo(Scalar(Type))) {
|
||||
.int => |int| @Type(.{ .int = .{
|
||||
.signedness = int.signedness,
|
||||
.bits = std.mem.alignForward(u16, int.bits, multiple),
|
||||
} }),
|
||||
.int => |int| @Int(int.signedness, std.mem.alignForward(u16, int.bits, multiple)),
|
||||
.float => Scalar(Type),
|
||||
else => @compileError(@typeName(Type)),
|
||||
});
|
||||
@ -83,10 +77,7 @@ pub fn splat(comptime Type: type, scalar: Scalar(Type)) Type {
|
||||
pub fn sign(rhs: anytype) ChangeScalar(@TypeOf(rhs), bool) {
|
||||
const Int = ChangeScalar(@TypeOf(rhs), switch (@typeInfo(Scalar(@TypeOf(rhs)))) {
|
||||
.int, .comptime_int => Scalar(@TypeOf(rhs)),
|
||||
.float => |float| @Type(.{ .int = .{
|
||||
.signedness = .signed,
|
||||
.bits = float.bits,
|
||||
} }),
|
||||
.float => |float| @Int(.signed, float.bits),
|
||||
else => @compileError(@typeName(@TypeOf(rhs))),
|
||||
});
|
||||
return @as(Int, @bitCast(rhs)) < splat(Int, 0);
|
||||
|
||||
@ -116,7 +116,7 @@ export fn testMutablePointer() void {
|
||||
// tmp.zig:85:26: note: ZON does not allow nested optionals
|
||||
// tmp.zig:90:29: error: type '*i32' is not available in ZON
|
||||
// tmp.zig:90:29: note: ZON does not allow mutable pointers
|
||||
// neg_inf.zon:1:1: error: expected type '@Type(.enum_literal)'
|
||||
// neg_inf.zon:1:1: error: expected type '@EnumLiteral()'
|
||||
// tmp.zig:37:38: note: imported here
|
||||
// neg_inf.zon:1:1: error: expected type '?u8'
|
||||
// tmp.zig:57:28: note: imported here
|
||||
|
||||
@ -70,7 +70,7 @@ export fn testVector() void {
|
||||
// tmp.zig:22:29: note: imported here
|
||||
// vec2.zon:1:2: error: expected type '?tmp.Enum'
|
||||
// tmp.zig:28:30: note: imported here
|
||||
// vec2.zon:1:2: error: expected type '?@Type(.enum_literal)'
|
||||
// vec2.zon:1:2: error: expected type '?@EnumLiteral()'
|
||||
// tmp.zig:33:39: note: imported here
|
||||
// vec2.zon:1:2: error: expected type '?[1]u8'
|
||||
// tmp.zig:38:31: note: imported here
|
||||
|
||||
@ -38,31 +38,11 @@ export fn i() void {
|
||||
}
|
||||
|
||||
export fn j() void {
|
||||
_ = @Type(.{ .@"struct" = .{
|
||||
.layout = .auto,
|
||||
.fields = &.{.{
|
||||
.name = "test",
|
||||
.type = u32,
|
||||
.default_value_ptr = null,
|
||||
.is_comptime = false,
|
||||
.alignment = 0,
|
||||
}},
|
||||
.decls = &.{},
|
||||
.is_tuple = false,
|
||||
} });
|
||||
_ = @Struct(.auto, null, &.{"test"}, &.{u32}, &.{.{ .@"align" = 0 }});
|
||||
}
|
||||
|
||||
export fn k() void {
|
||||
_ = @Type(.{ .pointer = .{
|
||||
.size = .one,
|
||||
.is_const = false,
|
||||
.is_volatile = false,
|
||||
.alignment = 0,
|
||||
.address_space = .generic,
|
||||
.child = u32,
|
||||
.is_allowzero = false,
|
||||
.sentinel_ptr = null,
|
||||
} });
|
||||
_ = @Pointer(.one, .{ .@"align" = 0 }, u32, null);
|
||||
}
|
||||
|
||||
// error
|
||||
@ -76,5 +56,5 @@ export fn k() void {
|
||||
// :29:17: error: alignment must be >= 1
|
||||
// :33:35: error: alignment must be >= 1
|
||||
// :37:34: error: alignment must be >= 1
|
||||
// :41:9: error: alignment must be >= 1
|
||||
// :56:9: error: alignment must be >= 1
|
||||
// :41:51: error: alignment must be >= 1
|
||||
// :45:25: error: alignment must be >= 1
|
||||
|
||||
@ -6,4 +6,4 @@ export fn entry() void {
|
||||
|
||||
// error
|
||||
//
|
||||
// :3:10: error: expected type 'error{Hi}', found '@Type(.enum_literal)'
|
||||
// :3:10: error: expected type 'error{Hi}', found '@EnumLiteral()'
|
||||
|
||||
@ -1,8 +0,0 @@
|
||||
const builtin = @import("std").builtin;
|
||||
comptime {
|
||||
_ = @Type(.{ .float = .{ .bits = 17 } });
|
||||
}
|
||||
|
||||
// error
|
||||
//
|
||||
// :3:9: error: 17-bit float unsupported
|
||||
@ -1,10 +0,0 @@
|
||||
export fn entry() void {
|
||||
_ = @Type(@typeInfo(enum {
|
||||
foo,
|
||||
pub const bar = 1;
|
||||
}));
|
||||
}
|
||||
|
||||
// error
|
||||
//
|
||||
// :2:9: error: reified enums must have no decls
|
||||
@ -6,4 +6,4 @@ export fn entry() void {
|
||||
|
||||
// error
|
||||
//
|
||||
// :3:19: error: expected type 'error{Foo}', found '@Type(.enum_literal)'
|
||||
// :3:19: error: expected type 'error{Foo}', found '@EnumLiteral()'
|
||||
|
||||
@ -9,40 +9,13 @@ export fn c() void {
|
||||
}
|
||||
|
||||
export fn d() void {
|
||||
_ = @Type(.{ .pointer = .{
|
||||
.size = .slice,
|
||||
.is_const = false,
|
||||
.is_volatile = false,
|
||||
.alignment = 1,
|
||||
.address_space = .generic,
|
||||
.child = anyopaque,
|
||||
.is_allowzero = false,
|
||||
.sentinel_ptr = null,
|
||||
} });
|
||||
_ = @Pointer(.slice, .{}, anyopaque, null);
|
||||
}
|
||||
export fn e() void {
|
||||
_ = @Type(.{ .pointer = .{
|
||||
.size = .many,
|
||||
.is_const = false,
|
||||
.is_volatile = false,
|
||||
.alignment = 1,
|
||||
.address_space = .generic,
|
||||
.child = anyopaque,
|
||||
.is_allowzero = false,
|
||||
.sentinel_ptr = null,
|
||||
} });
|
||||
_ = @Pointer(.many, .{}, anyopaque, null);
|
||||
}
|
||||
export fn f() void {
|
||||
_ = @Type(.{ .pointer = .{
|
||||
.size = .c,
|
||||
.is_const = false,
|
||||
.is_volatile = false,
|
||||
.alignment = 1,
|
||||
.address_space = .generic,
|
||||
.child = anyopaque,
|
||||
.is_allowzero = false,
|
||||
.sentinel_ptr = null,
|
||||
} });
|
||||
_ = @Pointer(.c, .{}, anyopaque, null);
|
||||
}
|
||||
|
||||
// error
|
||||
@ -51,5 +24,5 @@ export fn f() void {
|
||||
// :5:12: error: indexable pointer to opaque type 'anyopaque' not allowed
|
||||
// :8:13: error: indexable pointer to opaque type 'anyopaque' not allowed
|
||||
// :12:9: error: indexable pointer to opaque type 'anyopaque' not allowed
|
||||
// :24:9: error: indexable pointer to opaque type 'anyopaque' not allowed
|
||||
// :36:9: error: indexable pointer to opaque type 'anyopaque' not allowed
|
||||
// :15:9: error: indexable pointer to opaque type 'anyopaque' not allowed
|
||||
// :18:9: error: indexable pointer to opaque type 'anyopaque' not allowed
|
||||
|
||||
@ -1,16 +1,7 @@
|
||||
export fn entry() void {
|
||||
_ = @Type(.{ .pointer = .{
|
||||
.size = .one,
|
||||
.is_const = false,
|
||||
.is_volatile = false,
|
||||
.alignment = 1,
|
||||
.address_space = .generic,
|
||||
.child = u8,
|
||||
.is_allowzero = false,
|
||||
.sentinel_ptr = &@as(u8, 0),
|
||||
} });
|
||||
_ = @Pointer(.one, .{}, u8, 0);
|
||||
}
|
||||
|
||||
// error
|
||||
//
|
||||
// :2:9: error: sentinels are only allowed on slices and unknown-length pointers
|
||||
// :2:33: error: sentinels are only allowed on slices and unknown-length pointers
|
||||
|
||||
@ -36,4 +36,4 @@ const Union = union { foo: void };
|
||||
// :13:29: error: expected number, found 'tmp.Union'
|
||||
// :19:15: note: union declared here
|
||||
// :14:61: error: expected number, found 'fn () u8'
|
||||
// :15:25: error: expected number, found '@Type(.enum_literal)'
|
||||
// :15:25: error: expected number, found '@EnumLiteral()'
|
||||
|
||||
@ -1,10 +1,10 @@
|
||||
export fn entry() void {
|
||||
const V1 = @Vector(4, u8);
|
||||
const V2 = @Type(.{ .vector = .{ .len = 4, .child = V1 } });
|
||||
const V2 = @Vector(4, V1);
|
||||
const v: V2 = undefined;
|
||||
_ = v;
|
||||
}
|
||||
|
||||
// error
|
||||
//
|
||||
// :3:16: error: expected integer, float, bool, or pointer for the vector element type; found '@Vector(4, u8)'
|
||||
// :3:27: error: expected integer, float, bool, or pointer for the vector element type; found '@Vector(4, u8)'
|
||||
|
||||
@ -14,4 +14,4 @@ export fn entry() usize {
|
||||
//
|
||||
// :6:12: error: unable to resolve comptime value
|
||||
// :2:12: note: called at comptime from here
|
||||
// :1:13: note: struct fields must be comptime-known
|
||||
// :1:13: note: types must be comptime-known
|
||||
|
||||
@ -12,31 +12,10 @@ comptime {
|
||||
}
|
||||
|
||||
comptime {
|
||||
_ = @Type(.{ .array = .{ .child = S, .len = 0, .sentinel_ptr = &sentinel } });
|
||||
_ = @Pointer(.slice, .{}, S, sentinel);
|
||||
}
|
||||
comptime {
|
||||
_ = @Type(.{ .pointer = .{
|
||||
.size = .slice,
|
||||
.is_const = false,
|
||||
.is_volatile = false,
|
||||
.alignment = @alignOf(S),
|
||||
.address_space = .generic,
|
||||
.child = S,
|
||||
.is_allowzero = false,
|
||||
.sentinel_ptr = &sentinel,
|
||||
} });
|
||||
}
|
||||
comptime {
|
||||
_ = @Type(.{ .pointer = .{
|
||||
.size = .many,
|
||||
.is_const = false,
|
||||
.is_volatile = false,
|
||||
.alignment = @alignOf(S),
|
||||
.address_space = .generic,
|
||||
.child = S,
|
||||
.is_allowzero = false,
|
||||
.sentinel_ptr = &sentinel,
|
||||
} });
|
||||
_ = @Pointer(.many, .{}, S, sentinel);
|
||||
}
|
||||
|
||||
// error
|
||||
@ -47,9 +26,7 @@ comptime {
|
||||
// :1:11: note: struct declared here
|
||||
// :11:12: error: non-scalar sentinel type 'tmp.S'
|
||||
// :1:11: note: struct declared here
|
||||
// :15:9: error: non-scalar sentinel type 'tmp.S'
|
||||
// :15:34: error: non-scalar sentinel type 'tmp.S'
|
||||
// :1:11: note: struct declared here
|
||||
// :18:9: error: non-scalar sentinel type 'tmp.S'
|
||||
// :1:11: note: struct declared here
|
||||
// :30:9: error: non-scalar sentinel type 'tmp.S'
|
||||
// :18:33: error: non-scalar sentinel type 'tmp.S'
|
||||
// :1:11: note: struct declared here
|
||||
|
||||
@ -1,17 +1,8 @@
|
||||
comptime {
|
||||
const E = @Type(.{ .@"enum" = .{
|
||||
.tag_type = u1,
|
||||
.fields = &.{
|
||||
.{ .name = "f0", .value = 0 },
|
||||
.{ .name = "f1", .value = 1 },
|
||||
.{ .name = "f2", .value = 2 },
|
||||
},
|
||||
.decls = &.{},
|
||||
.is_exhaustive = true,
|
||||
} });
|
||||
const E = @Enum(u1, .exhaustive, &.{ "f0", "f1", "f2" }, &.{ 0, 1, 2 });
|
||||
_ = E;
|
||||
}
|
||||
|
||||
// error
|
||||
//
|
||||
// :2:15: error: field 'f2' with enumeration value '2' is too large for backing int type 'u1'
|
||||
// :2:72: error: type 'u1' cannot represent integer value '2'
|
||||
|
||||
@ -1,18 +1,8 @@
|
||||
export fn entry() void {
|
||||
_ = @Type(.{
|
||||
.@"enum" = .{
|
||||
.tag_type = u32,
|
||||
.fields = &.{
|
||||
.{ .name = "A", .value = 0 },
|
||||
.{ .name = "A", .value = 1 },
|
||||
},
|
||||
.decls = &.{},
|
||||
.is_exhaustive = false,
|
||||
},
|
||||
});
|
||||
_ = @Enum(u32, .nonexhaustive, &.{ "A", "A" }, &.{ 0, 1 });
|
||||
}
|
||||
|
||||
// error
|
||||
//
|
||||
// :2:9: error: duplicate enum field 'A'
|
||||
// :2:9: note: other field here
|
||||
// :2:36: error: duplicate enum field 'A'
|
||||
// :2:36: note: other field here
|
||||
|
||||
Some files were not shown because too many files have changed in this diff.