-
-
- - Index - -
- - - -
How to search effectively
-How To Search Effectively
-Matching
--
-
- Search is case-insensitive by default. -
- Using uppercase letters in your query will make the search - case-sensitive. -
- Given
ArrayListUnmanaged: --
-
- the following search terms (and their prefixes) will match:
-
-
-
array
- list
- unmanaged
-
- - the following search terms will NOT match:
-
-
-
stun
- ray
- managed
-
-
- - the following search terms (and their prefixes) will match:
-
- More precisely, the search system is based on a Radix Tree. The Radix Tree contains full decl names plus some suffixes, split by following the official style guide (e.g.
HashMapUnmanagedalso producesMapUnmanagedandUnmanaged, same with snake_case and camelCase names).
-
Multiple terms
- --
-
- When a search query contains multiple terms, order doesn't matter when
- all terms match within a single decl name (e.g. "map auto" will match
AutoHashMap).
- - Query term order does matter when matching different decls alongside
- a path (e.g. "js parse" matching
std.json.parse), in which - case the order of the terms will determine whether the match goes above or - below the "other results" line.
- - As an example, "fs create" will put above the line all things related to the creation of files and directories inside of `std.fs`, while still showing (but below the line) matches from `std.Build`. -
- As another example, "fs windows" will prioritize windows-related results in `std.fs`, while "windows fs" will prioritize "fs"-related results in `std.windows`. -
- This means that if you're searching inside a target namespace, you never have to read below the "other results" line. -
- Since matching doesn't have to be perfect, you can also target a group of namespaces to search into. For example "array orderedremove" will show you all "Array-" namespaces that support
orderedRemove.
- - Periods are replaced by spaces because the Radix Tree doesn't index full paths, and in practice you should expect the match scoring system to consistently give you what you're looking for even when your query path is split into multiple terms. -
Search Results
-No Results Found
-Here are some things you can try:
--
-
- Check out the Language Reference for the language itself. -
- Check out the Learn page for other helpful resources for learning Zig. -
- Use your search engine. -
Press ? to see keyboard shortcuts and Esc to return.
-Table of Contents
-Loading...
-This function is not tested or referenced.
-- This declaration is not tested or referenced, and it has therefore not been included in - semantic analysis, which means the only documentation available is whatever is in the - doc comments. -
-Parameters
- -Errors
-anyerror means the error set is known only at runtime.
-Fields
- -Namespaces
-Other Namespaces ⓘ
- -Types
-Global Variables
-Functions
-Values
-Error Sets
-Examples
-Usage Examples ⓘ
- -Tests
-Loading...
+[src]
+
+ Parameters
+Keyboard Shortcuts
-- ?
- Toggle this help modal
- Focus the search field
- ↑
- Move up in search results
- ↓
- Move down in search results
- ⏎
- Go to active search result
- p
- Open preferences
- Esc
- Clear focus; close this modal
Errors
+anyerror means the error set is known only at runtime.
+Search Results
+No Results Found
+Press escape to exit search and then '?' to see more options.
+Fields
+Preferences
--
-
-
Types
+-
+
Namespaces
+-
+
Global Variables
+Values
+Functions
+-
+
Error Sets
+-
+
Example Usage
+
+ Source Code
+
+ Keyboard Shortcuts
+- ?
- Show this help dialog
- Esc
- Clear focus; close this dialog
- s
- Focus the search field
- u
- Go to source code
- ↑
- Move up in search results
- ↓
- Move down in search results
- ⏎
- Go to active search result
" + section.name + "
"; - } - for (let guide of section.guides) { - html += "- " + (guide.title || guide.name) + " "; - html += guide.toc + "
- "+html+"
There are no doc comments for this declaration.
"; - } - domTldDocs.classList.remove("hidden"); - } - - function typeIsErrSet(typeIndex) { - let typeObj = getType(typeIndex); - return typeObj.kind === typeKinds.ErrorSet; - } - - function typeIsStructWithNoFields(typeIndex) { - let typeObj = getType(typeIndex); - if (typeObj.kind !== typeKinds.Struct) return false; - return typeObj.field_types.length == 0; - } - - function typeIsGenericFn(typeIndex) { - let typeObj = getType(typeIndex); - if (typeObj.kind !== typeKinds.Fn) { - return false; - } - return typeObj.generic_ret != null; - } - - function renderFn(fnDecl) { - if ("refPath" in fnDecl.value.expr) { - let last = fnDecl.value.expr.refPath.length - 1; - let lastExpr = fnDecl.value.expr.refPath[last]; - console.assert("declRef" in lastExpr); - fnDecl = getDecl(lastExpr.declRef); - } - - let value = resolveValue(fnDecl.value); - console.assert("type" in value.expr); - let typeObj = getType(value.expr.type); - - domFnProtoCode.innerHTML = renderTokens(ex(value.expr, { fnDecl: fnDecl })); - domFnSourceLink.classList.remove("hidden"); - domFnSourceLink.innerHTML = "[src]"; - - let docsSource = null; - let srcNode = getAstNode(fnDecl.src); - if (srcNode.docs != null) { - docsSource = srcNode.docs; - } - - renderFnParamDocs(fnDecl, typeObj); - - let retExpr = resolveValue({ expr: typeObj.ret }).expr; - if ("type" in retExpr) { - let retIndex = retExpr.type; - let errSetTypeIndex = null; - let retType = getType(retIndex); - if (retType.kind === typeKinds.ErrorSet) { - errSetTypeIndex = retIndex; - } else if (retType.kind === typeKinds.ErrorUnion) { - errSetTypeIndex = retType.err.type; - } - if (errSetTypeIndex != null) { - let errSetType = getType(errSetTypeIndex); - renderErrorSet(errSetType); - } - } - - let protoSrcIndex = fnDecl.src; - if (typeIsGenericFn(value.expr.type)) { - // does the generic_ret contain a container? 
- var resolvedGenericRet = resolveValue({ expr: typeObj.generic_ret }); - - if ("call" in resolvedGenericRet.expr) { - let call = zigAnalysis.calls[resolvedGenericRet.expr.call]; - let resolvedFunc = resolveValue({ expr: call.func }); - if (!("type" in resolvedFunc.expr)) return; - let callee = getType(resolvedFunc.expr.type); - if (!callee.generic_ret) return; - resolvedGenericRet = resolveValue({ expr: callee.generic_ret }); - } - - // TODO: see if unwrapping the `as` here is a good idea or not. - if ("as" in resolvedGenericRet.expr) { - resolvedGenericRet = { - expr: zigAnalysis.exprs[resolvedGenericRet.expr.as.exprArg], - }; - } - - if (!("type" in resolvedGenericRet.expr)) return; - const genericType = getType(resolvedGenericRet.expr.type); - if (isContainerType(genericType)) { - renderContainer(genericType); - } - - // old code - // let instantiations = nodesToFnsMap[protoSrcIndex]; - // let calls = nodesToCallsMap[protoSrcIndex]; - // if (instantiations == null && calls == null) { - // domFnNoExamples.classList.remove("hidden"); - // } else if (calls != null) { - // // if (fnObj.combined === undefined) fnObj.combined = allCompTimeFnCallsResult(calls); - // if (fnObj.combined != null) renderContainer(fnObj.combined); - - // resizeDomList(domListFnExamples, calls.length, ''); - - // for (let callI = 0; callI < calls.length; callI += 1) { - // let liDom = domListFnExamples.children[callI]; - // liDom.innerHTML = getCallHtml(fnDecl, calls[callI]); - // } - - // domFnExamples.classList.remove("hidden"); - // } else if (instantiations != null) { - // // TODO - // } - } else { - domFnExamples.classList.add("hidden"); - domFnNoExamples.classList.add("hidden"); - } - - let protoSrcNode = getAstNode(protoSrcIndex); - if ( - docsSource == null && - protoSrcNode != null && - protoSrcNode.docs != null - ) { - docsSource = protoSrcNode.docs; - } - if (docsSource != null) { - domTldDocs.innerHTML = markdown(docsSource, fnDecl); - domTldDocs.classList.remove("hidden"); - } 
- domFnProto.classList.remove("hidden"); - } - - function renderFnParamDocs(fnDecl, typeObj) { - let docCount = 0; - - let fnNode = getAstNode(fnDecl.src); - let fields = fnNode.fields; - if (fields === null) { - fields = getAstNode(typeObj.src).fields; - } - let isVarArgs = typeObj.is_var_args; - - for (let i = 0; i < fields.length; i += 1) { - let field = fields[i]; - let fieldNode = getAstNode(field); - if (fieldNode.docs != null) { - docCount += 1; - } - } - if (docCount == 0) { - return; - } - - resizeDomList(domListParams, docCount, ""); - let domIndex = 0; - - for (let i = 0; i < fields.length; i += 1) { - let field = fields[i]; - let fieldNode = getAstNode(field); - let docs = fieldNode.docs; - if (fieldNode.docs == null) { - continue; - } - let docsNonEmpty = docs !== ""; - let divDom = domListParams.children[domIndex]; - domIndex += 1; - - let value = typeObj.params[i]; - let preClass = docsNonEmpty ? ' class="fieldHasDocs"' : ""; - let html = "" + renderTokens((function*() {
- yield Tok.identifier(fieldNode.name);
- yield Tok.colon;
- yield Tok.space;
- if (isVarArgs && i === typeObj.params.length - 1) {
- yield Tok.period;
- yield Tok.period;
- yield Tok.period;
- } else {
- yield* ex(value, {});
- }
- yield Tok.comma;
- }()));
-
- html += "";
-
- if (docsNonEmpty) {
- html += '" + name + "(" - + zigAnalysis.typeKinds[typeObj.kind] + ")"; + const hdrNameSpan = domHdrName.children[0]; + const srcLink = domHdrName.children[1]; + hdrNameSpan.innerText = unwrapString(wasm_exports.decl_category_name(decl_index)); + srcLink.setAttribute('href', curNav.viewSourceHash); domHdrName.classList.remove("hidden"); - } - if (typeObj.kind == typeKinds.ErrorSet) { - renderErrorSet(typeObj); - } - } - function renderErrorSet(errSetType) { - if (errSetType.fields == null) { - domFnErrorsAnyError.classList.remove("hidden"); - } else { - let errorList = []; - for (let i = 0; i < errSetType.fields.length; i += 1) { - let errObj = errSetType.fields[i]; - //let srcObj = zigAnalysis.astNodes[errObj.src]; - errorList.push(errObj); - } - errorList.sort(function(a, b) { - return operatorCompare(a.name.toLowerCase(), b.name.toLowerCase()); - }); - - resizeDomListDl(domListFnErrors, errorList.length); - for (let i = 0; i < errorList.length; i += 1) { - let nameTdDom = domListFnErrors.children[i * 2 + 0]; - let descTdDom = domListFnErrors.children[i * 2 + 1]; - nameTdDom.textContent = errorList[i].name; - let docs = errorList[i].docs; - if (docs != null) { - descTdDom.innerHTML = markdown(docs); - } else { - descTdDom.textContent = ""; - } - } - domTableFnErrors.classList.remove("hidden"); - } - domSectFnErrors.classList.remove("hidden"); - } - - // function allCompTimeFnCallsHaveTypeResult(typeIndex, value) { - // let srcIndex = zigAnalysis.fns[value].src; - // let calls = nodesToCallsMap[srcIndex]; - // if (calls == null) return false; - // for (let i = 0; i < calls.length; i += 1) { - // let call = zigAnalysis.calls[calls[i]]; - // if (call.result.type !== typeTypeId) return false; - // } - // return true; - // } - // - // function allCompTimeFnCallsResult(calls) { - // let firstTypeObj = null; - // let containerObj = { - // privDecls: [], - // }; - // for (let callI = 0; callI < calls.length; callI += 1) { - // let call = zigAnalysis.calls[calls[callI]]; 
- // if (call.result.type !== typeTypeId) return null; - // let typeObj = zigAnalysis.types[call.result.value]; - // if (!typeKindIsContainer(typeObj.kind)) return null; - // if (firstTypeObj == null) { - // firstTypeObj = typeObj; - // containerObj.src = typeObj.src; - // } else if (firstTypeObj.src !== typeObj.src) { - // return null; - // } - // - // if (containerObj.fields == null) { - // containerObj.fields = (typeObj.fields || []).concat([]); - // } else for (let fieldI = 0; fieldI < typeObj.fields.length; fieldI += 1) { - // let prev = containerObj.fields[fieldI]; - // let next = typeObj.fields[fieldI]; - // if (prev === next) continue; - // if (typeof(prev) === 'object') { - // if (prev[next] == null) prev[next] = typeObj; - // } else { - // containerObj.fields[fieldI] = {}; - // containerObj.fields[fieldI][prev] = firstTypeObj; - // containerObj.fields[fieldI][next] = typeObj; - // } - // } - // - // if (containerObj.pubDecls == null) { - // containerObj.pubDecls = (typeObj.pubDecls || []).concat([]); - // } else for (let declI = 0; declI < typeObj.pubDecls.length; declI += 1) { - // let prev = containerObj.pubDecls[declI]; - // let next = typeObj.pubDecls[declI]; - // if (prev === next) continue; - // // TODO instead of showing "examples" as the public declarations, - // // do logic like this: - // //if (typeof(prev) !== 'object') { - // // let newDeclId = zigAnalysis.decls.length; - // // prev = clone(zigAnalysis.decls[prev]); - // // prev.id = newDeclId; - // // zigAnalysis.decls.push(prev); - // // containerObj.pubDecls[declI] = prev; - // //} - // //mergeDecls(prev, next, firstTypeObj, typeObj); - // } - // } - // for (let declI = 0; declI < containerObj.pubDecls.length; declI += 1) { - // let decl = containerObj.pubDecls[declI]; - // if (typeof(decl) === 'object') { - // containerObj.pubDecls[declI] = containerObj.pubDecls[declI].id; - // } - // } - // return containerObj; - // } - - function renderValue(decl) { - let resolvedValue = 
resolveValue(decl.value); - if (resolvedValue.expr.fieldRef) { - const declRef = decl.value.expr.refPath[0].declRef; - const type = getDecl(declRef); - - domFnProtoCode.innerHTML = renderTokens( - (function*() { - yield Tok.const; - yield Tok.space; - yield Tok.identifier(decl.name); - yield Tok.colon; - yield Tok.space; - yield Tok.identifier(type.name); - yield Tok.space; - yield Tok.eql; - yield Tok.space; - yield* ex(decl.value.expr, {}); - yield Tok.semi; - })()); - } else if ( - resolvedValue.expr.string !== undefined || - resolvedValue.expr.call !== undefined || - resolvedValue.expr.comptimeExpr !== undefined - ) { - // TODO: we're using the resolved value but - // not keeping track of how we got there - // that's important context that should - // be shown to the user! - domFnProtoCode.innerHTML = renderTokens( - (function*() { - yield Tok.const; - yield Tok.space; - yield Tok.identifier(decl.name); - if (decl.value.typeRef) { - yield Tok.colon; - yield Tok.space; - yield* ex(decl.value.typeRef, {}); - } - yield Tok.space; - yield Tok.eql; - yield Tok.space; - yield* ex(resolvedValue.expr, {}); - yield Tok.semi; - })()); - } else if (resolvedValue.expr.compileError) { - domFnProtoCode.innerHTML = renderTokens( - (function*() { - yield Tok.const; - yield Tok.space; - yield Tok.identifier(decl.name); - yield Tok.space; - yield Tok.eql; - yield Tok.space; - yield* ex(decl.value.expr, {}); - yield Tok.semi; - })()); - } else { - const parent = getType(decl.parent_container); - domFnProtoCode.innerHTML = renderTokens( - (function*() { - yield Tok.const; - yield Tok.space; - yield Tok.identifier(decl.name); - if (decl.value.typeRef !== null) { - yield Tok.colon; - yield Tok.space; - yield* ex(decl.value.typeRef, {}); - } - yield Tok.space; - yield Tok.eql; - yield Tok.space; - yield* ex(decl.value.expr, {}); - yield Tok.semi; - })()); + renderTopLevelDocs(decl_index); } - let docs = getAstNode(decl.src).docs; - if (docs != null) { - // TODO: it shouldn't just be 
decl.parent_container, but rather - // the type that the decl holds (if the value is a type) - domTldDocs.innerHTML = markdown(docs, decl); - - domTldDocs.classList.remove("hidden"); - } - - domFnProto.classList.remove("hidden"); - } - - function renderVar(decl) { - let resolvedVar = resolveValue(decl.value); - - if (resolvedVar.expr.fieldRef) { - const declRef = decl.value.expr.refPath[0].declRef; - const type = getDecl(declRef); - domFnProtoCode.innerHTML = renderTokens( - (function*() { - yield Tok.var; - yield Tok.space; - yield Tok.identifier(decl.name); - yield Tok.colon; - yield Tok.space; - yield Tok.identifier(type.name); - yield Tok.space; - yield Tok.eql; - yield Tok.space; - yield* ex(decl.value.expr, {}); - yield Tok.semi; - })()); - } else if ( - resolvedVar.expr.string !== undefined || - resolvedVar.expr.call !== undefined || - resolvedVar.expr.comptimeExpr !== undefined - ) { - domFnProtoCode.innerHTML = renderTokens( - (function*() { - yield Tok.var; - yield Tok.space; - yield Tok.identifier(decl.name); - if (decl.value.typeRef) { - yield Tok.colon; - yield Tok.space; - yield* ex(decl.value.typeRef, {}); - } - yield Tok.space; - yield Tok.eql; - yield Tok.space; - yield* ex(decl.value.expr, {}); - yield Tok.semi; - })()); - } else if (resolvedVar.expr.compileError) { - domFnProtoCode.innerHTML = renderTokens( - (function*() { - yield Tok.var; - yield Tok.space; - yield Tok.identifier(decl.name); - yield Tok.space; - yield Tok.eql; - yield Tok.space; - yield* ex(decl.value.expr, {}); - yield Tok.semi; - })()); - } else { - domFnProtoCode.innerHTML = renderTokens( - (function*() { - yield Tok.var; - yield Tok.space; - yield Tok.identifier(decl.name); - yield Tok.colon; - yield Tok.space; - yield* ex(resolvedVar.typeRef, {}); - yield Tok.space; - yield Tok.eql; - yield Tok.space; - yield* ex(decl.value.expr, {}); - yield Tok.semi; - })()); - } - - let docs = getAstNode(decl.src).docs; - if (docs != null) { - domTldDocs.innerHTML = markdown(docs); - 
domTldDocs.classList.remove("hidden"); - } - - domFnProto.classList.remove("hidden"); - } - - function categorizeDecls( - decls, - typesList, - namespacesWithDocsList, - namespacesNoDocsList, - errSetsList, - fnsList, - varsList, - valsList, - testsList, - unsList - ) { - for (let i = 0; i < decls.length; i += 1) { - let decl = getDecl(decls[i]); - let declValue = resolveValue(decl.value); - - // if (decl.isTest) { - // testsList.push(decl); - // continue; - // } - - if (decl.kind === "var") { - varsList.push(decl); - continue; - } - - if (decl.kind === "const") { - if ("type" in declValue.expr) { - // We have the actual type expression at hand. - const typeExpr = getType(declValue.expr.type); - if (typeExpr.kind == typeKinds.Fn) { - const funcRetExpr = resolveValue({ - expr: typeExpr.ret, - }); - if ( - "type" in funcRetExpr.expr && - funcRetExpr.expr.type == typeTypeId - ) { - if (typeIsErrSet(declValue.expr.type)) { - errSetsList.push(decl); - } else if (typeIsStructWithNoFields(declValue.expr.type)) { - - let docs = getAstNode(decl.src).docs; - if (!docs) { - // If this is a re-export, try to fetch docs from the actual definition - const { value, seenDecls } = resolveValue(decl.value, true); - if (seenDecls.length > 0) { - const definitionDecl = getDecl(seenDecls[seenDecls.length - 1]); - docs = getAstNode(definitionDecl.src).docs; - } else { - docs = getAstNode(getType(value.expr.type).src).docs; - } - } - - if (docs) { - namespacesWithDocsList.push({decl, docs}); - } else { - namespacesNoDocsList.push(decl); - } - } else { - typesList.push(decl); - } - } else { - fnsList.push(decl); - } - } else { - if (typeIsErrSet(declValue.expr.type)) { - errSetsList.push(decl); - } else if (typeIsStructWithNoFields(declValue.expr.type)) { - let docs = getAstNode(decl.src).docs; - if (!docs) { - // If this is a re-export, try to fetch docs from the actual definition - const { value, seenDecls } = resolveValue(decl.value, true); - if (seenDecls.length > 0) { - const 
definitionDecl = getDecl(seenDecls[seenDecls.length - 1]); - docs = getAstNode(definitionDecl.src).docs; - } else { - docs = getAstNode(getType(value.expr.type).src).docs; - } - } - if (docs) { - namespacesWithDocsList.push({decl, docs}); - } else { - namespacesNoDocsList.push(decl); - } - } else { - typesList.push(decl); - } - } - } else if (declValue.typeRef) { - if ("type" in declValue.typeRef && declValue.typeRef == typeTypeId) { - // We don't know what the type expression is, but we know it's a type. - typesList.push(decl); - } else { - valsList.push(decl); - } - } else { - valsList.push(decl); - } - } - - if (decl.is_uns) { - unsList.push(decl); - } - } - } - - function sourceFileLink(decl) { - const srcNode = getAstNode(decl.src); - const srcFile = getFile(srcNode.file); - return sourceFileUrlTemplate. - replace("{{mod}}", zigAnalysis.modules[srcFile.modIndex].name). - replace("{{file}}", srcFile.name). - replace("{{line}}", srcNode.line + 1); - } - - function renderContainer(container) { - let typesList = []; - - let namespacesWithDocsList = []; - let namespacesNoDocsList = []; - - let errSetsList = []; - - let fnsList = []; - - let varsList = []; - - let valsList = []; - - let testsList = []; - - let unsList = []; - - categorizeDecls( - container.pubDecls, - typesList, - namespacesWithDocsList, - namespacesNoDocsList, - errSetsList, - fnsList, - varsList, - valsList, - testsList, - unsList - ); - if (curNav.showPrivDecls) - categorizeDecls( - container.privDecls, - typesList, - namespacesWithDocsList, - namespacesNoDocsList, - errSetsList, - fnsList, - varsList, - valsList, - testsList, - unsList - ); - - while (unsList.length > 0) { - let uns = unsList.shift(); - let declValue = resolveValue(uns.value); - if (!("type" in declValue.expr)) continue; - let uns_container = getType(declValue.expr.type); - if (!isContainerType(uns_container)) continue; - categorizeDecls( - uns_container.pubDecls, - typesList, - namespacesWithDocsList, - namespacesNoDocsList, - 
errSetsList, - fnsList, - varsList, - valsList, - testsList, - unsList - ); - if (curNav.showPrivDecls) - categorizeDecls( - uns_container.privDecls, - typesList, - namespacesWithDocsList, - namespacesNoDocsList, - errSetsList, - fnsList, - varsList, - valsList, - testsList, - unsList - ); - } - - typesList.sort(byNameProperty); - namespacesWithDocsList.sort(byNameProperty); - namespacesNoDocsList.sort(byNameProperty); - errSetsList.sort(byNameProperty); - fnsList.sort(byNameProperty); - varsList.sort(byNameProperty); - valsList.sort(byNameProperty); - testsList.sort(byNameProperty); - - if (container.src != null) { - let docs = getAstNode(container.src).docs; - if (docs != null) { - domTldDocs.innerHTML = markdown(docs, container); + function renderTopLevelDocs(decl_index) { + const tld_docs_html = unwrapString(wasm_exports.decl_docs_html(decl_index, false)); + if (tld_docs_html.length > 0) { + domTldDocs.innerHTML = tld_docs_html; domTldDocs.classList.remove("hidden"); } } - if (typesList.length !== 0) { - const splitPoint = Math.ceil(typesList.length / 2); - const template = '
No documentation provided.
"; - } - if (i == splitPoint - 1) { - activeList = domListTypesRight; - offset = splitPoint; - } - } - domSectTypes.classList.remove("hidden"); - } - - if (namespacesWithDocsList.length !== 0) { - const splitPoint = Math.ceil(namespacesWithDocsList.length / 2); - const template = 'No documentation provided.
"; - } - } - domSectFns.classList.remove("hidden"); - } - - let containerNode = getAstNode(container.src); - if (containerNode.fields && containerNode.fields.length > 0) { - resizeDomList(domListFields, containerNode.fields.length, "
"); - - for (let i = 0; i < containerNode.fields.length; i += 1) { - let fieldNode = getAstNode(containerNode.fields[i]); - let divDom = domListFields.children[i]; - let fieldName = fieldNode.name; - let docs = fieldNode.docs; - let docsNonEmpty = docs != null && docs !== ""; - let extraPreClass = docsNonEmpty ? " fieldHasDocs" : ""; - - let html = - '' +
- escapeHtml(fieldName);
-
- if (container.kind === typeKinds.Enum) {
- let value = container.values[i];
- if (value !== null) {
- html += renderTokens((function*() {
- yield Tok.space;
- yield Tok.eql;
- yield Tok.space;
- yield* ex(value, {});
- })());
- }
- } else {
- let fieldTypeExpr = container.field_types[i];
- if (container.kind !== typeKinds.Struct || !container.is_tuple) {
- html += renderTokens((function*() {
- yield Tok.colon;
- yield Tok.space;
- })());
- }
- html += renderTokens(ex(fieldTypeExpr, {}));
- let tsn = typeShorthandName(fieldTypeExpr);
- if (tsn) {
- html += " (" + tsn + ")";
- }
- if (container.kind === typeKinds.Struct && !container.is_tuple) {
- let defaultInitExpr = container.field_defaults[i];
- if (defaultInitExpr !== null) {
- html += renderTokens((function*() {
- yield Tok.space;
- yield Tok.eql;
- yield Tok.space;
- yield* ex(defaultInitExpr, {});
- })());
- }
- }
- }
-
- html += "," + - zigAnalysis.typeKinds[container.kind] + - ""; - domHdrName.classList.remove("hidden"); - } - } - - function operatorCompare(a, b) { - if (a === b) { - return 0; - } else if (a < b) { - return -1; - } else { - return 1; - } - } - - function detectRootIsStd() { - let rootMod = zigAnalysis.modules[zigAnalysis.rootMod]; - if (rootMod.table["std"] == null) { - // no std mapped into the root module - return false; - } - let stdMod = zigAnalysis.modules[rootMod.table["std"]]; - if (stdMod == null) return false; - return rootMod.file === stdMod.file; - } - - function indexTypeKinds() { - let map = {}; - for (let i = 0; i < zigAnalysis.typeKinds.length; i += 1) { - map[zigAnalysis.typeKinds[i]] = i; - } - // This is just for debugging purposes, not needed to function - let assertList = [ - "Type", - "Void", - "Bool", - "NoReturn", - "Int", - "Float", - "Pointer", - "Array", - "Struct", - "ComptimeFloat", - "ComptimeInt", - "Undefined", - "Null", - "Optional", - "ErrorUnion", - "ErrorSet", - "Enum", - "Union", - "Fn", - "Opaque", - "Frame", - "AnyFrame", - "Vector", - "EnumLiteral", - ]; - for (let i = 0; i < assertList.length; i += 1) { - if (map[assertList[i]] == null) - throw new Error("No type kind '" + assertList[i] + "' found"); - } - return map; - } - - function findTypeTypeId() { - for (let i = 0; i < zigAnalysis.types.length; i += 1) { - if (getType(i).kind == typeKinds.Type) { - return i; - } - } - throw new Error("No type 'type' found"); - } - - - function updateCurNav() { - curNav = { - hash: location.hash, - mode: NAV_MODES.API, - modNames: [], - modObjs: [], - declNames: [], - declObjs: [], - callName: null, - activeGuide: null, - activeGuideScrollTo: null, - }; - curNavSearch = ""; - - const mode = location.hash.substring(0, 3); - let query = location.hash.substring(3); - - let qpos = query.indexOf("?"); - let nonSearchPart; - if (qpos === -1) { - nonSearchPart = query; - } else { - nonSearchPart = query.substring(0, qpos); - curNavSearch = 
decodeURIComponent(query.substring(qpos + 1)); - } - - const DEFAULT_HASH = NAV_MODES.API + zigAnalysis.modules[zigAnalysis.rootMod].name; - switch (mode) { - case NAV_MODES.API: - // #A;MODULE:decl.decl.decl?search-term - curNav.mode = mode; - { - let parts = nonSearchPart.split(":"); - if (parts[0] == "") { - location.hash = DEFAULT_HASH; - } else { - curNav.modNames = decodeURIComponent(parts[0]).split("."); - } - - if (parts[1] != null) { - curNav.declNames = decodeURIComponent(parts[1]).split("."); - } - } - return; - case NAV_MODES.GUIDES: - curNav.mode = mode; - - { - let parts = nonSearchPart.split(":"); - curNav.activeGuide = parts[0]; - if (parts[1] != null) { - curNav.activeGuideScrollTo = decodeURIComponent(":" + parts[1]); - } - } - return; - default: - location.hash = DEFAULT_HASH; - return; - } - } - - function onHashChange(ev) { - scrollHistory[curNav.hash] = scrollMonitor.map(function (x) { - return [x, x.scrollTop] - }); - - if (skipNextHashChange == decodeURIComponent(location.hash)) { - skipNextHashChange = null; - return; - } - skipNextHashChange = null; - updateCurNav(); - - if (domSearch.value !== curNavSearch) { - domSearch.value = curNavSearch; - if (domSearch.value.length == 0) - domSearchPlaceholder.classList.remove("hidden"); - else - domSearchPlaceholder.classList.add("hidden"); - } - render(); - if (imFeelingLucky) { - imFeelingLucky = false; - activateSelectedResult(); - } - - scroll(); - } - - function scroll() { - const cur = scrollHistory[location.hash]; - if (cur) { - for (let [elem, offset] of cur) { - elem.scrollTo(0, offset); - } - } else { - if (curNav.activeGuideScrollTo) return; - for (let elem of scrollMonitor) { - elem.scrollTo(0, 0); - } - } - } - - function findSubDecl(parentTypeOrDecl, childName) { - let parentType = parentTypeOrDecl; - { - // Generic functions / resolving decls - if ("value" in parentType) { - const rv = resolveValue(parentType.value); - if ("type" in rv.expr) { - const t = getType(rv.expr.type); - 
parentType = t; - if (t.kind == typeKinds.Fn && t.generic_ret != null) { - let resolvedGenericRet = resolveValue({ expr: t.generic_ret }); - - if ("call" in resolvedGenericRet.expr) { - let call = zigAnalysis.calls[resolvedGenericRet.expr.call]; - let resolvedFunc = resolveValue({ expr: call.func }); - if (!("type" in resolvedFunc.expr)) return null; - let callee = getType(resolvedFunc.expr.type); - if (!callee.generic_ret) return null; - resolvedGenericRet = resolveValue({ expr: callee.generic_ret }); - } - - if ("type" in resolvedGenericRet.expr) { - parentType = getType(resolvedGenericRet.expr.type); - } - } - } - } - } - - if (parentType.pubDecls) { - for (let i = 0; i < parentType.pubDecls.length; i += 1) { - let declIndex = parentType.pubDecls[i]; - let childDecl = getDecl(declIndex); - if (childDecl.name === childName) { - childDecl.find_subdecl_idx = declIndex; - return childDecl; - } else if (childDecl.is_uns) { - let declValue = resolveValue(childDecl.value); - if (!("type" in declValue.expr)) continue; - let uns_container = getType(declValue.expr.type); - let uns_res = findSubDecl(uns_container, childName); - if (uns_res !== null) return uns_res; - } - } - } - - if (parentType.privDecls) { - for (let i = 0; i < parentType.privDecls.length; i += 1) { - let declIndex = parentType.privDecls[i]; - let childDecl = getDecl(declIndex); - if (childDecl.name === childName) { - childDecl.find_subdecl_idx = declIndex; - childDecl.is_private = true; - return childDecl; - } else if (childDecl.is_uns) { - let declValue = resolveValue(childDecl.value); - if (!("type" in declValue.expr)) continue; - let uns_container = getType(declValue.expr.type); - let uns_res = findSubDecl(uns_container, childName); - uns_res.is_private = true; - if (uns_res !== null) return uns_res; - } - } - } - - return null; - } - - function computeCanonicalModulePaths() { - let list = new Array(zigAnalysis.modules.length); - // Now we try to find all the modules from root. 
- let rootMod = zigAnalysis.modules[zigAnalysis.rootMod]; - // Breadth-first to keep the path shortest possible. - let stack = [ + function renderNavFancy(cur_nav_decl, list) { { - path: [], - mod: rootMod, - }, - ]; - while (stack.length !== 0) { - let item = stack.shift(); - for (let key in item.mod.table) { - let childModIndex = item.mod.table[key]; - if (list[childModIndex] != null) continue; - let childMod = zigAnalysis.modules[childModIndex]; - if (childMod == null) continue; - - let newPath = item.path.concat([key]); - list[childModIndex] = newPath; - stack.push({ - path: newPath, - mod: childMod, - }); - } - } - - for (let i = 0; i < zigAnalysis.modules.length; i += 1) { - const p = zigAnalysis.modules[i]; - // TODO - // declSearchIndex.add(p.name, {moduleId: i}); - } - return list; - } - - function computeCanonDeclPaths() { - let list = new Array(zigAnalysis.decls.length); - canonTypeDecls = new Array(zigAnalysis.types.length); - - for (let modI = 0; modI < zigAnalysis.modules.length; modI += 1) { - let mod = zigAnalysis.modules[modI]; - let modNames = canonModPaths[modI]; - if (modNames === undefined) continue; - - let stack = [ - { - declNames: [], - declIndexes: [], - type: getType(mod.main), - }, - ]; - while (stack.length !== 0) { - let item = stack.shift(); - - if (isContainerType(item.type)) { - let t = item.type; - - let len = t.pubDecls ? t.pubDecls.length : 0; - for (let declI = 0; declI < len; declI += 1) { - let declIndex = t.pubDecls[declI]; - if (list[declIndex] != null) continue; - - let decl = getDecl(declIndex); - - if (decl.is_uns) { - let unsDeclList = [decl]; - while (unsDeclList.length != 0) { - let unsDecl = unsDeclList.pop(); - let unsDeclVal = resolveValue(unsDecl.value); - if (!("type" in unsDeclVal.expr)) continue; - let unsType = getType(unsDeclVal.expr.type); - if (!isContainerType(unsType)) continue; - let unsPubDeclLen = unsType.pubDecls ? 
unsType.pubDecls.length : 0; - for (let unsDeclI = 0; unsDeclI < unsPubDeclLen; unsDeclI += 1) { - let childDeclIndex = unsType.pubDecls[unsDeclI]; - let childDecl = getDecl(childDeclIndex); - - if (childDecl.is_uns) { - unsDeclList.push(childDecl); - } else { - addDeclToSearchResults(childDecl, childDeclIndex, modNames, item, list, stack); - } - } - } - } else { - addDeclToSearchResults(decl, declIndex, modNames, item, list, stack); - } - } + // First, walk backwards the decl parents within a file. + let decl_it = cur_nav_decl; + let prev_decl_it = null; + while (decl_it != null) { + list.push({ + name: declIndexName(decl_it), + href: navLinkDeclIndex(decl_it), + }); + prev_decl_it = decl_it; + decl_it = declParent(decl_it); } - } - } - window.cdp = list; - return list; - } - function addDeclToSearchResults(decl, declIndex, modNames, item, list, stack) { - let {value: declVal, seenDecls} = resolveValue(decl.value, true); - let declNames = item.declNames.concat([decl.name]); - let declIndexes = item.declIndexes.concat([declIndex]); - - if (list[declIndex] != null) return; - list[declIndex] = { - modNames: modNames, - declNames: declNames, - declIndexes: declIndexes, - }; - - for (let sd of seenDecls) { - if (list[sd] != null) continue; - list[sd] = { - modNames: modNames, - declNames: declNames, - declIndexes: declIndexes, - }; - } - - // add to search index - { - declSearchIndex.add(decl.name, { declIndex }); - } - - - if ("type" in declVal.expr) { - let value = getType(declVal.expr.type); - if (declCanRepresentTypeKind(value.kind)) { - canonTypeDecls[declVal.type] = declIndex; - } - - if (isContainerType(value)) { - stack.push({ - declNames: declNames, - declIndexes: declIndexes, - type: value, - }); - } - - // Generic function - if (typeIsGenericFn(declVal.expr.type)) { - let ret = resolveGenericRet(value); - if (ret != null && "type" in ret.expr) { - let generic_type = getType(ret.expr.type); - if (isContainerType(generic_type)) { - stack.push({ - declNames: 
declNames, - declIndexes: declIndexes, - type: generic_type, + // Next, walk backwards the file path segments. + if (prev_decl_it != null) { + const file_path = fullyQualifiedName(prev_decl_it); + const parts = file_path.split("."); + parts.pop(); // skip last + for (;;) { + const href = navLinkFqn(parts.join(".")); + const part = parts.pop(); + if (!part) break; + list.push({ + name: part, + href: href, }); } } + + list.reverse(); } + resizeDomList(domListNav, list.length, '
" - } - for (let result of list) { - const points = result.points; - const match = result.declIndex; - - let canonPath = getCanonDeclPath(match); - if (canonPath == null) continue; - - let lastModName = canonPath.modNames[canonPath.modNames.length - 1]; - let text = lastModName + "." + canonPath.declNames.join("."); - - - const href = navLink(canonPath.modNames, canonPath.declNames); - - matchedItemsHTML += "
");
+ try file_source_html(decl.file, out, field_node, .{});
+ try out.appendSlice(gpa, "");
+
+ const field = ast.fullContainerField(field_node).?;
+ const first_doc_comment = Decl.findFirstDocComment(ast, field.firstToken());
+
+ if (ast.tokens.items(.tag)[first_doc_comment] == .doc_comment) {
+ try out.appendSlice(gpa, "");
+ try appendEscaped(out, name);
+ try out.appendSlice(gpa, ": ");
+ try file_source_html(decl.file, out, param_node, .{});
+ try out.appendSlice(gpa, "");
+
+ if (ast.tokens.items(.tag)[first_doc_comment] == .doc_comment) {
+ try out.appendSlice(gpa, "");
+ const content = doc.string(data.text.content);
+ if (resolve_decl_path(r.context, content)) |resolved_decl_index| {
+ g.link_buffer.clearRetainingCapacity();
+ try resolve_decl_link(resolved_decl_index, &g.link_buffer);
+
+ try writer.writeAll("{}", .{markdown.fmtHtml(content)});
+ } else {
+ try writer.print("{}", .{markdown.fmtHtml(content)});
+ }
+
+ try writer.writeAll("");
+ },
+
+ else => try Renderer.renderDefault(r, doc, node, writer),
+ }
+ }
+ }.render,
+ };
+ try renderer.render(parsed_doc, out.writer(gpa));
+}
+
+fn resolve_decl_path(decl_index: Decl.Index, path: []const u8) ?Decl.Index {
+ var path_components = std.mem.splitScalar(u8, path, '.');
+ var current_decl_index = decl_index.get().lookup(path_components.first()) orelse return null;
+ while (path_components.next()) |component| {
+ switch (current_decl_index.get().categorize()) {
+ .alias => |aliasee| current_decl_index = aliasee,
+ else => {},
+ }
+ current_decl_index = current_decl_index.get().get_child(component) orelse return null;
+ }
+ return current_decl_index;
+}
+
+export fn decl_type_html(decl_index: Decl.Index) String {
+ const decl = decl_index.get();
+ const ast = decl.file.get_ast();
+ string_result.clearRetainingCapacity();
+ t: {
+ // If there is an explicit type, use it.
+ if (ast.fullVarDecl(decl.ast_node)) |var_decl| {
+ if (var_decl.ast.type_node != 0) {
+ string_result.appendSlice(gpa, "") catch @panic("OOM");
+ file_source_html(decl.file, &string_result, var_decl.ast.type_node, .{
+ .skip_comments = true,
+ .collapse_whitespace = true,
+ }) catch |e| {
+ fatal("unable to render html: {s}", .{@errorName(e)});
+ };
+ string_result.appendSlice(gpa, "") catch @panic("OOM");
+ break :t;
+ }
+ }
+ }
+ return String.init(string_result.items);
+}
+
+const Oom = error{OutOfMemory};
+
+fn unpack_inner(tar_bytes: []u8) !void {
+ var fbs = std.io.fixedBufferStream(tar_bytes);
+ var file_name_buffer: [1024]u8 = undefined;
+ var link_name_buffer: [1024]u8 = undefined;
+ var it = std.tar.iterator(fbs.reader(), .{
+ .file_name_buffer = &file_name_buffer,
+ .link_name_buffer = &link_name_buffer,
+ });
+ while (try it.next()) |tar_file| {
+ switch (tar_file.kind) {
+ .normal => {
+ if (tar_file.size == 0 and tar_file.name.len == 0) break;
+ if (std.mem.endsWith(u8, tar_file.name, ".zig")) {
+ log.debug("found file: '{s}'", .{tar_file.name});
+ const file_name = try gpa.dupe(u8, tar_file.name);
+ if (std.mem.indexOfScalar(u8, file_name, '/')) |pkg_name_end| {
+ const pkg_name = file_name[0..pkg_name_end];
+ const gop = try Walk.modules.getOrPut(gpa, pkg_name);
+ const file: Walk.File.Index = @enumFromInt(Walk.files.entries.len);
+ if (!gop.found_existing or
+ std.mem.eql(u8, file_name[pkg_name_end..], "/root.zig") or
+ std.mem.eql(u8, file_name[pkg_name_end + 1 .. file_name.len - ".zig".len], pkg_name))
+ {
+ gop.value_ptr.* = file;
+ }
+ const file_bytes = tar_bytes[fbs.pos..][0..@intCast(tar_file.size)];
+ assert(file == try Walk.add_file(file_name, file_bytes));
+ }
+ } else {
+ log.warn("skipping: '{s}' - the tar creation should have done that", .{
+ tar_file.name,
+ });
+ }
+ try tar_file.skip();
+ },
+ else => continue,
+ }
+ }
+}
+
+fn fatal(comptime format: []const u8, args: anytype) noreturn {
+ var buf: [500]u8 = undefined;
+ const line = std.fmt.bufPrint(&buf, format, args) catch l: {
+ buf[buf.len - 3 ..][0..3].* = "...".*;
+ break :l &buf;
+ };
+ js.panic(line.ptr, line.len);
+}
+
+fn ascii_lower(bytes: []u8) void {
+ for (bytes) |*b| b.* = std.ascii.toLower(b.*);
+}
+
+export fn module_name(index: u32) String {
+ const names = Walk.modules.keys();
+ return String.init(if (index >= names.len) "" else names[index]);
+}
+
+export fn find_module_root(pkg: Walk.ModuleIndex) Decl.Index {
+ const root_file = Walk.modules.values()[@intFromEnum(pkg)];
+ const result = root_file.findRootDecl();
+ assert(result != .none);
+ return result;
+}
+
+/// Set by `set_input_string`.
+var input_string: std.ArrayListUnmanaged(u8) = .{};
+
+export fn set_input_string(len: usize) [*]u8 {
+ input_string.resize(gpa, len) catch @panic("OOM");
+ return input_string.items.ptr;
+}
+
+/// Looks up the root struct decl corresponding to a file by path.
+/// Uses `input_string`.
+export fn find_file_root() Decl.Index {
+ const file: Walk.File.Index = @enumFromInt(Walk.files.getIndex(input_string.items) orelse return .none);
+ return file.findRootDecl();
+}
+
+/// Uses `input_string`.
+/// Tries to look up the Decl component-wise but then falls back to a file path
+/// based scan.
+export fn find_decl() Decl.Index {
+ const result = Decl.find(input_string.items);
+ if (result != .none) return result;
+
+ const g = struct {
+ var match_fqn: std.ArrayListUnmanaged(u8) = .{};
+ };
+ for (Walk.decls.items, 0..) |*decl, decl_index| {
+ g.match_fqn.clearRetainingCapacity();
+ decl.fqn(&g.match_fqn) catch @panic("OOM");
+ if (std.mem.eql(u8, g.match_fqn.items, input_string.items)) {
+ //const path = @as(Decl.Index, @enumFromInt(decl_index)).get().file.path();
+ //log.debug("find_decl '{s}' found in {s}", .{ input_string.items, path });
+ return @enumFromInt(decl_index);
+ }
+ }
+ return .none;
+}
+
+/// Set only by `categorize_decl`; read only by `get_aliasee`, valid only
+/// when `categorize_decl` returns `.alias`.
+var global_aliasee: Decl.Index = .none;
+
+export fn get_aliasee() Decl.Index {
+ return global_aliasee;
+}
+export fn categorize_decl(decl_index: Decl.Index, resolve_alias_count: usize) Walk.Category.Tag {
+ global_aliasee = .none;
+ var chase_alias_n = resolve_alias_count;
+ var decl = decl_index.get();
+ while (true) {
+ const result = decl.categorize();
+ switch (result) {
+ .alias => |new_index| {
+ assert(new_index != .none);
+ global_aliasee = new_index;
+ if (chase_alias_n > 0) {
+ chase_alias_n -= 1;
+ decl = new_index.get();
+ continue;
+ }
+ },
+ else => {},
+ }
+ return result;
+ }
+}
+
+export fn type_fn_members(parent: Decl.Index, include_private: bool) Slice(Decl.Index) {
+ return namespace_members(parent, include_private);
+}
+
+export fn namespace_members(parent: Decl.Index, include_private: bool) Slice(Decl.Index) {
+ const g = struct {
+ var members: std.ArrayListUnmanaged(Decl.Index) = .{};
+ };
+
+ g.members.clearRetainingCapacity();
+
+ for (Walk.decls.items, 0..) |*decl, i| {
+ if (decl.parent == parent) {
+ if (include_private or decl.is_pub()) {
+ g.members.append(gpa, @enumFromInt(i)) catch @panic("OOM");
+ }
+ }
+ }
+
+ return Slice(Decl.Index).init(g.members.items);
+}
+
+const RenderSourceOptions = struct {
+ skip_doc_comments: bool = false,
+ skip_comments: bool = false,
+ collapse_whitespace: bool = false,
+ fn_link: Decl.Index = .none,
+};
+
+fn file_source_html(
+ file_index: Walk.File.Index,
+ out: *std.ArrayListUnmanaged(u8),
+ root_node: Ast.Node.Index,
+ options: RenderSourceOptions,
+) !void {
+ const ast = file_index.get_ast();
+ const file = file_index.get();
+
+ const g = struct {
+ var field_access_buffer: std.ArrayListUnmanaged(u8) = .{};
+ };
+
+ const token_tags = ast.tokens.items(.tag);
+ const token_starts = ast.tokens.items(.start);
+ const main_tokens = ast.nodes.items(.main_token);
+
+ const start_token = ast.firstToken(root_node);
+ const end_token = ast.lastToken(root_node) + 1;
+
+ var cursor: usize = token_starts[start_token];
+
+ for (
+ token_tags[start_token..end_token],
+ token_starts[start_token..end_token],
+ start_token..,
+ ) |tag, start, token_index| {
+ const between = ast.source[cursor..start];
+ if (std.mem.trim(u8, between, " \t\r\n").len > 0) {
+ if (!options.skip_comments) {
+ try out.appendSlice(gpa, "");
+ try appendEscaped(out, between);
+ try out.appendSlice(gpa, "");
+ }
+ } else if (between.len > 0) {
+ if (options.collapse_whitespace) {
+ if (out.items.len > 0 and out.items[out.items.len - 1] != ' ')
+ try out.append(gpa, ' ');
+ } else {
+ try out.appendSlice(gpa, between);
+ }
+ }
+ if (tag == .eof) break;
+ const slice = ast.tokenSlice(token_index);
+ cursor = start + slice.len;
+ switch (tag) {
+ .eof => unreachable,
+
+ .keyword_addrspace,
+ .keyword_align,
+ .keyword_and,
+ .keyword_asm,
+ .keyword_async,
+ .keyword_await,
+ .keyword_break,
+ .keyword_catch,
+ .keyword_comptime,
+ .keyword_const,
+ .keyword_continue,
+ .keyword_defer,
+ .keyword_else,
+ .keyword_enum,
+ .keyword_errdefer,
+ .keyword_error,
+ .keyword_export,
+ .keyword_extern,
+ .keyword_for,
+ .keyword_if,
+ .keyword_inline,
+ .keyword_noalias,
+ .keyword_noinline,
+ .keyword_nosuspend,
+ .keyword_opaque,
+ .keyword_or,
+ .keyword_orelse,
+ .keyword_packed,
+ .keyword_anyframe,
+ .keyword_pub,
+ .keyword_resume,
+ .keyword_return,
+ .keyword_linksection,
+ .keyword_callconv,
+ .keyword_struct,
+ .keyword_suspend,
+ .keyword_switch,
+ .keyword_test,
+ .keyword_threadlocal,
+ .keyword_try,
+ .keyword_union,
+ .keyword_unreachable,
+ .keyword_usingnamespace,
+ .keyword_var,
+ .keyword_volatile,
+ .keyword_allowzero,
+ .keyword_while,
+ .keyword_anytype,
+ .keyword_fn,
+ => {
+ try out.appendSlice(gpa, "");
+ try appendEscaped(out, slice);
+ try out.appendSlice(gpa, "");
+ },
+
+ .string_literal,
+ .char_literal,
+ .multiline_string_literal_line,
+ => {
+ try out.appendSlice(gpa, "");
+ try appendEscaped(out, slice);
+ try out.appendSlice(gpa, "");
+ },
+
+ .builtin => {
+ try out.appendSlice(gpa, "");
+ try appendEscaped(out, slice);
+ try out.appendSlice(gpa, "");
+ },
+
+ .doc_comment,
+ .container_doc_comment,
+ => {
+ if (!options.skip_doc_comments) {
+ try out.appendSlice(gpa, "");
+ try appendEscaped(out, slice);
+ try out.appendSlice(gpa, "");
+ }
+ },
+
+ .identifier => i: {
+ if (options.fn_link != .none) {
+ const fn_link = options.fn_link.get();
+ const fn_token = main_tokens[fn_link.ast_node];
+ if (token_index == fn_token + 1) {
+ try out.appendSlice(gpa, "");
+ try appendEscaped(out, slice);
+ try out.appendSlice(gpa, "");
+ break :i;
+ }
+ }
+
+ if (token_index > 0 and token_tags[token_index - 1] == .keyword_fn) {
+ try out.appendSlice(gpa, "");
+ try appendEscaped(out, slice);
+ try out.appendSlice(gpa, "");
+ break :i;
+ }
+
+ if (Walk.isPrimitiveNonType(slice)) {
+ try out.appendSlice(gpa, "");
+ try appendEscaped(out, slice);
+ try out.appendSlice(gpa, "");
+ break :i;
+ }
+
+ if (std.zig.primitives.isPrimitive(slice)) {
+ try out.appendSlice(gpa, "");
+ try appendEscaped(out, slice);
+ try out.appendSlice(gpa, "");
+ break :i;
+ }
+
+ if (file.token_parents.get(token_index)) |field_access_node| {
+ g.field_access_buffer.clearRetainingCapacity();
+ try walk_field_accesses(file_index, &g.field_access_buffer, field_access_node);
+ if (g.field_access_buffer.items.len > 0) {
+ try out.appendSlice(gpa, "");
+ try appendEscaped(out, slice);
+ try out.appendSlice(gpa, "");
+ } else {
+ try appendEscaped(out, slice);
+ }
+ break :i;
+ }
+
+ {
+ g.field_access_buffer.clearRetainingCapacity();
+ try resolve_ident_link(file_index, &g.field_access_buffer, token_index);
+ if (g.field_access_buffer.items.len > 0) {
+ try out.appendSlice(gpa, "");
+ try appendEscaped(out, slice);
+ try out.appendSlice(gpa, "");
+ break :i;
+ }
+ }
+
+ try appendEscaped(out, slice);
+ },
+
+ .number_literal => {
+ try out.appendSlice(gpa, "");
+ try appendEscaped(out, slice);
+ try out.appendSlice(gpa, "");
+ },
+
+ .bang,
+ .pipe,
+ .pipe_pipe,
+ .pipe_equal,
+ .equal,
+ .equal_equal,
+ .equal_angle_bracket_right,
+ .bang_equal,
+ .l_paren,
+ .r_paren,
+ .semicolon,
+ .percent,
+ .percent_equal,
+ .l_brace,
+ .r_brace,
+ .l_bracket,
+ .r_bracket,
+ .period,
+ .period_asterisk,
+ .ellipsis2,
+ .ellipsis3,
+ .caret,
+ .caret_equal,
+ .plus,
+ .plus_plus,
+ .plus_equal,
+ .plus_percent,
+ .plus_percent_equal,
+ .plus_pipe,
+ .plus_pipe_equal,
+ .minus,
+ .minus_equal,
+ .minus_percent,
+ .minus_percent_equal,
+ .minus_pipe,
+ .minus_pipe_equal,
+ .asterisk,
+ .asterisk_equal,
+ .asterisk_asterisk,
+ .asterisk_percent,
+ .asterisk_percent_equal,
+ .asterisk_pipe,
+ .asterisk_pipe_equal,
+ .arrow,
+ .colon,
+ .slash,
+ .slash_equal,
+ .comma,
+ .ampersand,
+ .ampersand_equal,
+ .question_mark,
+ .angle_bracket_left,
+ .angle_bracket_left_equal,
+ .angle_bracket_angle_bracket_left,
+ .angle_bracket_angle_bracket_left_equal,
+ .angle_bracket_angle_bracket_left_pipe,
+ .angle_bracket_angle_bracket_left_pipe_equal,
+ .angle_bracket_right,
+ .angle_bracket_right_equal,
+ .angle_bracket_angle_bracket_right,
+ .angle_bracket_angle_bracket_right_equal,
+ .tilde,
+ => try appendEscaped(out, slice),
+
+ .invalid, .invalid_periodasterisks => return error.InvalidToken,
+ }
+ }
+}
+
+fn resolve_ident_link(
+ file_index: Walk.File.Index,
+ out: *std.ArrayListUnmanaged(u8),
+ ident_token: Ast.TokenIndex,
+) Oom!void {
+ const decl_index = file_index.get().lookup_token(ident_token);
+ if (decl_index == .none) return;
+ try resolve_decl_link(decl_index, out);
+}
+
+fn resolve_decl_link(decl_index: Decl.Index, out: *std.ArrayListUnmanaged(u8)) Oom!void {
+ const decl = decl_index.get();
+ switch (decl.categorize()) {
+ .alias => |alias_decl| try alias_decl.get().fqn(out),
+ else => try decl.fqn(out),
+ }
+}
+
+fn walk_field_accesses(
+ file_index: Walk.File.Index,
+ out: *std.ArrayListUnmanaged(u8),
+ node: Ast.Node.Index,
+) Oom!void {
+ const ast = file_index.get_ast();
+ const node_tags = ast.nodes.items(.tag);
+ assert(node_tags[node] == .field_access);
+ const node_datas = ast.nodes.items(.data);
+ const main_tokens = ast.nodes.items(.main_token);
+ const object_node = node_datas[node].lhs;
+ const dot_token = main_tokens[node];
+ const field_ident = dot_token + 1;
+ switch (node_tags[object_node]) {
+ .identifier => {
+ const lhs_ident = main_tokens[object_node];
+ try resolve_ident_link(file_index, out, lhs_ident);
+ },
+ .field_access => {
+ try walk_field_accesses(file_index, out, object_node);
+ },
+ else => {},
+ }
+ if (out.items.len > 0) {
+ try out.append(gpa, '.');
+ try out.appendSlice(gpa, ast.tokenSlice(field_ident));
+ }
+}
+
+fn appendEscaped(out: *std.ArrayListUnmanaged(u8), s: []const u8) !void {
+ for (s) |c| {
+ try out.ensureUnusedCapacity(gpa, 6);
+ switch (c) {
+ '&' => out.appendSliceAssumeCapacity("&"),
+ '<' => out.appendSliceAssumeCapacity("<"),
+ '>' => out.appendSliceAssumeCapacity(">"),
+ '"' => out.appendSliceAssumeCapacity("""),
+ else => out.appendAssumeCapacity(c),
+ }
+ }
+}
+
+fn count_scalar(haystack: []const u8, needle: u8) usize {
+ var total: usize = 0;
+ for (haystack) |elem| {
+ if (elem == needle)
+ total += 1;
+ }
+ return total;
+}
diff --git a/lib/docs/wasm/markdown.zig b/lib/docs/wasm/markdown.zig
new file mode 100644
index 0000000000..4ce1ee15b4
--- /dev/null
+++ b/lib/docs/wasm/markdown.zig
@@ -0,0 +1,940 @@
+//! Markdown parsing and rendering support.
+//!
+//! A Markdown document consists of a series of blocks. Depending on its type,
+//! each block may contain other blocks, inline content, or nothing. The
+//! supported blocks are as follows:
+//!
+//! - **List** - a sequence of list items of the same type.
+//!
+//! - **List item** - unordered list items start with `-`, `*`, or `+` followed
+//! by a space. Ordered list items start with a number between 0 and
+//! 999,999,999, followed by a `.` or `)` and a space. The number of an
+//! ordered list item only matters for the first item in the list (to
+//! determine the starting number of the list). All subsequent ordered list
+//! items will have sequentially increasing numbers.
+//!
+//! All list items may contain block content. Any content indented at least as
+//! far as the end of the list item marker (including the space after it) is
+//! considered part of the list item.
+//!
+//! Lists which have no blank lines between items or between direct children
+//! of items are considered _tight_, and direct child paragraphs of tight list
+//! items are rendered without `` tags.
+//!
+//! - **Table** - a sequence of adjacent table row lines, where each line starts
+//! and ends with a `|`, and cells within the row are delimited by `|`s.
+//!
+//! The first or second row of a table may be a _header delimiter row_, which
+//! is a row consisting of cells of the pattern `---` (for unset column
+//! alignment), `:--` (for left alignment), `:-:` (for center alignment), or
+//! `--:` (for right alignment). The number of `-`s must be at least one, but
+//! is otherwise arbitrary. If there is a row just before the header delimiter
+//! row, it becomes the header row for the table (a table need not have a
+//! header row at all).
+//!
+//! - **Heading** - a sequence of between 1 and 6 `#` characters, followed by a
+//! space and further inline content on the same line.
+//!
+//! - **Code block** - a sequence of at least 3 `` ` `` characters (a _fence_),
+//! optionally followed by a "tag" on the same line, and continuing until a
+//! line consisting only of a closing fence whose length matches the opening
+//! fence, or until the end of the containing block.
+//!
+//! The content of a code block is not parsed as inline content. It is
+//! included verbatim in the output document (minus leading indentation up to
+//! the position of the opening fence).
+//!
+//! - **Blockquote** - a sequence of lines preceded by `>` characters.
+//!
+//! - **Paragraph** - ordinary text, parsed as inline content, ending with a
+//! blank line or the end of the containing block.
+//!
+//! Paragraphs which are part of another block may be "lazily" continued by
+//! subsequent paragraph lines even if those lines would not ordinarily be
+//! considered part of the containing block. For example, this is a single
+//! list item, not a list item followed by a paragraph:
+//!
+//! ```markdown
+//! - First line of content.
+//! This content is still part of the paragraph,
+//! even though it isn't indented far enough.
+//! ```
+//!
+//! - **Thematic break** - a line consisting of at least three matching `-`,
+//! `_`, or `*` characters and, optionally, spaces.
+//!
+//! Indentation may consist of spaces and tabs. The use of tabs is not
+//! recommended: a tab is treated the same as a single space for the purpose of
+//! determining the indentation level, and is not recognized as a space for
+//! block starters which require one (for example, `-` followed by a tab is not
+//! a valid list item).
+//!
+//! The supported inlines are as follows:
+//!
+//! - **Link** - of the format `[text](target)`. `text` may contain inline
+//! content. `target` may contain `\`-escaped characters and balanced
+//! parentheses.
+//!
+//! - **Image** - a link directly preceded by a `!`. The link text is
+//! interpreted as the alt text of the image.
+//!
+//! - **Emphasis** - a run of `*` or `_` characters may be an emphasis opener,
+//! closer, or both. For `*` characters, the run may be an opener as long as
+//! it is not directly followed by a whitespace character (or the end of the
+//! inline content) and a closer as long as it is not directly preceded by
+//! one. For `_` characters, this rule is strengthened by requiring that the
+//! run also be preceded by a whitespace or punctuation character (for
+//! openers) or followed by one (for closers), to avoid mangling `snake_case`
+//! words.
+//!
+//! The rule for emphasis handling is greedy: any run that can close existing
+//! emphasis will do so, otherwise it will open emphasis. A single run may
+//! serve both functions: the middle `**` in the following example both closes
+//! the initial emphasis and opens a new one:
+//!
+//! ```markdown
+//! *one**two*
+//! ```
+//!
+//! A single `*` or `_` is used for normal emphasis (HTML ``), and a
+//! double `**` or `__` is used for strong emphasis (HTML ``). Even
+//! longer runs may be used to produce further nested emphasis (though only
+//! `***` and `___` to produce `` is really useful).
+//!
+//! - **Code span** - a run of `` ` `` characters, terminated by a matching run
+//! or the end of inline content. The content of a code span is not parsed
+//! further.
+//!
+//! - **Text** - normal text is interpreted as-is, except that `\` may be used
+//! to escape any punctuation character, preventing it from being interpreted
+//! according to other syntax rules. A `\` followed by a line break within a
+//! paragraph is interpreted as a hard line break.
+//!
+//! Any null bytes or invalid UTF-8 bytes within text are replaced with Unicode
+//! replacement characters, `U+FFFD`.
+
+const std = @import("std");
+const testing = std.testing;
+
+pub const Document = @import("markdown/Document.zig");
+pub const Parser = @import("markdown/Parser.zig");
+pub const Renderer = @import("markdown/renderer.zig").Renderer;
+pub const renderNodeInlineText = @import("markdown/renderer.zig").renderNodeInlineText;
+pub const fmtHtml = @import("markdown/renderer.zig").fmtHtml;
+
+// Avoid exposing main to other files merely importing this one.
+pub const main = if (@import("root") == @This())
+ mainImpl
+else
+ @compileError("only available as root source file");
+
+fn mainImpl() !void {
+ const gpa = std.heap.c_allocator;
+
+ var parser = try Parser.init(gpa);
+ defer parser.deinit();
+
+ var stdin_buf = std.io.bufferedReader(std.io.getStdIn().reader());
+ var line_buf = std.ArrayList(u8).init(gpa);
+ defer line_buf.deinit();
+ while (stdin_buf.reader().streamUntilDelimiter(line_buf.writer(), '\n', null)) {
+ if (line_buf.getLastOrNull() == '\r') _ = line_buf.pop();
+ try parser.feedLine(line_buf.items);
+ line_buf.clearRetainingCapacity();
+ } else |err| switch (err) {
+ error.EndOfStream => {},
+ else => |e| return e,
+ }
+
+ var doc = try parser.endInput();
+ defer doc.deinit(gpa);
+
+ var stdout_buf = std.io.bufferedWriter(std.io.getStdOut().writer());
+ try doc.render(stdout_buf.writer());
+ try stdout_buf.flush();
+}
+
+test "empty document" {
+ try testRender("", "");
+ try testRender(" ", "");
+ try testRender("\n \n\t\n \n", "");
+}
+
+test "unordered lists" {
+ try testRender(
+ \\- Spam
+ \\- Spam
+ \\- Spam
+ \\- Eggs
+ \\- Bacon
+ \\- Spam
+ \\
+ \\* Spam
+ \\* Spam
+ \\* Spam
+ \\* Eggs
+ \\* Bacon
+ \\* Spam
+ \\
+ \\+ Spam
+ \\+ Spam
+ \\+ Spam
+ \\+ Eggs
+ \\+ Bacon
+ \\+ Spam
+ \\
+ ,
+ \\ Item 1. Item 2. This one has another paragraph. Item 3. Blockquote. Some contents below the heading. | Not | a | table Also not a table:
+ \\|
+ \\| ####### Not a heading You miss 100% of the shots you don't take. ~ Wayne Gretzky ~ Michael Scott Deeply nested blockquote
+ \\which continues on another line
+ \\and then yet another one. But now two of them have been closed. And then there were none. Paragraph one. Paragraph two.
+ \\Still in the paragraph.
+ \\So is this. Last paragraph. Link
+ \\Link with inlines
+ \\Nested parens
+ \\Escaped parens
+ \\Line break in target Emphasis.
+ \\Strong.
+ \\Strong emphasis.
+ \\More...
+ \\MORE...
+ \\Even more...
+ \\OK, this is enough. Emphasis.
+ \\Strong.
+ \\Strong emphasis.
+ \\More...
+ \\MORE...
+ \\Even more...
+ \\OK, this is enough. Hello, world!
+ \\Hello, world!
+ \\Hello, world!
+ \\Hello, world!
+ \\Hello, nested world!
+ \\Hello, world!
+ \\Hello, world!
+ \\Hello, world!
+ \\Hello, world!
+ \\Test123
+ \\Test____123 First one _wins_.
+ \\No other __rule matters. Cannot open: *
+ \\Cannot open: _
+ \\*Cannot close: *
+ \\_Cannot close: _ foobarbaz
+ \\foo_bar_baz
+ \\foobarbaz
+ \\foo__bar__baz Unterminated Weird empty code span: Very important code: Not *emphasized*.
+ \\Literal \backslashes\.
+ \\Not code: `hi`.
+ \\# Not a title.
+ \\## Also not a title.
+ \\> Not a blockquote.
+ \\- Not a list item.
+ \\| Not a table. |
+ \\| Also not a table. |
+ \\Any \punctuation\ characte\r can be escaped:
+ \\!"#$%&'()*+,-./:;<=>?@[\]^_`{|}~ The iguana sits \u{FFFD}\u{FFFD}\u{FFFD} \u{FFFD}\u{FFFD}\u{FFFD} \u{FFFD}\u{FFFD} \u{FFFD} ");
+ for (doc.extraChildren(data.container.children)) |child| {
+ try r.renderFn(r, doc, child, writer);
+ }
+ try writer.writeAll("
+ \\
+ \\
+ \\
+ \\
+ \\
+ \\
+ );
+}
+
+test "ordered lists" {
+ try testRender(
+ \\1. Breakfast
+ \\2. Second breakfast
+ \\3. Lunch
+ \\2. Afternoon snack
+ \\1. Dinner
+ \\6. Dessert
+ \\7. Midnight snack
+ \\
+ \\1) Breakfast
+ \\2) Second breakfast
+ \\3) Lunch
+ \\2) Afternoon snack
+ \\1) Dinner
+ \\6) Dessert
+ \\7) Midnight snack
+ \\
+ \\1001. Breakfast
+ \\2. Second breakfast
+ \\3. Lunch
+ \\2. Afternoon snack
+ \\1. Dinner
+ \\6. Dessert
+ \\7. Midnight snack
+ \\
+ \\1001) Breakfast
+ \\2) Second breakfast
+ \\3) Lunch
+ \\2) Afternoon snack
+ \\1) Dinner
+ \\6) Dessert
+ \\7) Midnight snack
+ \\
+ ,
+ \\
+ \\
+ \\
+ \\
+ \\
+ \\
+ \\
+ \\
+ \\
+ );
+}
+
+test "nested lists" {
+ try testRender(
+ \\- - Item 1.
+ \\ - Item 2.
+ \\Item 2 continued.
+ \\ * New list.
+ \\
+ ,
+ \\
+ \\
+ \\
+ );
+}
+
+test "lists with block content" {
+ try testRender(
+ \\1. Item 1.
+ \\2. Item 2.
+ \\
+ \\ This one has another paragraph.
+ \\3. Item 3.
+ \\
+ \\- > Blockquote.
+ \\- - Sub-list.
+ \\ - Sub-list continued.
+ \\ * Different sub-list.
+ \\- ## Heading.
+ \\
+ \\ Some contents below the heading.
+ \\ 1. Item 1.
+ \\ 2. Item 2.
+ \\ 3. Item 3.
+ \\
+ ,
+ \\
+ \\
+ \\
+ \\
+ \\
+ \\
+ \\
+ \\
+ \\
+ );
+}
+
+test "tables" {
+ try testRender(
+ \\| Operator | Meaning |
+ \\| :------: | ---------------- |
+ \\| `+` | Add |
+ \\| `-` | Subtract |
+ \\| `*` | Multiply |
+ \\| `/` | Divide |
+ \\| `??` | **Not sure yet** |
+ \\
+ \\| Item 1 | Value 1 |
+ \\| Item 2 | Value 2 |
+ \\| Item 3 | Value 3 |
+ \\| Item 4 | Value 4 |
+ \\
+ \\| :--- | :----: | ----: |
+ \\| Left | Center | Right |
+ \\
+ ,
+ \\
+ \\
+ \\
+ \\
+ \\
+ \\
+ \\Heading.
+ \\
+ \\
+ \\
+ \\
+ \\
+ \\
+ \\Operator
+ \\Meaning
+ \\
+ \\
+ \\
+ \\+Add
+ \\
+ \\
+ \\
+ \\-Subtract
+ \\
+ \\
+ \\
+ \\*Multiply
+ \\
+ \\
+ \\
+ \\/Divide
+ \\
+ \\
+ \\
+ \\??Not sure yet
+ \\
+ \\
+ \\
+ \\
+ \\Item 1
+ \\Value 1
+ \\
+ \\
+ \\Item 2
+ \\Value 2
+ \\
+ \\
+ \\Item 3
+ \\Value 3
+ \\
+ \\
+ \\Item 4
+ \\Value 4
+ \\
+ \\
+ \\
+ );
+}
+
+test "table with uneven number of columns" {
+ try testRender(
+ \\| One |
+ \\| :-- | :--: |
+ \\| One | Two | Three |
+ \\
+ ,
+ \\
+ \\
+ \\Left
+ \\Center
+ \\Right
+ \\
+ \\
+ \\
+ );
+}
+
+test "table with escaped pipes" {
+ try testRender(
+ \\| One \| Two |
+ \\| --- | --- |
+ \\| One \| Two |
+ \\
+ ,
+ \\
+ \\
+ \\One
+ \\
+ \\
+ \\One
+ \\Two
+ \\Three
+ \\
+ \\
+ \\
+ );
+}
+
+test "table with pipes in code spans" {
+ try testRender(
+ \\| `|` | Bitwise _OR_ |
+ \\| `||` | Combines error sets |
+ \\| `` `||` `` | Escaped version |
+ \\| ` ``||`` ` | Another escaped version |
+ \\| `Oops unterminated code span |
+ \\
+ ,
+ \\
+ \\
+ \\One | Two
+ \\
+ \\
+ \\One | Two
+ \\
+ \\
+ \\
+ \\
+ \\
+ \\|Bitwise OR
+ \\
+ \\
+ \\
+ \\||Combines error sets
+ \\
+ \\
+ \\
+ \\`||`Escaped version
+ \\
+ \\
+ \\
+ \\``||``Another escaped version
+ \\Oops unterminated code span |
+ \\
+ \\
+ \\
+ \\But
+ \\this
+ \\is
+ \\Level one
+ \\Level two
+ \\Level three
+ \\Level four
+ \\Level five
+ \\Level six
+ \\Outline of
+ \\std.zigImportant notes
+ \\Nested inline content
+ \\
+ );
+}
+
+test "code blocks" {
+ try testRender(
+ \\```
+ \\Hello, world!
+ \\This is some code.
+ \\```
+ \\``` zig test
+ \\const std = @import("std");
+ \\
+ \\test {
+ \\ try std.testing.expect(2 + 2 == 4);
+ \\}
+ \\```
+ \\
+ ,
+ \\
+ \\Hello, world!
+ \\This is some code.
+ \\
+ \\
+ );
+}
+
+test "blockquotes" {
+ try testRender(
+ \\> > You miss 100% of the shots you don't take.
+ \\> >
+ \\> > ~ Wayne Gretzky
+ \\>
+ \\> ~ Michael Scott
+ \\
+ ,
+ \\const std = @import("std");
+ \\
+ \\test {
+ \\ try std.testing.expect(2 + 2 == 4);
+ \\}
+ \\
+ \\
+ \\
+ );
+}
+
+test "blockquote lazy continuation lines" {
+ try testRender(
+ \\>>>>Deeply nested blockquote
+ \\>>which continues on another line
+ \\and then yet another one.
+ \\>>
+ \\>> But now two of them have been closed.
+ \\
+ \\And then there were none.
+ \\
+ ,
+ \\
+ \\
+ \\
+ \\
+ \\
+ \\
+ \\
+ \\
+ \\
+ \\
+ \\
+ \\
+ \\
+ \\
+ \\
+ \\
+ );
+}
+
+test "links" {
+ try testRender(
+ \\[Link](https://example.com)
+ \\[Link *with inlines*](https://example.com)
+ \\[Nested parens](https://example.com/nested(parens(inside)))
+ \\[Escaped parens](https://example.com/\)escaped\()
+ \\[Line break in target](test\
+ \\target)
+ \\
+ ,
+ \\
+ \\
+ \\
+ \\
+ \\Hello, world!
+ \\Multiple `backticks` can be used.
+ \\**This** does not produce emphasis.
+ \\`Backtick enclosed string.`
+ \\Delimiter lengths ```must``` match.code...hi
+ \\Perched atop a short desk chair
+ \\Writing code in Zig\n");
+ } else {
+ try writer.print("
\n", .{start});
+ }
+ } else {
+ try writer.writeAll("
\n");
+ } else {
+ try writer.writeAll("\n");
+ }
+ },
+ .list_item => {
+ try writer.writeAll("\n");
+ }
+ for (doc.extraChildren(data.list.children)) |child| {
+ try r.renderFn(r, doc, child, writer);
+ }
+ if (data.list.start.asNumber() != null) {
+ try writer.writeAll("
\n");
+ for (doc.extraChildren(data.container.children)) |child| {
+ try r.renderFn(r, doc, child, writer);
+ }
+ try writer.writeAll("
\n");
+ },
+ .table_row => {
+ try writer.writeAll("\n");
+ for (doc.extraChildren(data.container.children)) |child| {
+ try r.renderFn(r, doc, child, writer);
+ }
+ try writer.writeAll(" \n");
+ },
+ .table_cell => {
+ if (data.table_cell.info.header) {
+ try writer.writeAll(" try writer.writeAll(">"),
+ else => |a| try writer.print(" style=\"text-align: {s}\">", .{@tagName(a)}),
+ }
+
+ for (doc.extraChildren(data.table_cell.children)) |child| {
+ try r.renderFn(r, doc, child, writer);
+ }
+
+ if (data.table_cell.info.header) {
+ try writer.writeAll(" \n");
+ } else {
+ try writer.writeAll("\n");
+ }
+ },
+ .heading => {
+ try writer.print("
\n", .{fmtHtml(content)});
+ },
+ .blockquote => {
+ try writer.writeAll("{}\n");
+ for (doc.extraChildren(data.container.children)) |child| {
+ try r.renderFn(r, doc, child, writer);
+ }
+ try writer.writeAll("
\n");
+ },
+ .paragraph => {
+ try writer.writeAll("
\n");
+ },
+ .link => {
+ const target = doc.string(data.link.target);
+ try writer.print("", .{fmtHtml(target)});
+ for (doc.extraChildren(data.link.children)) |child| {
+ try r.renderFn(r, doc, child, writer);
+ }
+ try writer.writeAll("");
+ },
+ .image => {
+ const target = doc.string(data.link.target);
+ try writer.print("");
+ },
+ .strong => {
+ try writer.writeAll("");
+ for (doc.extraChildren(data.container.children)) |child| {
+ try r.renderFn(r, doc, child, writer);
+ }
+ try writer.writeAll("");
+ },
+ .emphasis => {
+ try writer.writeAll("");
+ for (doc.extraChildren(data.container.children)) |child| {
+ try r.renderFn(r, doc, child, writer);
+ }
+ try writer.writeAll("");
+ },
+ .code_span => {
+ const content = doc.string(data.text.content);
+ try writer.print("
{}", .{fmtHtml(content)});
+ },
+ .text => {
+ const content = doc.string(data.text.content);
+ try writer.print("{}", .{fmtHtml(content)});
+ },
+ .line_break => {
+ try writer.writeAll("
\n");
+ },
+ }
+ }
+ };
+}
+
+/// Renders an inline node as plain text. Asserts that the node is an inline and
+/// has no non-inline children.
+pub fn renderInlineNodeText(
+ doc: Document,
+ node: Node.Index,
+ writer: anytype,
+) @TypeOf(writer).Error!void {
+ const data = doc.nodes.items(.data)[@intFromEnum(node)];
+ switch (doc.nodes.items(.tag)[@intFromEnum(node)]) {
+ .root,
+ .list,
+ .list_item,
+ .table,
+ .table_row,
+ .table_cell,
+ .heading,
+ .code_block,
+ .blockquote,
+ .paragraph,
+ .thematic_break,
+ => unreachable, // Blocks
+
+ .link, .image => {
+ for (doc.extraChildren(data.link.children)) |child| {
+ try renderInlineNodeText(doc, child, writer);
+ }
+ },
+ .strong => {
+ for (doc.extraChildren(data.container.children)) |child| {
+ try renderInlineNodeText(doc, child, writer);
+ }
+ },
+ .emphasis => {
+ for (doc.extraChildren(data.container.children)) |child| {
+ try renderInlineNodeText(doc, child, writer);
+ }
+ },
+ .code_span, .text => {
+ const content = doc.string(data.text.content);
+ try writer.print("{}", .{fmtHtml(content)});
+ },
+ .line_break => {
+ try writer.writeAll("\n");
+ },
+ }
+}
+
+pub fn fmtHtml(bytes: []const u8) std.fmt.Formatter(formatHtml) {
+ return .{ .data = bytes };
+}
+
+fn formatHtml(
+ bytes: []const u8,
+ comptime fmt: []const u8,
+ options: std.fmt.FormatOptions,
+ writer: anytype,
+) !void {
+ _ = fmt;
+ _ = options;
+ for (bytes) |b| {
+ switch (b) {
+ '<' => try writer.writeAll("<"),
+ '>' => try writer.writeAll(">"),
+ '&' => try writer.writeAll("&"),
+ '"' => try writer.writeAll("""),
+ else => try writer.writeByte(b),
+ }
+ }
+}
diff --git a/lib/docs/ziglexer.js b/lib/docs/ziglexer.js
deleted file mode 100644
index fdd94bee9c..0000000000
--- a/lib/docs/ziglexer.js
+++ /dev/null
@@ -1,2147 +0,0 @@
-'use strict';
-
-const Tag = {
- whitespace: "whitespace",
- invalid: "invalid",
- identifier: "identifier",
- string_literal: "string_literal",
- multiline_string_literal_line: "multiline_string_literal_line",
- char_literal: "char_literal",
- eof: "eof",
- builtin: "builtin",
- number_literal: "number_literal",
- doc_comment: "doc_comment",
- container_doc_comment: "container_doc_comment",
- line_comment: "line_comment",
- invalid_periodasterisks: "invalid_periodasterisks",
- bang: "bang",
- pipe: "pipe",
- pipe_pipe: "pipe_pipe",
- pipe_equal: "pipe_equal",
- equal: "equal",
- equal_equal: "equal_equal",
- equal_angle_bracket_right: "equal_angle_bracket_right",
- bang_equal: "bang_equal",
- l_paren: "l_paren",
- r_paren: "r_paren",
- semicolon: "semicolon",
- percent: "percent",
- percent_equal: "percent_equal",
- l_brace: "l_brace",
- r_brace: "r_brace",
- l_bracket: "l_bracket",
- r_bracket: "r_bracket",
- period: "period",
- period_asterisk: "period_asterisk",
- ellipsis2: "ellipsis2",
- ellipsis3: "ellipsis3",
- caret: "caret",
- caret_equal: "caret_equal",
- plus: "plus",
- plus_plus: "plus_plus",
- plus_equal: "plus_equal",
- plus_percent: "plus_percent",
- plus_percent_equal: "plus_percent_equal",
- plus_pipe: "plus_pipe",
- plus_pipe_equal: "plus_pipe_equal",
- minus: "minus",
- minus_equal: "minus_equal",
- minus_percent: "minus_percent",
- minus_percent_equal: "minus_percent_equal",
- minus_pipe: "minus_pipe",
- minus_pipe_equal: "minus_pipe_equal",
- asterisk: "asterisk",
- asterisk_equal: "asterisk_equal",
- asterisk_asterisk: "asterisk_asterisk",
- asterisk_percent: "asterisk_percent",
- asterisk_percent_equal: "asterisk_percent_equal",
- asterisk_pipe: "asterisk_pipe",
- asterisk_pipe_equal: "asterisk_pipe_equal",
- arrow: "arrow",
- colon: "colon",
- slash: "slash",
- slash_equal: "slash_equal",
- comma: "comma",
- ampersand: "ampersand",
- ampersand_equal: "ampersand_equal",
- question_mark: "question_mark",
- angle_bracket_left: "angle_bracket_left",
- angle_bracket_left_equal: "angle_bracket_left_equal",
- angle_bracket_angle_bracket_left: "angle_bracket_angle_bracket_left",
- angle_bracket_angle_bracket_left_equal: "angle_bracket_angle_bracket_left_equal",
- angle_bracket_angle_bracket_left_pipe: "angle_bracket_angle_bracket_left_pipe",
- angle_bracket_angle_bracket_left_pipe_equal: "angle_bracket_angle_bracket_left_pipe_equal",
- angle_bracket_right: "angle_bracket_right",
- angle_bracket_right_equal: "angle_bracket_right_equal",
- angle_bracket_angle_bracket_right: "angle_bracket_angle_bracket_right",
- angle_bracket_angle_bracket_right_equal: "angle_bracket_angle_bracket_right_equal",
- tilde: "tilde",
- keyword_addrspace: "keyword_addrspace",
- keyword_align: "keyword_align",
- keyword_allowzero: "keyword_allowzero",
- keyword_and: "keyword_and",
- keyword_anyframe: "keyword_anyframe",
- keyword_anytype: "keyword_anytype",
- keyword_asm: "keyword_asm",
- keyword_async: "keyword_async",
- keyword_await: "keyword_await",
- keyword_break: "keyword_break",
- keyword_callconv: "keyword_callconv",
- keyword_catch: "keyword_catch",
- keyword_comptime: "keyword_comptime",
- keyword_const: "keyword_const",
- keyword_continue: "keyword_continue",
- keyword_defer: "keyword_defer",
- keyword_else: "keyword_else",
- keyword_enum: "keyword_enum",
- keyword_errdefer: "keyword_errdefer",
- keyword_error: "keyword_error",
- keyword_export: "keyword_export",
- keyword_extern: "keyword_extern",
- keyword_fn: "keyword_fn",
- keyword_for: "keyword_for",
- keyword_if: "keyword_if",
- keyword_inline: "keyword_inline",
- keyword_noalias: "keyword_noalias",
- keyword_noinline: "keyword_noinline",
- keyword_nosuspend: "keyword_nosuspend",
- keyword_opaque: "keyword_opaque",
- keyword_or: "keyword_or",
- keyword_orelse: "keyword_orelse",
- keyword_packed: "keyword_packed",
- keyword_pub: "keyword_pub",
- keyword_resume: "keyword_resume",
- keyword_return: "keyword_return",
- keyword_linksection: "keyword_linksection",
- keyword_struct: "keyword_struct",
- keyword_suspend: "keyword_suspend",
- keyword_switch: "keyword_switch",
- keyword_test: "keyword_test",
- keyword_threadlocal: "keyword_threadlocal",
- keyword_try: "keyword_try",
- keyword_union: "keyword_union",
- keyword_unreachable: "keyword_unreachable",
- keyword_usingnamespace: "keyword_usingnamespace",
- keyword_var: "keyword_var",
- keyword_volatile: "keyword_volatile",
- keyword_while: "keyword_while"
-}
-
-const Tok = {
- const: { src: "const", tag: Tag.keyword_const },
- var: { src: "var", tag: Tag.keyword_var },
- colon: { src: ":", tag: Tag.colon },
- eql: { src: "=", tag: Tag.equals },
- space: { src: " ", tag: Tag.whitespace },
- tab: { src: " ", tag: Tag.whitespace },
- enter: { src: "\n", tag: Tag.whitespace },
- semi: { src: ";", tag: Tag.semicolon },
- l_bracket: { src: "[", tag: Tag.l_bracket },
- r_bracket: { src: "]", tag: Tag.r_bracket },
- l_brace: { src: "{", tag: Tag.l_brace },
- r_brace: { src: "}", tag: Tag.r_brace },
- l_paren: { src: "(", tag: Tag.l_paren },
- r_paren: { src: ")", tag: Tag.r_paren },
- period: { src: ".", tag: Tag.period },
- comma: { src: ",", tag: Tag.comma },
- question_mark: { src: "?", tag: Tag.question_mark },
- asterisk: { src: "*", tag: Tag.asterisk },
- identifier: (name) => { return { src: name, tag: Tag.identifier } },
-};
-
-
-const State = {
- start: 0,
- identifier: 1,
- builtin: 2,
- string_literal: 3,
- string_literal_backslash: 4,
- multiline_string_literal_line: 5,
- char_literal: 6,
- char_literal_backslash: 7,
- char_literal_hex_escape: 8,
- char_literal_unicode_escape_saw_u: 9,
- char_literal_unicode_escape: 10,
- char_literal_unicode_invalid: 11,
- char_literal_unicode: 12,
- char_literal_end: 13,
- backslash: 14,
- equal: 15,
- bang: 16,
- pipe: 17,
- minus: 18,
- minus_percent: 19,
- minus_pipe: 20,
- asterisk: 21,
- asterisk_percent: 22,
- asterisk_pipe: 23,
- slash: 24,
- line_comment_start: 25,
- line_comment: 26,
- doc_comment_start: 27,
- doc_comment: 28,
- int: 29,
- int_exponent: 30,
- int_period: 31,
- float: 32,
- float_exponent: 33,
- ampersand: 34,
- caret: 35,
- percent: 36,
- plus: 37,
- plus_percent: 38,
- plus_pipe: 39,
- angle_bracket_left: 40,
- angle_bracket_angle_bracket_left: 41,
- angle_bracket_angle_bracket_left_pipe: 42,
- angle_bracket_right: 43,
- angle_bracket_angle_bracket_right: 44,
- period: 45,
- period_2: 46,
- period_asterisk: 47,
- saw_at_sign: 48,
- whitespace: 49,
-}
-
-const keywords = {
- "addrspace": Tag.keyword_addrspace,
- "align": Tag.keyword_align,
- "allowzero": Tag.keyword_allowzero,
- "and": Tag.keyword_and,
- "anyframe": Tag.keyword_anyframe,
- "anytype": Tag.keyword_anytype,
- "asm": Tag.keyword_asm,
- "async": Tag.keyword_async,
- "await": Tag.keyword_await,
- "break": Tag.keyword_break,
- "callconv": Tag.keyword_callconv,
- "catch": Tag.keyword_catch,
- "comptime": Tag.keyword_comptime,
- "const": Tag.keyword_const,
- "continue": Tag.keyword_continue,
- "defer": Tag.keyword_defer,
- "else": Tag.keyword_else,
- "enum": Tag.keyword_enum,
- "errdefer": Tag.keyword_errdefer,
- "error": Tag.keyword_error,
- "export": Tag.keyword_export,
- "extern": Tag.keyword_extern,
- "fn": Tag.keyword_fn,
- "for": Tag.keyword_for,
- "if": Tag.keyword_if,
- "inline": Tag.keyword_inline,
- "noalias": Tag.keyword_noalias,
- "noinline": Tag.keyword_noinline,
- "nosuspend": Tag.keyword_nosuspend,
- "opaque": Tag.keyword_opaque,
- "or": Tag.keyword_or,
- "orelse": Tag.keyword_orelse,
- "packed": Tag.keyword_packed,
- "pub": Tag.keyword_pub,
- "resume": Tag.keyword_resume,
- "return": Tag.keyword_return,
- "linksection": Tag.keyword_linksection,
- "struct": Tag.keyword_struct,
- "suspend": Tag.keyword_suspend,
- "switch": Tag.keyword_switch,
- "test": Tag.keyword_test,
- "threadlocal": Tag.keyword_threadlocal,
- "try": Tag.keyword_try,
- "union": Tag.keyword_union,
- "unreachable": Tag.keyword_unreachable,
- "usingnamespace": Tag.keyword_usingnamespace,
- "var": Tag.keyword_var,
- "volatile": Tag.keyword_volatile,
- "while": Tag.keyword_while,
-};
-
-function make_token(tag, start, end) {
- return {
- tag: tag,
- loc: {
- start: start,
- end: end
- }
- }
-
-}
-
-function dump_tokens(tokens, raw_source) {
-
- //TODO: this is not very fast
- function find_tag_key(tag) {
- for (const [key, value] of Object.entries(Tag)) {
- if (value == tag) return key;
- }
- }
-
- for (let i = 0; i < tokens.length; i++) {
- const tok = tokens[i];
- const z = raw_source.substring(tok.loc.start, tok.loc.end).toLowerCase();
- console.log(`${find_tag_key(tok.tag)} "${tok.tag}" '${z}'`)
- }
-}
-
-function* Tokenizer(raw_source) {
- let tokenizer = new InnerTokenizer(raw_source);
- while (true) {
- let t = tokenizer.next();
- if (t.tag == Tag.eof)
- return;
-
- t.src = raw_source.slice(t.loc.start, t.loc.end);
-
- yield t;
- }
-
-}
-function InnerTokenizer(raw_source) {
- this.index = 0;
- this.flag = false;
-
- this.seen_escape_digits = undefined;
- this.remaining_code_units = undefined;
-
- this.next = () => {
- let state = State.start;
-
- var result = {
- tag: -1,
- loc: {
- start: this.index,
- end: undefined,
- },
- src: undefined,
- };
-
- //having a while (true) loop seems like a bad idea the loop should never
- //take more iterations than twice the length of the source code
- const MAX_ITERATIONS = raw_source.length * 2;
- let iterations = 0;
-
- while (iterations <= MAX_ITERATIONS) {
-
- if (this.flag) {
- return make_token(Tag.eof, this.index - 2, this.index - 2);
- }
- iterations += 1; // avoid death loops
-
- var c = raw_source[this.index];
-
- if (c === undefined) {
- c = ' '; // push the last token
- this.flag = true;
- }
-
- switch (state) {
- case State.start:
- switch (c) {
- case 0: {
- if (this.index != raw_source.length) {
- result.tag = Tag.invalid;
- result.loc.start = this.index;
- this.index += 1;
- result.loc.end = this.index;
- return result;
- }
- result.loc.end = this.index;
- return result;
- }
- case ' ':
- case '\n':
- case '\t':
- case '\r': {
- state = State.whitespace;
- result.tag = Tag.whitespace;
- result.loc.start = this.index;
- break;
- }
- case '"': {
- state = State.string_literal;
- result.tag = Tag.string_literal;
- break;
- }
- case '\'': {
- state = State.char_literal;
- break;
- }
- case 'a':
- case 'b':
- case 'c':
- case 'd':
- case 'e':
- case 'f':
- case 'g':
- case 'h':
- case 'i':
- case 'j':
- case 'k':
- case 'l':
- case 'm':
- case 'n':
- case 'o':
- case 'p':
- case 'q':
- case 'r':
- case 's':
- case 't':
- case 'u':
- case 'v':
- case 'w':
- case 'x':
- case 'y':
- case 'z':
- case 'A':
- case 'B':
- case 'C':
- case 'D':
- case 'E':
- case 'F':
- case 'G':
- case 'H':
- case 'I':
- case 'J':
- case 'K':
- case 'L':
- case 'M':
- case 'N':
- case 'O':
- case 'P':
- case 'Q':
- case 'R':
- case 'S':
- case 'T':
- case 'U':
- case 'V':
- case 'W':
- case 'X':
- case 'Y':
- case 'Z':
- case '_': {
- state = State.identifier;
- result.tag = Tag.identifier;
- break;
- }
- case '@': {
- state = State.saw_at_sign;
- break;
- }
- case '=': {
- state = State.equal;
- break;
- }
- case '!': {
- state = State.bang;
- break;
- }
- case '|': {
- state = State.pipe;
- break;
- }
- case '(': {
- result.tag = Tag.l_paren;
- this.index += 1;
- result.loc.end = this.index;
-
- return result;
-
- }
- case ')': {
- result.tag = Tag.r_paren;
- this.index += 1; result.loc.end = this.index;
- return result;
-
- }
- case '[': {
- result.tag = Tag.l_bracket;
- this.index += 1; result.loc.end = this.index;
- return result;
-
- }
- case ']': {
- result.tag = Tag.r_bracket;
- this.index += 1; result.loc.end = this.index;
- return result;
-
- }
- case ';': {
- result.tag = Tag.semicolon;
- this.index += 1; result.loc.end = this.index;
- return result;
-
- }
- case ',': {
- result.tag = Tag.comma;
- this.index += 1; result.loc.end = this.index;
- return result;
-
- }
- case '?': {
- result.tag = Tag.question_mark;
- this.index += 1; result.loc.end = this.index;
- return result;
-
- }
- case ':': {
- result.tag = Tag.colon;
- this.index += 1; result.loc.end = this.index;
- return result;
-
- }
- case '%': {
- state = State.percent; break;
- }
- case '*': {
- state = State.asterisk; break;
- }
- case '+': {
- state = State.plus; break;
- }
- case '<': {
- state = State.angle_bracket_left; break;
- }
- case '>': {
- state = State.angle_bracket_right; break;
- }
- case '^': {
- state = State.caret; break;
- }
- case '\\': {
- state = State.backslash;
- result.tag = Tag.multiline_string_literal_line; break;
- }
- case '{': {
- result.tag = Tag.l_brace;
- this.index += 1; result.loc.end = this.index;
- return result;
-
- }
- case '}': {
- result.tag = Tag.r_brace;
- this.index += 1; result.loc.end = this.index;
- return result;
-
- }
- case '~': {
- result.tag = Tag.tilde;
- this.index += 1; result.loc.end = this.index;
- return result;
-
- }
- case '.': {
- state = State.period; break;
- }
- case '-': {
- state = State.minus; break;
- }
- case '/': {
- state = State.slash; break;
- }
- case '&': {
- state = State.ampersand; break;
- }
- case '0':
- case '1':
- case '2':
- case '3':
- case '4':
- case '5':
- case '6':
- case '7':
- case '8':
- case '9':
- {
- state = State.int;
- result.tag = Tag.number_literal; break;
- }
- default: {
- result.tag = Tag.invalid;
- result.loc.end = this.index;
- this.index += 1;
- return result;
- }
- }
- break;
- case State.saw_at_sign:
- switch (c) {
- case '"': {
- result.tag = Tag.identifier;
- state = State.string_literal; break;
- }
- case 'a':
- case 'b':
- case 'c':
- case 'd':
- case 'e':
- case 'f':
- case 'g':
- case 'h':
- case 'i':
- case 'j':
- case 'k':
- case 'l':
- case 'm':
- case 'n':
- case 'o':
- case 'p':
- case 'q':
- case 'r':
- case 's':
- case 't':
- case 'u':
- case 'v':
- case 'w':
- case 'x':
- case 'y':
- case 'z':
- case 'A':
- case 'B':
- case 'C':
- case 'D':
- case 'E':
- case 'F':
- case 'G':
- case 'H':
- case 'I':
- case 'J':
- case 'K':
- case 'L':
- case 'M':
- case 'N':
- case 'O':
- case 'P':
- case 'Q':
- case 'R':
- case 'S':
- case 'T':
- case 'U':
- case 'V':
- case 'W':
- case 'X':
- case 'Y':
- case 'Z':
- case '_': {
- state = State.builtin;
- result.tag = Tag.builtin;
- break;
- }
- default: {
- result.tag = Tag.invalid;
- result.loc.end = this.index;
- return result;
- }
- }
- break;
- case State.ampersand:
- switch (c) {
- case '=': {
- result.tag = Tag.ampersand_equal;
- this.index += 1; result.loc.end = this.index;
- return result;
- }
- default: {
- result.tag = Tag.ampersand; result.loc.end = this.index;
- return result;
- }
- }
- break;
- case State.asterisk: switch (c) {
- case '=': {
- result.tag = Tag.asterisk_equal;
- this.index += 1; result.loc.end = this.index;
- return result;
- }
- case '*': {
- result.tag = Tag.asterisk_asterisk;
- this.index += 1; result.loc.end = this.index;
- return result;
- }
- case '%': {
- state = State.asterisk_percent; break;
- }
- case '|': {
- state = State.asterisk_pipe; break;
- }
- default: {
- result.tag = Tag.asterisk;
- result.loc.end = this.index;
- return result;
- }
- }
- break;
- case State.asterisk_percent:
- switch (c) {
- case '=': {
- result.tag = Tag.asterisk_percent_equal;
- this.index += 1; result.loc.end = this.index;
- return result;
- }
- default: {
- result.tag = Tag.asterisk_percent;
- result.loc.end = this.index;
- return result;
- }
- }
- break;
- case State.asterisk_pipe:
- switch (c) {
- case '=': {
- result.tag = Tag.asterisk_pipe_equal;
- this.index += 1; result.loc.end = this.index;
- return result;
- }
- default: {
- result.tag = Tag.asterisk_pipe; result.loc.end = this.index;
- return result;
- }
- }
- break;
- case State.percent:
- switch (c) {
- case '=': {
- result.tag = Tag.percent_equal;
- this.index += 1; result.loc.end = this.index;
- return result;
- }
- default: {
- result.tag = Tag.percent; result.loc.end = this.index;
- return result;
- }
- }
- break;
- case State.plus:
- switch (c) {
- case '=': {
- result.tag = Tag.plus_equal;
- this.index += 1; result.loc.end = this.index;
- return result;
- }
- case '+': {
- result.tag = Tag.plus_plus;
- this.index += 1; result.loc.end = this.index;
- return result;
- }
- case '%': {
- state = State.plus_percent; break;
- }
- case '|': {
- state = State.plus_pipe; break;
- }
- default: {
- result.tag = Tag.plus; result.loc.end = this.index;
- return result;
- }
- }
- break;
- case State.plus_percent:
- switch (c) {
- case '=': {
- result.tag = Tag.plus_percent_equal;
- this.index += 1; result.loc.end = this.index;
- return result;
- }
- default: {
- result.tag = Tag.plus_percent; result.loc.end = this.index;
- return result;
- }
- }
- break;
- case State.plus_pipe:
- switch (c) {
- case '=': {
- result.tag = Tag.plus_pipe_equal;
- this.index += 1; result.loc.end = this.index;
- return result;
- }
- default: {
- result.tag = Tag.plus_pipe; result.loc.end = this.index;
- return result;
- }
- }
- break;
- case State.caret:
- switch (c) {
- case '=': {
- result.tag = Tag.caret_equal;
- this.index += 1; result.loc.end = this.index;
- return result;
- }
- default: {
- result.tag = Tag.caret; result.loc.end = this.index;
- return result;
- }
- }
- break;
- case State.identifier:
- switch (c) {
- case 'a':
- case 'b':
- case 'c':
- case 'd':
- case 'e':
- case 'f':
- case 'g':
- case 'h':
- case 'i':
- case 'j':
- case 'k':
- case 'l':
- case 'm':
- case 'n':
- case 'o':
- case 'p':
- case 'q':
- case 'r':
- case 's':
- case 't':
- case 'u':
- case 'v':
- case 'w':
- case 'x':
- case 'y':
- case 'z':
- case 'A':
- case 'B':
- case 'C':
- case 'D':
- case 'E':
- case 'F':
- case 'G':
- case 'H':
- case 'I':
- case 'J':
- case 'K':
- case 'L':
- case 'M':
- case 'N':
- case 'O':
- case 'P':
- case 'Q':
- case 'R':
- case 'S':
- case 'T':
- case 'U':
- case 'V':
- case 'W':
- case 'X':
- case 'Y':
- case 'Z':
- case '_':
- case '0':
- case '1':
- case '2':
- case '3':
- case '4':
- case '5':
- case '6':
- case '7':
- case '8':
- case '9': break;
- default: {
- // if (Token.getKeyword(buffer[result.loc.start..this.index])) | tag | {
- const z = raw_source.substring(result.loc.start, this.index);
- if (z in keywords) {
- result.tag = keywords[z];
- }
- result.loc.end = this.index;
- return result;
- }
-
-
- }
- break;
- case State.builtin: switch (c) {
- case 'a':
- case 'b':
- case 'c':
- case 'd':
- case 'e':
- case 'f':
- case 'g':
- case 'h':
- case 'i':
- case 'j':
- case 'k':
- case 'l':
- case 'm':
- case 'n':
- case 'o':
- case 'p':
- case 'q':
- case 'r':
- case 's':
- case 't':
- case 'u':
- case 'v':
- case 'w':
- case 'x':
- case 'y':
- case 'z':
- case 'A':
- case 'B':
- case 'C':
- case 'D':
- case 'E':
- case 'F':
- case 'G':
- case 'H':
- case 'I':
- case 'J':
- case 'K':
- case 'L':
- case 'M':
- case 'N':
- case 'O':
- case 'P':
- case 'Q':
- case 'R':
- case 'S':
- case 'T':
- case 'U':
- case 'V':
- case 'W':
- case 'X':
- case 'Y':
- case 'Z':
- case '_':
- case '0':
- case '1':
- case '2':
- case '3':
- case '4':
- case '5':
- case '6':
- case '7':
- case '8':
- case '9': break;
- default: result.loc.end = this.index;
- return result;
- }
- break;
- case State.backslash:
- switch (c) {
- case '\\': {
- state = State.multiline_string_literal_line;
- break;
- }
- default: {
- result.tag = Tag.invalid;
- result.loc.end = this.index;
- return result;
- }
- }
- break;
- case State.string_literal:
- switch (c) {
- case '\\': {
- state = State.string_literal_backslash; break;
- }
- case '"': {
- this.index += 1;
- result.loc.end = this.index;
-
- return result;
- }
- case 0: {
- //TODO: PORT
- // if (this.index == buffer.len) {
- // result.tag = .invalid;
- // break;
- // } else {
- // checkLiteralCharacter();
- // }
- result.loc.end = this.index;
- return result;
- }
- case '\n': {
- result.tag = Tag.invalid;
- result.loc.end = this.index;
- return result;
- }
- //TODO: PORT
- //default: checkLiteralCharacter(),
- }
- break;
- case State.string_literal_backslash:
- switch (c) {
- case 0:
- case '\n': {
- result.tag = Tag.invalid;
- result.loc.end = this.index;
- return result;
- }
- default: {
- state = State.string_literal; break;
- }
- }
- break;
- case State.char_literal: switch (c) {
- case 0: {
- result.tag = Tag.invalid;
- result.loc.end = this.index;
- return result;
- }
- case '\\': {
- state = State.char_literal_backslash;
- break;
- }
- //TODO: PORT
- // '\'', 0x80...0xbf, 0xf8...0xff => {
- // result.tag = .invalid;
- // break;
- // },
- // 0xc0...0xdf => { // 110xxxxx
- // this.remaining_code_units = 1;
- // state = .char_literal_unicode;
- // },
- // 0xe0...0xef => { // 1110xxxx
- // this.remaining_code_units = 2;
- // state = .char_literal_unicode;
- // },
- // 0xf0...0xf7 => { // 11110xxx
- // this.remaining_code_units = 3;
- // state = .char_literal_unicode;
- // },
-
- // case 0x80:
- // case 0x81:
- // case 0x82:
- // case 0x83:
- // case 0x84:
- // case 0x85:
- // case 0x86:
- // case 0x87:
- // case 0x88:
- // case 0x89:
- // case 0x8a:
- // case 0x8b:
- // case 0x8c:
- // case 0x8d:
- // case 0x8e:
- // case 0x8f:
- // case 0x90:
- // case 0x91:
- // case 0x92:
- // case 0x93:
- // case 0x94:
- // case 0x95:
- // case 0x96:
- // case 0x97:
- // case 0x98:
- // case 0x99:
- // case 0x9a:
- // case 0x9b:
- // case 0x9c:
- // case 0x9d:
- // case 0x9e:
- // case 0x9f:
- // case 0xa0:
- // case 0xa1:
- // case 0xa2:
- // case 0xa3:
- // case 0xa4:
- // case 0xa5:
- // case 0xa6:
- // case 0xa7:
- // case 0xa8:
- // case 0xa9:
- // case 0xaa:
- // case 0xab:
- // case 0xac:
- // case 0xad:
- // case 0xae:
- // case 0xaf:
- // case 0xb0:
- // case 0xb1:
- // case 0xb2:
- // case 0xb3:
- // case 0xb4:
- // case 0xb5:
- // case 0xb6:
- // case 0xb7:
- // case 0xb8:
- // case 0xb9:
- // case 0xba:
- // case 0xbb:
- // case 0xbc:
- // case 0xbd:
- // case 0xbe:
- // case 0xbf:
- // case 0xf8:
- // case 0xf9:
- // case 0xfa:
- // case 0xfb:
- // case 0xfc:
- // case 0xfd:
- // case 0xfe:
- // case 0xff:
- // result.tag = .invalid;
- // break;
- // case 0xc0:
- // case 0xc1:
- // case 0xc2:
- // case 0xc3:
- // case 0xc4:
- // case 0xc5:
- // case 0xc6:
- // case 0xc7:
- // case 0xc8:
- // case 0xc9:
- // case 0xca:
- // case 0xcb:
- // case 0xcc:
- // case 0xcd:
- // case 0xce:
- // case 0xcf:
- // case 0xd0:
- // case 0xd1:
- // case 0xd2:
- // case 0xd3:
- // case 0xd4:
- // case 0xd5:
- // case 0xd6:
- // case 0xd7:
- // case 0xd8:
- // case 0xd9:
- // case 0xda:
- // case 0xdb:
- // case 0xdc:
- // case 0xdd:
- // case 0xde:
- // case 0xdf:
- // this.remaining_code_units = 1;
- // state = .char_literal_unicode;
- // case 0xe0:
- // case 0xe1:
- // case 0xe2:
- // case 0xe3:
- // case 0xe4:
- // case 0xe5:
- // case 0xe6:
- // case 0xe7:
- // case 0xe8:
- // case 0xe9:
- // case 0xea:
- // case 0xeb:
- // case 0xec:
- // case 0xed:
- // case 0xee:
- // case 0xef:
- // this.remaining_code_units = 2;
- // state = .char_literal_unicode;
- // case 0xf0:
- // case 0xf1:
- // case 0xf2:
- // case 0xf3:
- // case 0xf4:
- // case 0xf5:
- // case 0xf6:
- // case 0xf7:
- // this.remaining_code_units = 3;
- // state = .char_literal_unicode;
-
- case '\n': {
- result.tag = Tag.invalid;
- result.loc.end = this.index;
- return result;
- }
- default: {
- state = State.char_literal_end; break;
- }
- }
- break;
- case State.char_literal_backslash:
- switch (c) {
- case 0:
- case '\n': {
- result.tag = Tag.invalid;
- result.loc.end = this.index;
- return result;
- }
- case 'x': {
- state = State.char_literal_hex_escape;
- this.seen_escape_digits = 0; break;
- }
- case 'u': {
- state = State.char_literal_unicode_escape_saw_u; break;
- }
- default: {
- state = State.char_literal_end; break;
- }
- }
- break;
- case State.char_literal_hex_escape:
- switch (c) {
- case '0':
- case '1':
- case '2':
- case '3':
- case '4':
- case '5':
- case '6':
- case '7':
- case '8':
- case '9':
- case 'a':
- case 'b':
- case 'c':
- case 'd':
- case 'e':
- case 'f':
- case 'A':
- case 'B':
- case 'C':
- case 'D':
- case 'E':
- case 'F': {
- this.seen_escape_digits += 1;
- if (this.seen_escape_digits == 2) {
- state = State.char_literal_end;
- } break;
- }
- default: {
- result.tag = Tag.invalid;
- esult.loc.end = this.index;
- return result;
- }
- }
- break;
- case State.char_literal_unicode_escape_saw_u:
- switch (c) {
- case 0: {
- result.tag = Tag.invalid;
- result.loc.end = this.index;
- return result;
- }
- case '{': {
- state = State.char_literal_unicode_escape; break;
- }
- default: {
- result.tag = Tag.invalid;
- state = State.char_literal_unicode_invalid; break;
- }
- }
- break;
- case State.char_literal_unicode_escape:
- switch (c) {
- case 0: {
- result.tag = Tag.invalid;
- result.loc.end = this.index;
- return result;
- }
- case '0':
- case '1':
- case '2':
- case '3':
- case '4':
- case '5':
- case '6':
- case '7':
- case '8':
- case '9':
- case 'a':
- case 'b':
- case 'c':
- case 'd':
- case 'e':
- case 'f':
- case 'A':
- case 'B':
- case 'C':
- case 'D':
- case 'E':
- case 'F': break;
- case '}': {
- state = State.char_literal_end; // too many/few digits handled later
- break;
- }
- default: {
- result.tag = Tag.invalid;
- state = State.char_literal_unicode_invalid; break;
- }
- }
- break;
- case State.char_literal_unicode_invalid:
- switch (c) {
- // Keep consuming characters until an obvious stopping point.
- // This consolidates e.g. `u{0ab1Q}` into a single invalid token
- // instead of creating the tokens `u{0ab1`, `Q`, `}`
- case 'a':
- case 'b':
- case 'c':
- case 'd':
- case 'e':
- case 'f':
- case 'g':
- case 'h':
- case 'i':
- case 'j':
- case 'k':
- case 'l':
- case 'm':
- case 'n':
- case 'o':
- case 'p':
- case 'q':
- case 'r':
- case 's':
- case 't':
- case 'u':
- case 'v':
- case 'w':
- case 'x':
- case 'y':
- case 'z':
- case 'A':
- case 'B':
- case 'C':
- case 'D':
- case 'E':
- case 'F':
- case 'G':
- case 'H':
- case 'I':
- case 'J':
- case 'K':
- case 'L':
- case 'M':
- case 'N':
- case 'O':
- case 'P':
- case 'Q':
- case 'R':
- case 'S':
- case 'T':
- case 'U':
- case 'V':
- case 'W':
- case 'X':
- case 'Y':
- case 'Z':
- case '}':
- case '0':
- case '1':
- case '2':
- case '3':
- case '4':
- case '5':
- case '6':
- case '7':
- case '8':
- case '9': break;
- default: break;
- }
- break;
- case State.char_literal_end:
- switch (c) {
- case '\'': {
- result.tag = Tag.char_literal;
- this.index += 1;
- result.loc.end = this.index;
- return result;
- }
- default: {
- result.tag = Tag.invalid;
- result.loc.end = this.index;
- return result;
- }
- }
- break;
- case State.char_literal_unicode:
- switch (c) {
- // 0x80...0xbf => {
- // this.remaining_code_units -= 1;
- // if (this.remaining_code_units == 0) {
- // state = .char_literal_end;
- // }
- // },
- default: {
- result.tag = Tag.invalid;
- result.loc.end = this.index;
- return result;
- }
- }
- break;
- case State.multiline_string_literal_line:
- switch (c) {
- case 0:
- result.loc.end = this.index;
- return result;
- case '\n': {
-
- this.index += 1;
- result.loc.end = this.index;
- return result;
- }
- case '\t': break;
- //TODO: PORT
- //default: checkLiteralCharacter(),
-
- }
- break;
- case State.bang:
- switch (c) {
- case '=': {
- result.tag = Tag.bang_equal;
- this.index += 1;
- result.loc.end = this.index;
- return result;
- }
- default: {
- result.tag = Tag.bang;
- result.loc.end = this.index;
- return result;
- }
- }
- break;
- case State.pipe:
- switch (c) {
- case '=': {
- result.tag = Tag.pipe_equal;
- this.index += 1;
- result.loc.end = this.index;
- return result;
- }
- case '|': {
- result.tag = Tag.pipe_pipe;
- this.index += 1;
- result.loc.end = this.index;
- return result;
- }
- default: {
- result.tag = Tag.pipe;
- result.loc.end = this.index;
- return result;
- }
- }
- break;
- case State.equal: switch (c) {
- case '=': {
- result.tag = Tag.equal_equal;
- this.index += 1;
- result.loc.end = this.index;
- return result;
- }
- case '>': {
- result.tag = Tag.equal_angle_bracket_right;
- this.index += 1;
- result.loc.end = this.index;
- return result;
- }
- default: {
- result.tag = Tag.equal;
- result.loc.end = this.index;
- return result;
- }
- }
- break;
- case State.minus: switch (c) {
- case '>': {
- result.tag = Tag.arrow;
- this.index += 1;
- result.loc.end = this.index;
- return result;
- }
- case '=': {
- result.tag = Tag.minus_equal;
- this.index += 1;
- result.loc.end = this.index;
- return result;
- }
- case '%': {
- state = State.minus_percent; break;
- }
- case '|': {
- state = State.minus_pipe; break;
- }
- default: {
- result.tag = Tag.minus;
- result.loc.end = this.index;
- return result;
- }
- }
- break;
- case State.minus_percent:
- switch (c) {
- case '=': {
- result.tag = Tag.minus_percent_equal;
- this.index += 1;
- result.loc.end = this.index;
- return result;
- }
- default: {
- result.tag = Tag.minus_percent;
- result.loc.end = this.index;
- return result;
- }
- }
- break;
- case State.minus_pipe:
- switch (c) {
- case '=': {
- result.tag = Tag.minus_pipe_equal;
- this.index += 1;
- result.loc.end = this.index;
- return result;
- }
- default: {
- result.tag = Tag.minus_pipe;
- result.loc.end = this.index;
- return result;
- }
- }
- break;
- case State.angle_bracket_left:
- switch (c) {
- case '<': {
- state = State.angle_bracket_angle_bracket_left; break;
- }
- case '=': {
- result.tag = Tag.angle_bracket_left_equal;
- this.index += 1;
- result.loc.end = this.index;
- return result;
- }
- default: {
- result.tag = Tag.angle_bracket_left;
- result.loc.end = this.index;
- return result;
- }
- }
- break;
- case State.angle_bracket_angle_bracket_left:
- switch (c) {
- case '=': {
- result.tag = Tag.angle_bracket_angle_bracket_left_equal;
- this.index += 1;
- result.loc.end = this.index;
- return result;
- }
- case '|': {
- state = State.angle_bracket_angle_bracket_left_pipe;
- }
- default: {
- result.tag = Tag.angle_bracket_angle_bracket_left;
- result.loc.end = this.index;
- return result;
- }
- }
- break;
- case State.angle_bracket_angle_bracket_left_pipe:
- switch (c) {
- case '=': {
- result.tag = Tag.angle_bracket_angle_bracket_left_pipe_equal;
- this.index += 1;
- result.loc.end = this.index;
- return result;
- }
- default: {
- result.tag = Tag.angle_bracket_angle_bracket_left_pipe;
- result.loc.end = this.index;
- return result;
- }
- }
- break;
- case State.angle_bracket_right:
- switch (c) {
- case '>': {
- state = State.angle_bracket_angle_bracket_right; break;
- }
- case '=': {
- result.tag = Tag.angle_bracket_right_equal;
- this.index += 1;
- result.loc.end = this.index;
- return result;
- }
- default: {
- result.tag = Tag.angle_bracket_right;
- result.loc.end = this.index;
- return result;
- }
- }
- break;
- case State.angle_bracket_angle_bracket_right:
- switch (c) {
- case '=': {
- result.tag = Tag.angle_bracket_angle_bracket_right_equal;
- this.index += 1;
- result.loc.end = this.index;
- return result;
- }
- default: {
- result.tag = Tag.angle_bracket_angle_bracket_right;
- result.loc.end = this.index;
- return result;
- }
- }
- break;
- case State.period:
- switch (c) {
- case '.': {
- state = State.period_2; break;
- }
- case '*': {
- state = State.period_asterisk; break;
- }
- default: {
- result.tag = Tag.period;
- result.loc.end = this.index;
- return result;
- }
- }
- break;
- case State.period_2:
- switch (c) {
- case '.': {
- result.tag = Tag.ellipsis3;
- this.index += 1;
- result.loc.end = this.index;
- return result;
- }
- default: {
- result.tag = Tag.ellipsis2;
- result.loc.end = this.index;
- return result;
- }
- }
- break;
- case State.period_asterisk:
- switch (c) {
- case '*': {
- result.tag = Tag.invalid_periodasterisks;
- result.loc.end = this.index;
- return result;
- }
- default: {
- result.tag = Tag.period_asterisk;
- result.loc.end = this.index;
- return result;
- }
- }
- break;
- case State.slash:
- switch (c) {
- case '/': {
- state = State.line_comment_start;
- break;
- }
- case '=': {
- result.tag = Tag.slash_equal;
- this.index += 1;
- result.loc.end = this.index;
- return result;
- }
- default: {
- result.tag = Tag.slash;
- result.loc.end = this.index;
- return result;
- }
- } break;
- case State.line_comment_start:
- switch (c) {
- case 0: {
- if (this.index != raw_source.length) {
- result.tag = Tag.invalid;
- this.index += 1;
- }
- result.loc.end = this.index;
- return result;
- }
- case '/': {
- state = State.doc_comment_start; break;
- }
- case '!': {
- result.tag = Tag.container_doc_comment;
- state = State.doc_comment; break;
- }
- case '\n': {
- state = State.start;
- result.loc.start = this.index + 1; break;
- }
- case '\t':
- state = State.line_comment; break;
- default: {
- state = State.line_comment;
- //TODO: PORT
- //checkLiteralCharacter();
- break;
- }
- } break;
- case State.doc_comment_start:
- switch (c) {
- case '/': {
- state = State.line_comment; break;
- }
- case 0:
- case '\n':
- {
- result.tag = Tag.doc_comment;
- result.loc.end = this.index;
- return result;
- }
- case '\t': {
- state = State.doc_comment;
- result.tag = Tag.doc_comment; break;
- }
- default: {
- state = State.doc_comment;
- result.tag = Tag.doc_comment;
- //TODO: PORT
- //checkLiteralCharacter();
- break;
- }
- } break;
- case State.line_comment:
- switch (c) {
- case 0: {
- if (this.index != raw_source.length) {
- result.tag = Tag.invalid;
- this.index += 1;
- }
- result.loc.end = this.index;
- return result;
- }
- case '\n': {
- result.tag = Tag.line_comment;
- result.loc.end = this.index;
- return result;
- }
- case '\t': break;
- //TODO: PORT
- //default: checkLiteralCharacter(),
- } break;
- case State.doc_comment:
- switch (c) {
- case 0://
- case '\n':
- result.loc.end = this.index;
- return result;
- case '\t': break;
- //TODOL PORT
- // default: checkLiteralCharacter(),
- default:
- break;
- } break;
- case State.int:
- switch (c) {
- case '.':
- state = State.int_period;
- break;
- case '_':
- case 'a':
- case 'b':
- case 'c':
- case 'd':
- case 'f':
- case 'g':
- case 'h':
- case 'i':
- case 'j':
- case 'k':
- case 'l':
- case 'm':
- case 'n':
- case 'o':
- case 'q':
- case 'r':
- case 's':
- case 't':
- case 'u':
- case 'v':
- case 'w':
- case 'x':
- case 'y':
- case 'z':
- case 'A':
- case 'B':
- case 'C':
- case 'D':
- case 'F':
- case 'G':
- case 'H':
- case 'I':
- case 'J':
- case 'K':
- case 'L':
- case 'M':
- case 'N':
- case 'O':
- case 'Q':
- case 'R':
- case 'S':
- case 'T':
- case 'U':
- case 'V':
- case 'W':
- case 'X':
- case 'Y':
- case 'Z':
- case '0':
- case '1':
- case '2':
- case '3':
- case '4':
- case '5':
- case '6':
- case '7':
- case '8':
- case '9':
- break;
- case 'e':
- case 'E':
- case 'p':
- case 'P':
- state = State.int_exponent;
- break;
- default: result.loc.end = this.index;
- return result;
- } break;
- case State.int_exponent:
- switch (c) {
- case '-':
- case '+':
- {
- ``
- state = State.float; break;
- }
- default: {
- this.index -= 1;
- state = State.int; break;
- }
- } break;
- case State.int_period: switch (c) {
- case '_':
- case 'a':
- case 'b':
- case 'c':
- case 'd':
- case 'f':
- case 'g':
- case 'h':
- case 'i':
- case 'j':
- case 'k':
- case 'l':
- case 'm':
- case 'n':
- case 'o':
- case 'q':
- case 'r':
- case 's':
- case 't':
- case 'u':
- case 'v':
- case 'w':
- case 'x':
- case 'y':
- case 'z':
- case 'A':
- case 'B':
- case 'C':
- case 'D':
- case 'F':
- case 'G':
- case 'H':
- case 'I':
- case 'J':
- case 'K':
- case 'L':
- case 'M':
- case 'N':
- case 'O':
- case 'Q':
- case 'R':
- case 'S':
- case 'T':
- case 'U':
- case 'V':
- case 'W':
- case 'X':
- case 'Y':
- case 'Z':
- case '0':
- case '1':
- case '2':
- case '3':
- case '4':
- case '5':
- case '6':
- case '7':
- case '8':
- case '9': {
- state = State.float; break;
- }
- case 'e':
- case 'E':
- case 'p':
- case 'P':
- state = State.float_exponent; break;
- default: {
- this.index -= 1;
- result.loc.end = this.index;
- return result;
- }
- } break;
- case State.float:
- switch (c) {
- case '_':
- case 'a':
- case 'b':
- case 'c':
- case 'd':
- case 'f':
- case 'g':
- case 'h':
- case 'i':
- case 'j':
- case 'k':
- case 'l':
- case 'm':
- case 'n':
- case 'o':
- case 'q':
- case 'r':
- case 's':
- case 't':
- case 'u':
- case 'v':
- case 'w':
- case 'x':
- case 'y':
- case 'z':
- case 'A':
- case 'B':
- case 'C':
- case 'D':
- case 'F':
- case 'G':
- case 'H':
- case 'I':
- case 'J':
- case 'K':
- case 'L':
- case 'M':
- case 'N':
- case 'O':
- case 'Q':
- case 'R':
- case 'S':
- case 'T':
- case 'U':
- case 'V':
- case 'W':
- case 'X':
- case 'Y':
- case 'Z':
- case '0':
- case '1':
- case '2':
- case '3':
- case '4':
- case '5':
- case '6':
- case '7':
- case '8':
- case '9':
- break;
-
- case 'e':
- case 'E':
- case 'p':
- case 'P':
- state = State.float_exponent; break;
- default: result.loc.end = this.index;
- return result;
- } break;
- case State.float_exponent:
- switch (c) {
- case '-':
- case '+':
- state = State.float; break;
- default: {
- this.index -= 1;
- state = State.float; break;
- }
- }
- break;
-
- case State.whitespace:
- switch(c) {
- case ' ':
- case '\n':
- case '\t':
- case '\r': {
- break;
- }
- default: {
- result.loc.end = this.index;
- return result;
- }
- }
- }
- this.index += 1;
- }
-
- //TODO: PORT
- // if (result.tag == Tag.eof) {
- // if (pending_invalid_token) | token | {
- // pending_invalid_token = null;
- // return token;
- // }
- // result.loc.start = sindex;
- // }
-
- result.loc.end = this.index;
- return result;
-
- }
-}
-
-
-const builtin_types = [
- "f16", "f32", "f64", "f80", "f128",
- "c_longdouble", "c_short", "c_ushort", "c_int", "c_uint",
- "c_long", "c_ulong", "c_longlong", "c_ulonglong", "c_char",
- "anyopaque", "void", "bool", "isize", "usize",
- "noreturn", "type", "anyerror", "comptime_int", "comptime_float",
-];
-
-function isSimpleType(typeName) {
- return builtin_types.includes(typeName) || isIntType(typeName);
-}
-
-function isIntType(typeName) {
- if (typeName[0] != 'u' && typeName[0] != 'i') return false;
- let i = 1;
- if (i == typeName.length) return false;
- for (; i < typeName.length; i += 1) {
- if (typeName[i] < '0' || typeName[i] > '9') return false;
- }
- return true;
-}
-
-function isSpecialIndentifier(identifier) {
- return ["null", "true", "false", ,"undefined"].includes(identifier);
-}
-
-//const fs = require('fs');
-//const src = fs.readFileSync("../std/c.zig", 'utf8');
-//console.log(generate_html_for_src(src));
-
-
-// gist for zig_lexer_test code: https://gist.github.com/Myvar/2684ba4fb86b975274629d6f21eddc7b
-// // Just for testing not to commit in pr
-// var isNode = new Function("try {return this===global;}catch(e){return false;}");
-// if (isNode()) {
-
-
-// //const s = "const std = @import(\"std\");";
-// //const toksa = tokenize_zig_source(s);
-// //dump_tokens(toksa, s);
-// //console.log(JSON.stringify(toksa));
-
-// const fs = require('fs');
-
-// function testFile(fileName) {
-// //console.log(fileName);
-// var exec = require('child_process').execFileSync;
-// var passed = true;
-// const zig_data = exec('./zig_lexer_test', [fileName]);
-// const data = fs.readFileSync(fileName, 'utf8');
-
-// const toks = tokenize_zig_source(data);
-// const a_json = toks;
-
-// // dump_tokens(a_json, data);
-// // return;
-
-// const b_json = JSON.parse(zig_data.toString());
-
-// if (a_json.length !== b_json.length) {
-// console.log("FAILED a and be is not the same length");
-// passed = false;
-// //return;
-// }
-
-// let len = a_json.length;
-// if (len >= b_json.length) len = b_json.length;
-
-// for (let i = 0; i < len; i++) {
-// const a = a_json[i];
-// const b = b_json[i];
-
-// // console.log(a.tag + " == " + b.tag);
-
-// if (a.tag !== b.tag) {
-
-// // console.log("Around here:");
-// // console.log(
-// // data.substring(b_json[i - 2].loc.start, b_json[i - 2].loc.end),
-// // data.substring(b_json[i - 1].loc.start, b_json[i - 1].loc.end),
-// // data.substring(b_json[i].loc.start, b_json[i].loc.end),
-// // data.substring(b_json[i + 1].loc.start, b_json[i + 1].loc.end),
-// // data.substring(b_json[i + 2].loc.start, b_json[i + 2].loc.end),
-// // );
-
-// console.log("TAG: a != b");
-// console.log("js", a.tag);
-// console.log("zig", b.tag);
-// passed = false;
-// return;
-// }
-
-// if (a.tag !== Tag.eof && a.loc.start !== b.loc.start) {
-// console.log("START: a != b");
-
-// console.log("js", "\"" + data.substring(a_json[i ].loc.start, a_json[i].loc.end) + "\"");
-// console.log("zig", "\"" + data.substring(b_json[i ].loc.start, b_json[i].loc.end) + "\"");
-
-
-// passed = false;
-// return;
-// }
-
-// // if (a.tag !== Tag.eof && a.loc.end !== b.loc.end) {
-// // console.log("END: a != b");
-// // // console.log("Around here:");
-// // // console.log(
-// // // // data.substring(b_json[i - 2].loc.start, b_json[i - 2].loc.end),
-// // // // data.substring(b_json[i - 1].loc.start, b_json[i - 1].loc.end),
-// // // data.substring(b_json[i ].loc.start, b_json[i].loc.end),
-// // // // data.substring(b_json[i + 1].loc.start, b_json[i + 1].loc.end),
-// // // // data.substring(b_json[i + 2].loc.start, b_json[i + 2].loc.end),
-// // // );
-// // console.log("js", "\"" + data.substring(a_json[i ].loc.start, a_json[i].loc.end) + "\"");
-// // console.log("zig", "\"" + data.substring(b_json[i ].loc.start, b_json[i].loc.end) + "\"");
-// // passed = false;
-// // return;
-// // }
-// }
-// return passed;
-// }
-// var path = require('path');
-// function fromDir(startPath, filter) {
-// if (!fs.existsSync(startPath)) {
-// console.log("no dir ", startPath);
-// return;
-// }
-// var files = fs.readdirSync(startPath);
-// for (var i = 0; i < files.length; i++) {
-// var filename = path.join(startPath, files[i]);
-// var stat = fs.lstatSync(filename);
-// if (stat.isDirectory()) {
-// fromDir(filename, filter); //recurse
-// } else if (filename.endsWith(filter)) {
-// try {
-// console.log('-- TESTING: ', filename);
-// console.log("\t\t", testFile(filename));
-// }
-// catch {
-// }
-// };
-// };
-// };
-// fromDir('../std', '.zig');
-// //console.log(testFile("/home/myvar/code/zig/lib/std/fmt/errol.zig"));
-// //console.log(testFile("test.zig"));
-// }
\ No newline at end of file
diff --git a/lib/std/Thread/WaitGroup.zig b/lib/std/Thread/WaitGroup.zig
index a6a82a9492..d85188fa78 100644
--- a/lib/std/Thread/WaitGroup.zig
+++ b/lib/std/Thread/WaitGroup.zig
@@ -1,3 +1,4 @@
+const builtin = @import("builtin");
const std = @import("std");
const assert = std.debug.assert;
const WaitGroup = @This();
@@ -43,3 +44,24 @@ pub fn isDone(wg: *WaitGroup) bool {
return (state / one_pending) == 0;
}
+
+// Spawns a new thread for the task. This is appropriate when the callee
+// delegates all work.
+pub fn spawnManager(
+ wg: *WaitGroup,
+ comptime func: anytype,
+ args: anytype,
+) void {
+ if (builtin.single_threaded) {
+ @call(.auto, func, args);
+ return;
+ }
+ const Manager = struct {
+ fn run(wg_inner: *WaitGroup, args_inner: @TypeOf(args)) void {
+ defer wg_inner.finish();
+ @call(.auto, func, args_inner);
+ }
+ };
+ wg.start();
+ _ = std.Thread.spawn(.{}, Manager.run, .{ wg, args }) catch Manager.run(wg, args);
+}
diff --git a/lib/std/base64.zig b/lib/std/base64.zig
index ce84e640d1..2627480295 100644
--- a/lib/std/base64.zig
+++ b/lib/std/base64.zig
@@ -1,3 +1,5 @@
+//! Base64 encoding/decoding.
+
const std = @import("std.zig");
const assert = std.debug.assert;
const builtin = @import("builtin");
diff --git a/lib/std/builtin.zig b/lib/std/builtin.zig
index fc85d32d52..3339a5337e 100644
--- a/lib/std/builtin.zig
+++ b/lib/std/builtin.zig
@@ -1,3 +1,5 @@
+//! Types and values provided by the Zig language.
+
const builtin = @import("builtin");
/// `explicit_subsystem` is missing when the subsystem is automatically detected,
diff --git a/lib/std/compress.zig b/lib/std/compress.zig
index a6d0a40b26..200489c18a 100644
--- a/lib/std/compress.zig
+++ b/lib/std/compress.zig
@@ -1,3 +1,5 @@
+//! Compression algorithms.
+
const std = @import("std.zig");
pub const flate = @import("compress/flate.zig");
diff --git a/lib/std/crypto.zig b/lib/std/crypto.zig
index c548f01c07..fe6e96c2dd 100644
--- a/lib/std/crypto.zig
+++ b/lib/std/crypto.zig
@@ -1,3 +1,5 @@
+//! Cryptography.
+
const root = @import("root");
/// Authenticated Encryption with Associated Data
diff --git a/lib/std/dwarf.zig b/lib/std/dwarf.zig
index 2544f35c42..aeeff5f41d 100644
--- a/lib/std/dwarf.zig
+++ b/lib/std/dwarf.zig
@@ -1,3 +1,5 @@
+//! DWARF debugging data format.
+
const builtin = @import("builtin");
const std = @import("std.zig");
const debug = std.debug;
diff --git a/lib/std/elf.zig b/lib/std/elf.zig
index e40c215e83..bdb84f5171 100644
--- a/lib/std/elf.zig
+++ b/lib/std/elf.zig
@@ -1,3 +1,5 @@
+//! Executable and Linkable Format.
+
const std = @import("std.zig");
const math = std.math;
const mem = std.mem;
diff --git a/lib/std/fmt.zig b/lib/std/fmt.zig
index 262d51bcee..3594516287 100644
--- a/lib/std/fmt.zig
+++ b/lib/std/fmt.zig
@@ -1,3 +1,5 @@
+//! String formatting and parsing.
+
const std = @import("std.zig");
const builtin = @import("builtin");
diff --git a/lib/std/fs.zig b/lib/std/fs.zig
index 6eba6fcb92..c9294e727a 100644
--- a/lib/std/fs.zig
+++ b/lib/std/fs.zig
@@ -1,3 +1,5 @@
+//! File System.
+
const std = @import("std.zig");
const builtin = @import("builtin");
const root = @import("root");
diff --git a/lib/std/io/Writer.zig b/lib/std/io/Writer.zig
index 82040efc3a..95a7bd5d6a 100644
--- a/lib/std/io/Writer.zig
+++ b/lib/std/io/Writer.zig
@@ -58,3 +58,14 @@ pub fn writeStruct(self: Self, value: anytype) anyerror!void {
comptime assert(@typeInfo(@TypeOf(value)).Struct.layout != .Auto);
return self.writeAll(mem.asBytes(&value));
}
+
+pub fn writeFile(self: Self, file: std.fs.File) anyerror!void {
+ // TODO: figure out how to adjust std lib abstractions so that this ends up
+ // doing sendfile or maybe even copy_file_range under the right conditions.
+ var buf: [4000]u8 = undefined;
+ while (true) {
+ const n = try file.readAll(&buf);
+ try self.writeAll(buf[0..n]);
+ if (n < buf.len) return;
+ }
+}
diff --git a/lib/std/net.zig b/lib/std/net.zig
index 66b90867c6..e68adc4207 100644
--- a/lib/std/net.zig
+++ b/lib/std/net.zig
@@ -1,3 +1,5 @@
+//! Cross-platform networking abstractions.
+
const std = @import("std.zig");
const builtin = @import("builtin");
const assert = std.debug.assert;
diff --git a/lib/std/simd.zig b/lib/std/simd.zig
index c3ae2d8dbe..7fe9b839cc 100644
--- a/lib/std/simd.zig
+++ b/lib/std/simd.zig
@@ -1,7 +1,9 @@
-//! This module provides functions for working conveniently with SIMD (Single Instruction; Multiple Data),
-//! which may offer a potential boost in performance on some targets by performing the same operations on
-//! multiple elements at once.
-//! Please be aware that some functions are known to not work on MIPS.
+//! SIMD (Single Instruction; Multiple Data) convenience functions.
+//!
+//! May offer a potential boost in performance on some targets by performing
+//! the same operations on multiple elements at once.
+//!
+//! Some functions are known to not work on MIPS.
const std = @import("std");
const builtin = @import("builtin");
diff --git a/lib/std/std.zig b/lib/std/std.zig
index 0781c877cf..557b320c24 100644
--- a/lib/std/std.zig
+++ b/lib/std/std.zig
@@ -55,149 +55,56 @@ pub const Tz = tz.Tz;
pub const Uri = @import("Uri.zig");
pub const array_hash_map = @import("array_hash_map.zig");
-
-/// Memory ordering, atomic data structures, and operations.
pub const atomic = @import("atomic.zig");
-
-/// Base64 encoding/decoding.
pub const base64 = @import("base64.zig");
-
-/// Bit manipulation data structures.
pub const bit_set = @import("bit_set.zig");
-
-/// Comptime-available information about the build environment, such as the target and optimize mode.
pub const builtin = @import("builtin.zig");
-
pub const c = @import("c.zig");
-
-/// COFF format.
pub const coff = @import("coff.zig");
-
-/// Compression algorithms such as zlib, zstd, etc.
pub const compress = @import("compress.zig");
-
pub const comptime_string_map = @import("comptime_string_map.zig");
-
-/// Cryptography.
pub const crypto = @import("crypto.zig");
-
-/// Debug printing, allocation and other debug helpers.
pub const debug = @import("debug.zig");
-
-/// DWARF debugging data format.
pub const dwarf = @import("dwarf.zig");
-
-/// ELF format.
pub const elf = @import("elf.zig");
-
-/// Enum-related metaprogramming helpers.
pub const enums = @import("enums.zig");
-
-/// First in, first out data structures.
pub const fifo = @import("fifo.zig");
-
-/// String formatting and parsing (e.g. parsing numbers out of strings).
pub const fmt = @import("fmt.zig");
-
-/// File system-related functionality.
pub const fs = @import("fs.zig");
-
-/// GPU programming helpers.
pub const gpu = @import("gpu.zig");
-
-/// Fast hashing functions (i.e. not cryptographically secure).
pub const hash = @import("hash.zig");
pub const hash_map = @import("hash_map.zig");
-
-/// Allocator implementations.
pub const heap = @import("heap.zig");
-
-/// HTTP client and server.
pub const http = @import("http.zig");
-
-/// I/O streams, reader/writer interfaces and common helpers.
pub const io = @import("io.zig");
-
-/// JSON parsing and serialization.
pub const json = @import("json.zig");
-
-/// LEB128 encoding.
pub const leb = @import("leb128.zig");
-
-/// A standardized interface for logging.
pub const log = @import("log.zig");
-
-/// Mach-O format.
pub const macho = @import("macho.zig");
-
-/// Mathematical constants and operations.
pub const math = @import("math.zig");
-
-/// Functions for comparing, searching, and manipulating memory.
pub const mem = @import("mem.zig");
-
-/// Metaprogramming helpers.
pub const meta = @import("meta.zig");
-
-/// Networking.
pub const net = @import("net.zig");
-
-/// POSIX-like API layer.
pub const posix = @import("os.zig");
-
/// Non-portable Operating System-specific API.
pub const os = @import("os.zig");
-
pub const once = @import("once.zig").once;
-
-/// A set of array and slice types that bit-pack integer elements.
pub const packed_int_array = @import("packed_int_array.zig");
-
-/// PDB file format.
pub const pdb = @import("pdb.zig");
-
-/// Accessors for process-related info (e.g. command line arguments)
-/// and spawning of child processes.
pub const process = @import("process.zig");
-
/// Deprecated: use `Random` instead.
pub const rand = Random;
-
-/// Sorting.
pub const sort = @import("sort.zig");
-
-/// Single Instruction Multiple Data (SIMD) helpers.
pub const simd = @import("simd.zig");
-
-/// ASCII text processing.
pub const ascii = @import("ascii.zig");
-
-/// Tar archive format compression/decompression.
pub const tar = @import("tar.zig");
-
-/// Testing allocator, testing assertions, and other helpers for testing code.
pub const testing = @import("testing.zig");
-
-/// Sleep, obtaining the current time, conversion constants, and more.
pub const time = @import("time.zig");
-
-/// Time zones.
pub const tz = @import("tz.zig");
-
-/// UTF-8 and UTF-16LE encoding/decoding.
pub const unicode = @import("unicode.zig");
-
-/// Helpers for integrating with Valgrind.
pub const valgrind = @import("valgrind.zig");
-
-/// Constants and types representing the Wasm binary format.
pub const wasm = @import("wasm.zig");
-
-/// Builds of the Zig compiler are distributed partly in source form. That
-/// source lives here. These APIs are provided as-is and have absolutely no API
-/// guarantees whatsoever.
pub const zig = @import("zig.zig");
-
pub const start = @import("start.zig");
const root = @import("root");
diff --git a/lib/std/tar.zig b/lib/std/tar.zig
index af900b3880..121e7db248 100644
--- a/lib/std/tar.zig
+++ b/lib/std/tar.zig
@@ -1,23 +1,25 @@
-/// Tar archive is single ordinary file which can contain many files (or
-/// directories, symlinks, ...). It's build by series of blocks each size of 512
-/// bytes. First block of each entry is header which defines type, name, size
-/// permissions and other attributes. Header is followed by series of blocks of
-/// file content, if any that entry has content. Content is padded to the block
-/// size, so next header always starts at block boundary.
-///
-/// This simple format is extended by GNU and POSIX pax extensions to support
-/// file names longer than 256 bytes and additional attributes.
-///
-/// This is not comprehensive tar parser. Here we are only file types needed to
-/// support Zig package manager; normal file, directory, symbolic link. And
-/// subset of attributes: name, size, permissions.
-///
-/// GNU tar reference: https://www.gnu.org/software/tar/manual/html_node/Standard.html
-/// pax reference: https://pubs.opengroup.org/onlinepubs/9699919799/utilities/pax.html#tag_20_92_13
-///
+//! Tar archive is single ordinary file which can contain many files (or
+//! directories, symlinks, ...). It's build by series of blocks each size of 512
+//! bytes. First block of each entry is header which defines type, name, size
+//! permissions and other attributes. Header is followed by series of blocks of
+//! file content, if any that entry has content. Content is padded to the block
+//! size, so next header always starts at block boundary.
+//!
+//! This simple format is extended by GNU and POSIX pax extensions to support
+//! file names longer than 256 bytes and additional attributes.
+//!
+//! This is not comprehensive tar parser. Here we are only file types needed to
+//! support Zig package manager; normal file, directory, symbolic link. And
+//! subset of attributes: name, size, permissions.
+//!
+//! GNU tar reference: https://www.gnu.org/software/tar/manual/html_node/Standard.html
+//! pax reference: https://pubs.opengroup.org/onlinepubs/9699919799/utilities/pax.html#tag_20_92_13
+
const std = @import("std.zig");
const assert = std.debug.assert;
+pub const output = @import("tar/output.zig");
+
pub const Options = struct {
/// Number of directory levels to skip when extracting files.
strip_components: u32 = 0,
diff --git a/lib/std/tar/output.zig b/lib/std/tar/output.zig
new file mode 100644
index 0000000000..73cfca58b1
--- /dev/null
+++ b/lib/std/tar/output.zig
@@ -0,0 +1,85 @@
+/// A struct that is exactly 512 bytes and matches tar file format. This is
+/// intended to be used for outputting tar files; for parsing there is
+/// `std.tar.Header`.
+pub const Header = extern struct {
+ // This struct was originally copied from
+ // https://github.com/mattnite/tar/blob/main/src/main.zig which is MIT
+ // licensed.
+
+ name: [100]u8,
+ mode: [7:0]u8,
+ uid: [7:0]u8,
+ gid: [7:0]u8,
+ size: [11:0]u8,
+ mtime: [11:0]u8,
+ checksum: [7:0]u8,
+ typeflag: FileType,
+ linkname: [100]u8,
+ magic: [5:0]u8,
+ version: [2]u8,
+ uname: [31:0]u8,
+ gname: [31:0]u8,
+ devmajor: [7:0]u8,
+ devminor: [7:0]u8,
+ prefix: [155]u8,
+ pad: [12]u8,
+
+ pub const FileType = enum(u8) {
+ regular = '0',
+ hard_link = '1',
+ symbolic_link = '2',
+ character = '3',
+ block = '4',
+ directory = '5',
+ fifo = '6',
+ reserved = '7',
+ pax_global = 'g',
+ extended = 'x',
+ _,
+ };
+
+ pub fn init() Header {
+ var ret = std.mem.zeroes(Header);
+ ret.magic = [_:0]u8{ 'u', 's', 't', 'a', 'r' };
+ ret.version = [_:0]u8{ '0', '0' };
+ return ret;
+ }
+
+ pub fn setPath(self: *Header, prefix: []const u8, path: []const u8) !void {
+ if (prefix.len + 1 + path.len > 100) {
+ var i: usize = 0;
+ while (i < path.len and path.len - i > 100) {
+ while (path[i] != '/') : (i += 1) {}
+ }
+
+ _ = try std.fmt.bufPrint(&self.prefix, "{s}/{s}", .{ prefix, path[0..i] });
+ _ = try std.fmt.bufPrint(&self.name, "{s}", .{path[i + 1 ..]});
+ } else {
+ _ = try std.fmt.bufPrint(&self.name, "{s}/{s}", .{ prefix, path });
+ }
+ }
+
+ pub fn setSize(self: *Header, size: u64) !void {
+ _ = try std.fmt.bufPrint(&self.size, "{o:0>11}", .{size});
+ }
+
+ pub fn updateChecksum(self: *Header) !void {
+ const offset = @offsetOf(Header, "checksum");
+ var checksum: usize = 0;
+ for (std.mem.asBytes(self), 0..) |val, i| {
+ checksum += if (i >= offset and i < offset + @sizeOf(@TypeOf(self.checksum)))
+ ' '
+ else
+ val;
+ }
+
+ _ = try std.fmt.bufPrint(&self.checksum, "{o:0>7}", .{checksum});
+ }
+
+ comptime {
+ assert(@sizeOf(Header) == 512);
+ }
+};
+
+const std = @import("../std.zig");
+const assert = std.debug.assert;
diff --git a/lib/std/zig.zig b/lib/std/zig.zig
index 46fb562310..c887030ae6 100644
--- a/lib/std/zig.zig
+++ b/lib/std/zig.zig
@@ -1,3 +1,7 @@
+//! Builds of the Zig compiler are distributed partly in source form. That
+//! source lives here. These APIs are provided as-is and have absolutely no API
+//! guarantees whatsoever.
+
pub const ErrorBundle = @import("zig/ErrorBundle.zig");
pub const Server = @import("zig/Server.zig");
pub const Client = @import("zig/Client.zig");
diff --git a/src/Autodoc.zig b/src/Autodoc.zig
deleted file mode 100644
index e93884eb2c..0000000000
--- a/src/Autodoc.zig
+++ /dev/null
@@ -1,6035 +0,0 @@
-const builtin = @import("builtin");
-const std = @import("std");
-const build_options = @import("build_options");
-const Ast = std.zig.Ast;
-const Autodoc = @This();
-const Compilation = @import("Compilation.zig");
-const Zcu = @import("Module.zig");
-const File = Zcu.File;
-const Module = @import("Package.zig").Module;
-const Tokenizer = std.zig.Tokenizer;
-const InternPool = @import("InternPool.zig");
-const Zir = std.zig.Zir;
-const Ref = Zir.Inst.Ref;
-const log = std.log.scoped(.autodoc);
-const renderer = @import("autodoc/render_source.zig");
-
-zcu: *Zcu,
-arena: std.mem.Allocator,
-
-// The goal of autodoc is to fill up these arrays
-// that will then be serialized as JSON and consumed
-// by the JS frontend.
-modules: std.AutoArrayHashMapUnmanaged(*Module, DocData.DocModule) = .{},
-files: std.AutoArrayHashMapUnmanaged(*File, usize) = .{},
-calls: std.ArrayListUnmanaged(DocData.Call) = .{},
-types: std.ArrayListUnmanaged(DocData.Type) = .{},
-decls: std.ArrayListUnmanaged(DocData.Decl) = .{},
-exprs: std.ArrayListUnmanaged(DocData.Expr) = .{},
-ast_nodes: std.ArrayListUnmanaged(DocData.AstNode) = .{},
-comptime_exprs: std.ArrayListUnmanaged(DocData.ComptimeExpr) = .{},
-guide_sections: std.ArrayListUnmanaged(Section) = .{},
-
-// These fields hold temporary state of the analysis process
-// and are mainly used by the decl path resolving algorithm.
-pending_ref_paths: std.AutoHashMapUnmanaged(
- *DocData.Expr, // pointer to declpath tail end (ie `&decl_path[decl_path.len - 1]`)
- std.ArrayListUnmanaged(RefPathResumeInfo),
-) = .{},
-ref_paths_pending_on_decls: std.AutoHashMapUnmanaged(
- *Scope.DeclStatus,
- std.ArrayListUnmanaged(RefPathResumeInfo),
-) = .{},
-ref_paths_pending_on_types: std.AutoHashMapUnmanaged(
- usize,
- std.ArrayListUnmanaged(RefPathResumeInfo),
-) = .{},
-
-/// A set of ZIR instruction refs which have a meaning other than the
-/// instruction they refer to. For instance, during analysis of the arguments to
-/// a `call`, the index of the `call` itself is repurposed to refer to the
-/// parameter type.
-/// TODO: there should be some kind of proper handling for these instructions;
-/// currently we just ignore them!
-repurposed_insts: std.AutoHashMapUnmanaged(Zir.Inst.Index, void) = .{},
-
-const RefPathResumeInfo = struct {
- file: *File,
- ref_path: []DocData.Expr,
-};
-
-/// Used to accumulate src_node offsets.
-/// In ZIR, all ast node indices are relative to the parent decl.
-/// More concretely, `union_decl`, `struct_decl`, `enum_decl` and `opaque_decl`
-/// and the value of each of their decls participate in the relative offset
-/// counting, and nothing else.
-/// We keep track of the line and byte values for these instructions in order
-/// to avoid tokenizing every file (on new lines) from the start every time.
-const SrcLocInfo = struct {
- bytes: u32 = 0,
- line: usize = 0,
- src_node: u32 = 0,
-};
-
-const Section = struct {
- name: []const u8 = "", // empty string is the default section
- guides: std.ArrayListUnmanaged(Guide) = .{},
-
- const Guide = struct {
- name: []const u8,
- body: []const u8,
- };
-};
-
-pub fn generate(zcu: *Zcu, output_dir: std.fs.Dir) !void {
- var arena_allocator = std.heap.ArenaAllocator.init(zcu.gpa);
- defer arena_allocator.deinit();
- var autodoc: Autodoc = .{
- .zcu = zcu,
- .arena = arena_allocator.allocator(),
- };
- try autodoc.generateZirData(output_dir);
-
- const lib_dir = zcu.comp.zig_lib_directory.handle;
- try lib_dir.copyFile("docs/main.js", output_dir, "main.js", .{});
- try lib_dir.copyFile("docs/ziglexer.js", output_dir, "ziglexer.js", .{});
- try lib_dir.copyFile("docs/commonmark.js", output_dir, "commonmark.js", .{});
- try lib_dir.copyFile("docs/index.html", output_dir, "index.html", .{});
-}
-
-fn generateZirData(self: *Autodoc, output_dir: std.fs.Dir) !void {
- const root_src_path = self.zcu.main_mod.root_src_path;
- const joined_src_path = try self.zcu.main_mod.root.joinString(self.arena, root_src_path);
- defer self.arena.free(joined_src_path);
-
- const abs_root_src_path = try std.fs.path.resolve(self.arena, &.{ ".", joined_src_path });
- defer self.arena.free(abs_root_src_path);
-
- const file = self.zcu.import_table.get(abs_root_src_path).?; // file is expected to be present in the import table
- // Append all the types in Zir.Inst.Ref.
- {
- comptime std.debug.assert(@intFromEnum(InternPool.Index.first_type) == 0);
- var i: u32 = 0;
- while (i <= @intFromEnum(InternPool.Index.last_type)) : (i += 1) {
- const ip_index = @as(InternPool.Index, @enumFromInt(i));
- var tmpbuf = std.ArrayList(u8).init(self.arena);
- if (ip_index == .generic_poison_type) {
- // Not a real type, doesn't have a normal name
- try tmpbuf.writer().writeAll("(generic poison)");
- } else {
- try @import("type.zig").Type.fromInterned(ip_index).fmt(self.zcu).format("", .{}, tmpbuf.writer());
- }
- try self.types.append(
- self.arena,
- switch (ip_index) {
- .u0_type,
- .i0_type,
- .u1_type,
- .u8_type,
- .i8_type,
- .u16_type,
- .i16_type,
- .u29_type,
- .u32_type,
- .i32_type,
- .u64_type,
- .i64_type,
- .u80_type,
- .u128_type,
- .i128_type,
- .usize_type,
- .isize_type,
- .c_char_type,
- .c_short_type,
- .c_ushort_type,
- .c_int_type,
- .c_uint_type,
- .c_long_type,
- .c_ulong_type,
- .c_longlong_type,
- .c_ulonglong_type,
- => .{
- .Int = .{ .name = try tmpbuf.toOwnedSlice() },
- },
- .f16_type,
- .f32_type,
- .f64_type,
- .f80_type,
- .f128_type,
- .c_longdouble_type,
- => .{
- .Float = .{ .name = try tmpbuf.toOwnedSlice() },
- },
- .comptime_int_type => .{
- .ComptimeInt = .{ .name = try tmpbuf.toOwnedSlice() },
- },
- .comptime_float_type => .{
- .ComptimeFloat = .{ .name = try tmpbuf.toOwnedSlice() },
- },
-
- .anyopaque_type => .{
- .ComptimeExpr = .{ .name = try tmpbuf.toOwnedSlice() },
- },
-
- .bool_type => .{
- .Bool = .{ .name = try tmpbuf.toOwnedSlice() },
- },
- .noreturn_type => .{
- .NoReturn = .{ .name = try tmpbuf.toOwnedSlice() },
- },
- .void_type => .{
- .Void = .{ .name = try tmpbuf.toOwnedSlice() },
- },
- .type_info_type => .{
- .ComptimeExpr = .{ .name = try tmpbuf.toOwnedSlice() },
- },
- .type_type => .{
- .Type = .{ .name = try tmpbuf.toOwnedSlice() },
- },
- .anyerror_type => .{
- .ErrorSet = .{ .name = try tmpbuf.toOwnedSlice() },
- },
- // should be different types but if we don't analyze std we don't get the ast nodes etc.
- // since they're defined in std.builtin
- .calling_convention_type,
- .atomic_order_type,
- .atomic_rmw_op_type,
- .address_space_type,
- .float_mode_type,
- .reduce_op_type,
- .call_modifier_type,
- .prefetch_options_type,
- .export_options_type,
- .extern_options_type,
- => .{
- .Type = .{ .name = try tmpbuf.toOwnedSlice() },
- },
- .manyptr_u8_type => .{
- .Pointer = .{
- .size = .Many,
- .child = .{ .type = @intFromEnum(InternPool.Index.u8_type) },
- .is_mutable = true,
- },
- },
- .manyptr_const_u8_type => .{
- .Pointer = .{
- .size = .Many,
- .child = .{ .type = @intFromEnum(InternPool.Index.u8_type) },
- },
- },
- .manyptr_const_u8_sentinel_0_type => .{
- .Pointer = .{
- .size = .Many,
- .child = .{ .type = @intFromEnum(InternPool.Index.u8_type) },
- .sentinel = .{ .int = .{ .value = 0 } },
- },
- },
- .single_const_pointer_to_comptime_int_type => .{
- .Pointer = .{
- .size = .One,
- .child = .{ .type = @intFromEnum(InternPool.Index.comptime_int_type) },
- },
- },
- .slice_const_u8_type => .{
- .Pointer = .{
- .size = .Slice,
- .child = .{ .type = @intFromEnum(InternPool.Index.u8_type) },
- },
- },
- .slice_const_u8_sentinel_0_type => .{
- .Pointer = .{
- .size = .Slice,
- .child = .{ .type = @intFromEnum(InternPool.Index.u8_type) },
- .sentinel = .{ .int = .{ .value = 0 } },
- },
- },
- // Not fully correct
- // since it actually has no src or line_number
- .empty_struct_type => .{
- .Struct = .{
- .name = "",
- .src = 0,
- .is_tuple = false,
- .line_number = 0,
- .parent_container = null,
- .layout = null,
- },
- },
- .anyerror_void_error_union_type => .{
- .ErrorUnion = .{
- .lhs = .{ .type = @intFromEnum(InternPool.Index.anyerror_type) },
- .rhs = .{ .type = @intFromEnum(InternPool.Index.void_type) },
- },
- },
- .anyframe_type => .{
- .AnyFrame = .{ .name = try tmpbuf.toOwnedSlice() },
- },
- .enum_literal_type => .{
- .EnumLiteral = .{ .name = try tmpbuf.toOwnedSlice() },
- },
- .undefined_type => .{
- .Undefined = .{ .name = try tmpbuf.toOwnedSlice() },
- },
- .null_type => .{
- .Null = .{ .name = try tmpbuf.toOwnedSlice() },
- },
- .optional_noreturn_type => .{
- .Optional = .{
- .name = try tmpbuf.toOwnedSlice(),
- .child = .{ .type = @intFromEnum(InternPool.Index.noreturn_type) },
- },
- },
- // Poison and special tag
- .generic_poison_type,
- .var_args_param_type,
- .adhoc_inferred_error_set_type,
- => .{
- .Type = .{ .name = try tmpbuf.toOwnedSlice() },
- },
- // We want to catch new types added to InternPool.Index
- else => unreachable,
- },
- );
- }
- }
-
- const rootName = blk: {
- const rootName = std.fs.path.basename(self.zcu.main_mod.root_src_path);
- break :blk rootName[0 .. rootName.len - 4];
- };
-
- const main_type_index = self.types.items.len;
- {
- try self.modules.put(self.arena, self.zcu.main_mod, .{
- .name = rootName,
- .main = main_type_index,
- .table = .{},
- });
- try self.modules.entries.items(.value)[0].table.put(
- self.arena,
- self.zcu.main_mod,
- .{
- .name = rootName,
- .value = 0,
- },
- );
- }
-
- var root_scope = Scope{
- .parent = null,
- .enclosing_type = null,
- };
-
- const tldoc_comment = try self.getTLDocComment(file);
- const cleaned_tldoc_comment = try self.findGuidePaths(file, tldoc_comment);
- defer self.arena.free(cleaned_tldoc_comment);
- try self.ast_nodes.append(self.arena, .{
- .name = "(root)",
- .docs = cleaned_tldoc_comment,
- });
- try self.files.put(self.arena, file, main_type_index);
-
- _ = try self.walkInstruction(
- file,
- &root_scope,
- .{},
- .main_struct_inst,
- false,
- null,
- );
-
- if (self.ref_paths_pending_on_decls.count() > 0) {
- @panic("some decl paths were never fully analyzed (pending on decls)");
- }
-
- if (self.ref_paths_pending_on_types.count() > 0) {
- @panic("some decl paths were never fully analyzed (pending on types)");
- }
-
- if (self.pending_ref_paths.count() > 0) {
- @panic("some decl paths were never fully analyzed");
- }
-
- var data = DocData{
- .modules = self.modules,
- .files = self.files,
- .calls = self.calls.items,
- .types = self.types.items,
- .decls = self.decls.items,
- .exprs = self.exprs.items,
- .astNodes = self.ast_nodes.items,
- .comptimeExprs = self.comptime_exprs.items,
- .guideSections = self.guide_sections,
- };
-
- inline for (comptime std.meta.tags(std.meta.FieldEnum(DocData))) |f| {
- const field_name = @tagName(f);
- const file_name = "data-" ++ field_name ++ ".js";
- const data_js_f = try output_dir.createFile(file_name, .{});
- defer data_js_f.close();
-
- var buffer = std.io.bufferedWriter(data_js_f.writer());
- const out = buffer.writer();
-
- try out.print("var {s} =", .{field_name});
-
- var jsw = std.json.writeStream(out, .{
- .whitespace = .minified,
- .emit_null_optional_fields = true,
- });
-
- switch (f) {
- .files => try writeFileTableToJson(data.files, data.modules, &jsw),
- .guideSections => try writeGuidesToJson(data.guideSections, &jsw),
- .modules => try jsw.write(data.modules.values()),
- else => try jsw.write(@field(data, field_name)),
- }
-
- // try std.json.stringifyArbitraryDepth(
- // self.arena,
- // @field(data, field.name),
- // .{
- // .whitespace = .minified,
- // .emit_null_optional_fields = true,
- // },
- // out,
- // );
- try out.print(";", .{});
-
- // last thing (that can fail) that we do is flush
- try buffer.flush();
- }
-
- {
- output_dir.makeDir("src") catch |e| switch (e) {
- error.PathAlreadyExists => {},
- else => |err| return err,
- };
- const html_dir = try output_dir.openDir("src", .{});
-
- var files_iterator = self.files.iterator();
-
- while (files_iterator.next()) |entry| {
- const sub_file_path = entry.key_ptr.*.sub_file_path;
- const file_module = entry.key_ptr.*.mod;
- const module_name = (self.modules.get(file_module) orelse continue).name;
-
- const file_path = std.fs.path.dirname(sub_file_path) orelse "";
- const file_name = if (file_path.len > 0) sub_file_path[file_path.len + 1 ..] else sub_file_path;
-
- const html_file_name = try std.mem.concat(self.arena, u8, &.{ file_name, ".html" });
- defer self.arena.free(html_file_name);
-
- const dir_name = try std.fs.path.join(self.arena, &.{ module_name, file_path });
- defer self.arena.free(dir_name);
-
- var dir = try html_dir.makeOpenPath(dir_name, .{});
- defer dir.close();
-
- const html_file = dir.createFile(html_file_name, .{}) catch |err| switch (err) {
- error.PathAlreadyExists => try dir.openFile(html_file_name, .{}),
- else => return err,
- };
- defer html_file.close();
- var buffer = std.io.bufferedWriter(html_file.writer());
-
- const out = buffer.writer();
-
- try renderer.genHtml(self.zcu.gpa, entry.key_ptr.*, out);
- try buffer.flush();
- }
- }
-}
-
-/// Represents a chain of scopes, used to resolve decl references to the
-/// corresponding entry in `self.decls`. It also keeps track of whether
-/// a given decl has been analyzed or not.
-const Scope = struct {
- parent: ?*Scope,
- map: std.AutoHashMapUnmanaged(
- Zir.NullTerminatedString, // index into the current file's string table (decl name)
- *DeclStatus,
- ) = .{},
- captures: []const Zir.Inst.Capture = &.{},
- enclosing_type: ?usize, // index into `types`, null = file top-level struct
-
- pub const DeclStatus = union(enum) {
- Analyzed: usize, // index into `decls`
- Pending,
- NotRequested: u32, // instr_index
- };
-
- fn getCapture(scope: Scope, idx: u16) struct {
- union(enum) { inst: Zir.Inst.Index, decl: Zir.NullTerminatedString },
- *Scope,
- } {
- const parent = scope.parent.?;
- return switch (scope.captures[idx].unwrap()) {
- .nested => |parent_idx| parent.getCapture(parent_idx),
- .instruction => |inst| .{
- .{ .inst = inst },
- parent,
- },
- .decl_val, .decl_ref => |str| .{
- .{ .decl = str },
- parent,
- },
- };
- }
-
- /// Returns a pointer so that the caller has a chance to modify the value
- /// in case they decide to start analyzing a previously not requested decl.
- /// Another reason is that in some places we use the pointer to uniquely
- /// refer to a decl, as we wait for it to be analyzed. This means that
- /// those pointers must stay stable.
- pub fn resolveDeclName(self: Scope, string_table_idx: Zir.NullTerminatedString, file: *File, inst: Zir.Inst.OptionalIndex) *DeclStatus {
- var cur: ?*const Scope = &self;
- return while (cur) |s| : (cur = s.parent) {
- break s.map.get(string_table_idx) orelse continue;
- } else {
- printWithOptionalContext(
- file,
- inst,
- "Could not find `{s}`\n\n",
- .{file.zir.nullTerminatedString(string_table_idx)},
- );
- unreachable;
- };
- }
-
- pub fn insertDeclRef(
- self: *Scope,
- arena: std.mem.Allocator,
- decl_name_index: Zir.NullTerminatedString, // index into the current file's string table
- decl_status: DeclStatus,
- ) !void {
- const decl_status_ptr = try arena.create(DeclStatus);
- errdefer arena.destroy(decl_status_ptr);
-
- decl_status_ptr.* = decl_status;
- try self.map.put(arena, decl_name_index, decl_status_ptr);
- }
-};
-
-/// The output of our analysis process.
-const DocData = struct {
- // NOTE: editing fields of DocData requires also updating:
- // - the deployment script for ziglang.org
- // - imports in index.html
- typeKinds: []const []const u8 = std.meta.fieldNames(DocTypeKinds),
- rootMod: u32 = 0,
- modules: std.AutoArrayHashMapUnmanaged(*Module, DocModule),
-
- // non-hardcoded stuff
- astNodes: []AstNode,
- calls: []Call,
- files: std.AutoArrayHashMapUnmanaged(*File, usize),
- types: []Type,
- decls: []Decl,
- exprs: []Expr,
- comptimeExprs: []ComptimeExpr,
-
- guideSections: std.ArrayListUnmanaged(Section),
-
- const Call = struct {
- func: Expr,
- args: []Expr,
- ret: Expr,
- };
-
- /// All the type "families" as described by `std.builtin.TypeId`
- /// plus a couple extra that are unique to our use case.
- ///
- /// `Unanalyzed` is used so that we can refer to types that have started
- /// analysis but that haven't been fully analyzed yet (in case we find
- /// self-referential stuff, like `@This()`).
- ///
- /// `ComptimeExpr` represents the result of a piece of comptime logic
- /// that we weren't able to analyze fully. Examples of that are comptime
- /// function calls and comptime if / switch / ... expressions.
- const DocTypeKinds = @typeInfo(Type).Union.tag_type.?;
-
- const ComptimeExpr = struct {
- code: []const u8,
- };
- const DocModule = struct {
- name: []const u8 = "(root)",
- file: usize = 0, // index into `files`
- main: usize = 0, // index into `types`
- table: std.AutoHashMapUnmanaged(*Module, TableEntry),
- pub const TableEntry = struct {
- name: []const u8,
- value: usize,
- };
-
- pub fn jsonStringify(self: DocModule, jsw: anytype) !void {
- try jsw.beginObject();
- inline for (comptime std.meta.tags(std.meta.FieldEnum(DocModule))) |f| {
- const f_name = @tagName(f);
- try jsw.objectField(f_name);
- switch (f) {
- .table => try writeModuleTableToJson(self.table, jsw),
- else => try jsw.write(@field(self, f_name)),
- }
- }
- try jsw.endObject();
- }
- };
-
- const Decl = struct {
- name: []const u8,
- kind: []const u8,
- src: usize, // index into astNodes
- value: WalkResult,
- // The index in astNodes of the `test declname { }` node
- decltest: ?usize = null,
- is_uns: bool = false, // usingnamespace
- parent_container: ?usize, // index into `types`
-
- pub fn jsonStringify(self: Decl, jsw: anytype) !void {
- try jsw.beginArray();
- inline for (comptime std.meta.fields(Decl)) |f| {
- try jsw.write(@field(self, f.name));
- }
- try jsw.endArray();
- }
- };
-
- const AstNode = struct {
- file: usize = 0, // index into files
- line: usize = 0,
- col: usize = 0,
- name: ?[]const u8 = null,
- code: ?[]const u8 = null,
- docs: ?[]const u8 = null,
- fields: ?[]usize = null, // index into astNodes
- @"comptime": bool = false,
-
- pub fn jsonStringify(self: AstNode, jsw: anytype) !void {
- try jsw.beginArray();
- inline for (comptime std.meta.fields(AstNode)) |f| {
- try jsw.write(@field(self, f.name));
- }
- try jsw.endArray();
- }
- };
-
- const Type = union(enum) {
- Unanalyzed: struct {},
- Type: struct { name: []const u8 },
- Void: struct { name: []const u8 },
- Bool: struct { name: []const u8 },
- NoReturn: struct { name: []const u8 },
- Int: struct { name: []const u8 },
- Float: struct { name: []const u8 },
- Pointer: struct {
- size: std.builtin.Type.Pointer.Size,
- child: Expr,
- sentinel: ?Expr = null,
- @"align": ?Expr = null,
- address_space: ?Expr = null,
- bit_start: ?Expr = null,
- host_size: ?Expr = null,
- is_ref: bool = false,
- is_allowzero: bool = false,
- is_mutable: bool = false,
- is_volatile: bool = false,
- has_sentinel: bool = false,
- has_align: bool = false,
- has_addrspace: bool = false,
- has_bit_range: bool = false,
- },
- Array: struct {
- len: Expr,
- child: Expr,
- sentinel: ?Expr = null,
- },
- Struct: struct {
- name: []const u8,
- src: usize, // index into astNodes
- privDecls: []usize = &.{}, // index into decls
- pubDecls: []usize = &.{}, // index into decls
- field_types: []Expr = &.{}, // (use src->fields to find names)
- field_defaults: []?Expr = &.{}, // default values is specified
- backing_int: ?Expr = null, // backing integer if specified
- is_tuple: bool,
- line_number: usize,
- parent_container: ?usize, // index into `types`
- layout: ?Expr, // if different than Auto
- },
- ComptimeExpr: struct { name: []const u8 },
- ComptimeFloat: struct { name: []const u8 },
- ComptimeInt: struct { name: []const u8 },
- Undefined: struct { name: []const u8 },
- Null: struct { name: []const u8 },
- Optional: struct {
- name: []const u8,
- child: Expr,
- },
- ErrorUnion: struct { lhs: Expr, rhs: Expr },
- InferredErrorUnion: struct { payload: Expr },
- ErrorSet: struct {
- name: []const u8,
- fields: ?[]const Field = null,
- // TODO: fn field for inferred error sets?
- },
- Enum: struct {
- name: []const u8,
- src: usize, // index into astNodes
- privDecls: []usize = &.{}, // index into decls
- pubDecls: []usize = &.{}, // index into decls
- // (use src->fields to find field names)
- tag: ?Expr = null, // tag type if specified
- values: []?Expr = &.{}, // tag values if specified
- nonexhaustive: bool,
- parent_container: ?usize, // index into `types`
- },
- Union: struct {
- name: []const u8,
- src: usize, // index into astNodes
- privDecls: []usize = &.{}, // index into decls
- pubDecls: []usize = &.{}, // index into decls
- fields: []Expr = &.{}, // (use src->fields to find names)
- tag: ?Expr, // tag type if specified
- auto_enum: bool, // tag is an auto enum
- parent_container: ?usize, // index into `types`
- layout: ?Expr, // if different than Auto
- },
- Fn: struct {
- name: []const u8,
- src: ?usize = null, // index into `astNodes`
- ret: Expr,
- generic_ret: ?Expr = null,
- params: ?[]Expr = null, // (use src->fields to find names)
- lib_name: []const u8 = "",
- is_var_args: bool = false,
- is_inferred_error: bool = false,
- has_lib_name: bool = false,
- has_cc: bool = false,
- cc: ?usize = null,
- @"align": ?usize = null,
- has_align: bool = false,
- is_test: bool = false,
- is_extern: bool = false,
- },
- Opaque: struct {
- name: []const u8,
- src: usize, // index into astNodes
- privDecls: []usize = &.{}, // index into decls
- pubDecls: []usize = &.{}, // index into decls
- parent_container: ?usize, // index into `types`
- },
- Frame: struct { name: []const u8 },
- AnyFrame: struct { name: []const u8 },
- Vector: struct { name: []const u8 },
- EnumLiteral: struct { name: []const u8 },
-
- const Field = struct {
- name: []const u8,
- docs: []const u8,
- };
-
- pub fn jsonStringify(self: Type, jsw: anytype) !void {
- const active_tag = std.meta.activeTag(self);
- try jsw.beginArray();
- try jsw.write(@intFromEnum(active_tag));
- inline for (comptime std.meta.fields(Type)) |case| {
- if (@field(Type, case.name) == active_tag) {
- const current_value = @field(self, case.name);
- inline for (comptime std.meta.fields(case.type)) |f| {
- if (f.type == std.builtin.Type.Pointer.Size) {
- try jsw.write(@intFromEnum(@field(current_value, f.name)));
- } else {
- try jsw.write(@field(current_value, f.name));
- }
- }
- }
- }
- try jsw.endArray();
- }
- };
-
- /// An Expr represents the (untyped) result of analyzing instructions.
- /// The data is normalized, which means that an Expr that results in a
- /// type definition will hold an index into `self.types`.
- pub const Expr = union(enum) {
- comptimeExpr: usize, // index in `comptimeExprs`
- void: struct {},
- @"unreachable": struct {},
- null: struct {},
- undefined: struct {},
- @"struct": []FieldVal,
- fieldVal: FieldVal,
- bool: bool,
- @"anytype": struct {},
- @"&": usize, // index in `exprs`
- type: usize, // index in `types`
- this: usize, // index in `types`
- declRef: *Scope.DeclStatus,
- declIndex: usize, // index into `decls`, alternative repr for `declRef`
- declName: []const u8, // unresolved decl name
- builtinField: enum { len, ptr },
- fieldRef: FieldRef,
- refPath: []Expr,
- int: struct {
- value: u64, // direct value
- negated: bool = false,
- },
- int_big: struct {
- value: []const u8, // string representation
- negated: bool = false,
- },
- float: f64, // direct value
- float128: f128, // direct value
- array: []usize, // index in `exprs`
- call: usize, // index in `calls`
- enumLiteral: []const u8, // direct value
- typeOf: usize, // index in `exprs`
- typeOf_peer: []usize,
- errorUnion: usize, // index in `types`
- as: As,
- sizeOf: usize, // index in `exprs`
- bitSizeOf: usize, // index in `exprs`
- compileError: usize, // index in `exprs`
- optionalPayload: usize, // index in `exprs`
- elemVal: ElemVal,
- errorSets: usize,
- string: []const u8, // direct value
- sliceIndex: usize,
- slice: Slice,
- sliceLength: SliceLength,
- cmpxchgIndex: usize,
- cmpxchg: Cmpxchg,
- builtin: Builtin,
- builtinIndex: usize,
- builtinBin: BuiltinBin,
- builtinBinIndex: usize,
- unionInit: UnionInit,
- builtinCall: BuiltinCall,
- mulAdd: MulAdd,
- switchIndex: usize, // index in `exprs`
- switchOp: SwitchOp,
- unOp: UnOp,
- unOpIndex: usize,
- binOp: BinOp,
- binOpIndex: usize,
- load: usize, // index in `exprs`
- const UnOp = struct {
- param: usize, // index in `exprs`
- name: []const u8 = "", // tag name
- };
- const BinOp = struct {
- lhs: usize, // index in `exprs`
- rhs: usize, // index in `exprs`
- name: []const u8 = "", // tag name
- };
- const SwitchOp = struct {
- cond_index: usize,
- file_name: []const u8,
- src: usize,
- outer_decl: usize, // index in `types`
- };
- const BuiltinBin = struct {
- name: []const u8 = "", // fn name
- lhs: usize, // index in `exprs`
- rhs: usize, // index in `exprs`
- };
- const UnionInit = struct {
- type: usize, // index in `exprs`
- field: usize, // index in `exprs`
- init: usize, // index in `exprs`
- };
- const Builtin = struct {
- name: []const u8 = "", // fn name
- param: usize, // index in `exprs`
- };
- const BuiltinCall = struct {
- modifier: usize, // index in `exprs`
- function: usize, // index in `exprs`
- args: usize, // index in `exprs`
- };
- const MulAdd = struct {
- mulend1: usize, // index in `exprs`
- mulend2: usize, // index in `exprs`
- addend: usize, // index in `exprs`
- type: usize, // index in `exprs`
- };
- const Slice = struct {
- lhs: usize, // index in `exprs`
- start: usize,
- end: ?usize = null,
- sentinel: ?usize = null, // index in `exprs`
- };
- const SliceLength = struct {
- lhs: usize,
- start: usize,
- len: usize,
- sentinel: ?usize = null,
- };
- const Cmpxchg = struct {
- name: []const u8,
- type: usize,
- ptr: usize,
- expected_value: usize,
- new_value: usize,
- success_order: usize,
- failure_order: usize,
- };
- const As = struct {
- typeRefArg: ?usize, // index in `exprs`
- exprArg: usize, // index in `exprs`
- };
- const FieldRef = struct {
- type: usize, // index in `types`
- index: usize, // index in type.fields
- };
-
- const FieldVal = struct {
- name: []const u8,
- val: struct {
- typeRef: ?usize, // index in `exprs`
- expr: usize, // index in `exprs`
- },
- };
-
- const ElemVal = struct {
- lhs: usize, // index in `exprs`
- rhs: usize, // index in `exprs`
- };
-
- pub fn jsonStringify(self: Expr, jsw: anytype) !void {
- const active_tag = std.meta.activeTag(self);
- try jsw.beginObject();
- if (active_tag == .declIndex) {
- try jsw.objectField("declRef");
- } else {
- try jsw.objectField(@tagName(active_tag));
- }
- switch (self) {
- .int => {
- if (self.int.negated) {
- try jsw.write(-@as(i65, self.int.value));
- } else {
- try jsw.write(self.int.value);
- }
- },
- .builtinField => {
- try jsw.write(@tagName(self.builtinField));
- },
- .declRef => {
- try jsw.write(self.declRef.Analyzed);
- },
- else => {
- inline for (comptime std.meta.fields(Expr)) |case| {
- // TODO: this is super ugly, fix once `inline else` is a thing
- if (comptime std.mem.eql(u8, case.name, "builtinField"))
- continue;
- if (comptime std.mem.eql(u8, case.name, "declRef"))
- continue;
- if (@field(Expr, case.name) == active_tag) {
- try jsw.write(@field(self, case.name));
- }
- }
- },
- }
- try jsw.endObject();
- }
- };
-
- /// A WalkResult represents the result of the analysis process done to a
- /// a Zir instruction. Walk results carry type information either inferred
- /// from the context (eg string literals are pointers to null-terminated
- /// arrays), or because of @as() instructions.
- /// Since the type information is only needed in certain contexts, the
- /// underlying normalized data (Expr) is untyped.
- const WalkResult = struct {
- typeRef: ?Expr = null,
- expr: Expr,
- };
-};
-
-const AutodocErrors = error{
- OutOfMemory,
- CurrentWorkingDirectoryUnlinked,
- UnexpectedEndOfFile,
- ModuleNotFound,
- ImportOutsideModulePath,
-} || std.fs.File.OpenError || std.fs.File.ReadError;
-
-/// `call` instructions will have loopy references to themselves
-/// whenever an as_node is required for a complex expression.
-/// This type is used to keep track of dangerous instruction
-/// numbers that we definitely don't want to recurse into.
-const CallContext = struct {
- inst: Zir.Inst.Index,
- prev: ?*const CallContext,
-};
-
-/// Called when we need to analyze a Zir instruction.
-/// For example it gets called by `generateZirData` on instruction 0,
-/// which represents the top-level struct corresponding to the root file.
-/// Note that in some situations where we're analyzing code that only allows
-/// for a limited subset of Zig syntax, we don't always resort to calling
-/// `walkInstruction` and instead sometimes we handle Zir directly.
-/// The best example of that are instructions corresponding to function
-/// params, as those can only occur while analyzing a function definition.
-fn walkInstruction(
- self: *Autodoc,
- file: *File,
- parent_scope: *Scope,
- parent_src: SrcLocInfo,
- inst: Zir.Inst.Index,
- need_type: bool, // true if the caller needs us to provide also a typeRef
- call_ctx: ?*const CallContext,
-) AutodocErrors!DocData.WalkResult {
- const tags = file.zir.instructions.items(.tag);
- const data = file.zir.instructions.items(.data);
-
- if (self.repurposed_insts.contains(inst)) {
- // TODO: better handling here
- return .{ .expr = .{ .comptimeExpr = 0 } };
- }
-
- // We assume that the topmost ast_node entry corresponds to our decl
- const self_ast_node_index = self.ast_nodes.items.len - 1;
-
- switch (tags[@intFromEnum(inst)]) {
- else => {
- printWithContext(
- file,
- inst,
- "TODO: implement `{s}` for walkInstruction\n\n",
- .{@tagName(tags[@intFromEnum(inst)])},
- );
- return self.cteTodo(@tagName(tags[@intFromEnum(inst)]));
- },
- .import => {
- const str_tok = data[@intFromEnum(inst)].str_tok;
- const path = str_tok.get(file.zir);
-
- // importFile cannot error out since all files
- // are already loaded at this point
- if (file.mod.deps.get(path)) |other_module| {
- const result = try self.modules.getOrPut(self.arena, other_module);
-
- // Immediately add this module to the import table of our
- // current module, regardless of wether it's new or not.
- if (self.modules.getPtr(file.mod)) |current_module| {
- // TODO: apparently, in the stdlib a file gets analyzed before
- // its module gets added. I guess we're importing a file
- // that belongs to another module through its file path?
- // (ie not through its module name).
- // We're bailing for now, but maybe we shouldn't?
- _ = try current_module.table.getOrPutValue(
- self.arena,
- other_module,
- .{
- .name = path,
- .value = self.modules.getIndex(other_module).?,
- },
- );
- }
-
- if (result.found_existing) {
- return DocData.WalkResult{
- .typeRef = .{ .type = @intFromEnum(Ref.type_type) },
- .expr = .{ .type = result.value_ptr.main },
- };
- }
-
- // create a new module entry
- const main_type_index = self.types.items.len;
- result.value_ptr.* = .{
- .name = path,
- .main = main_type_index,
- .table = .{},
- };
-
- // TODO: Add this module as a dependency to the current module
- // TODO: this seems something that could be done in bulk
- // at the beginning or the end, or something.
- const abs_root_src_path = try std.fs.path.resolve(self.arena, &.{
- ".",
- other_module.root.root_dir.path orelse ".",
- other_module.root.sub_path,
- other_module.root_src_path,
- });
- defer self.arena.free(abs_root_src_path);
-
- const new_file = self.zcu.import_table.get(abs_root_src_path).?;
-
- var root_scope = Scope{
- .parent = null,
- .enclosing_type = null,
- };
- const maybe_tldoc_comment = try self.getTLDocComment(file);
- try self.ast_nodes.append(self.arena, .{
- .name = "(root)",
- .docs = maybe_tldoc_comment,
- });
- try self.files.put(self.arena, new_file, main_type_index);
- return self.walkInstruction(
- new_file,
- &root_scope,
- .{},
- .main_struct_inst,
- false,
- call_ctx,
- );
- }
-
- const new_file = try self.zcu.importFile(file, path);
- const result = try self.files.getOrPut(self.arena, new_file.file);
- if (result.found_existing) {
- return DocData.WalkResult{
- .typeRef = .{ .type = @intFromEnum(Ref.type_type) },
- .expr = .{ .type = result.value_ptr.* },
- };
- }
-
- const maybe_tldoc_comment = try self.getTLDocComment(new_file.file);
- try self.ast_nodes.append(self.arena, .{
- .name = path,
- .docs = maybe_tldoc_comment,
- });
-
- result.value_ptr.* = self.types.items.len;
-
- var new_scope = Scope{
- .parent = null,
- .enclosing_type = null,
- };
-
- return self.walkInstruction(
- new_file.file,
- &new_scope,
- .{},
- .main_struct_inst,
- need_type,
- call_ctx,
- );
- },
- .ret_type => {
- return DocData.WalkResult{
- .typeRef = .{ .type = @intFromEnum(Ref.type_type) },
- .expr = .{ .type = @intFromEnum(Ref.type_type) },
- };
- },
- .ret_node => {
- const un_node = data[@intFromEnum(inst)].un_node;
- return self.walkRef(
- file,
- parent_scope,
- parent_src,
- un_node.operand,
- false,
- call_ctx,
- );
- },
- .ret_load => {
- const un_node = data[@intFromEnum(inst)].un_node;
- const res_ptr_ref = un_node.operand;
- const res_ptr_inst = @intFromEnum(res_ptr_ref.toIndex().?);
- // TODO: this instruction doesn't let us know trivially if there's
- // branching involved or not. For now here's the strat:
- // We search backwarts until `ret_ptr` for `store_node`,
- // if we find only one, then that's our value, if we find more
- // than one, then it means that there's branching involved.
- // Maybe.
-
- var i = @intFromEnum(inst) - 1;
- var result_ref: ?Ref = null;
- while (i > res_ptr_inst) : (i -= 1) {
- if (tags[i] == .store_node) {
- const pl_node = data[i].pl_node;
- const extra = file.zir.extraData(Zir.Inst.Bin, pl_node.payload_index);
- if (extra.data.lhs == res_ptr_ref) {
- // this store_load instruction is indeed pointing at
- // the result location that we care about!
- if (result_ref != null) return DocData.WalkResult{
- .expr = .{ .comptimeExpr = 0 },
- };
- result_ref = extra.data.rhs;
- }
- }
- }
-
- if (result_ref) |rr| {
- return self.walkRef(
- file,
- parent_scope,
- parent_src,
- rr,
- need_type,
- call_ctx,
- );
- }
-
- return DocData.WalkResult{
- .expr = .{ .comptimeExpr = 0 },
- };
- },
- .str => {
- const str = data[@intFromEnum(inst)].str.get(file.zir);
-
- const tRef: ?DocData.Expr = if (!need_type) null else blk: {
- const arrTypeId = self.types.items.len;
- try self.types.append(self.arena, .{
- .Array = .{
- .len = .{ .int = .{ .value = str.len } },
- .child = .{ .type = @intFromEnum(Ref.u8_type) },
- .sentinel = .{ .int = .{
- .value = 0,
- .negated = false,
- } },
- },
- });
- // const sentinel: ?usize = if (ptr.flags.has_sentinel) 0 else null;
- const ptrTypeId = self.types.items.len;
- try self.types.append(self.arena, .{
- .Pointer = .{
- .size = .One,
- .child = .{ .type = arrTypeId },
- .sentinel = .{ .int = .{
- .value = 0,
- .negated = false,
- } },
- .is_mutable = false,
- },
- });
- break :blk .{ .type = ptrTypeId };
- };
-
- return DocData.WalkResult{
- .typeRef = tRef,
- .expr = .{ .string = str },
- };
- },
- .compile_error => {
- const un_node = data[@intFromEnum(inst)].un_node;
-
- const operand: DocData.WalkResult = try self.walkRef(
- file,
- parent_scope,
- parent_src,
- un_node.operand,
- false,
- call_ctx,
- );
-
- const operand_index = self.exprs.items.len;
- try self.exprs.append(self.arena, operand.expr);
-
- return DocData.WalkResult{
- .expr = .{ .compileError = operand_index },
- };
- },
- .enum_literal => {
- const str_tok = data[@intFromEnum(inst)].str_tok;
- const literal = file.zir.nullTerminatedString(str_tok.start);
- const type_index = self.types.items.len;
- try self.types.append(self.arena, .{
- .EnumLiteral = .{ .name = "todo enum literal" },
- });
-
- return DocData.WalkResult{
- .typeRef = .{ .type = type_index },
- .expr = .{ .enumLiteral = literal },
- };
- },
- .int => {
- const int = data[@intFromEnum(inst)].int;
- return DocData.WalkResult{
- .typeRef = .{ .type = @intFromEnum(Ref.comptime_int_type) },
- .expr = .{ .int = .{ .value = int } },
- };
- },
- .int_big => {
- // @check
- const str = data[@intFromEnum(inst)].str; //.get(file.zir);
- const byte_count = str.len * @sizeOf(std.math.big.Limb);
- const limb_bytes = file.zir.string_bytes[@intFromEnum(str.start)..][0..byte_count];
-
- const limbs = try self.arena.alloc(std.math.big.Limb, str.len);
- @memcpy(std.mem.sliceAsBytes(limbs)[0..limb_bytes.len], limb_bytes);
-
- const big_int = std.math.big.int.Const{
- .limbs = limbs,
- .positive = true,
- };
-
- const as_string = try big_int.toStringAlloc(self.arena, 10, .lower);
-
- return DocData.WalkResult{
- .typeRef = .{ .type = @intFromEnum(Ref.comptime_int_type) },
- .expr = .{ .int_big = .{ .value = as_string } },
- };
- },
- .@"unreachable" => {
- return DocData.WalkResult{
- .typeRef = .{ .type = @intFromEnum(Ref.noreturn_type) },
- .expr = .{ .@"unreachable" = .{} },
- };
- },
-
- .slice_start => {
- const pl_node = data[@intFromEnum(inst)].pl_node;
- const extra = file.zir.extraData(Zir.Inst.SliceStart, pl_node.payload_index);
-
- const slice_index = self.exprs.items.len;
- try self.exprs.append(self.arena, .{ .slice = .{ .lhs = 0, .start = 0 } });
-
- const lhs: DocData.WalkResult = try self.walkRef(
- file,
- parent_scope,
- parent_src,
- extra.data.lhs,
- false,
- call_ctx,
- );
- const start: DocData.WalkResult = try self.walkRef(
- file,
- parent_scope,
- parent_src,
- extra.data.start,
- false,
- call_ctx,
- );
-
- const lhs_index = self.exprs.items.len;
- try self.exprs.append(self.arena, lhs.expr);
- const start_index = self.exprs.items.len;
- try self.exprs.append(self.arena, start.expr);
- self.exprs.items[slice_index] = .{ .slice = .{ .lhs = lhs_index, .start = start_index } };
-
- const typeRef = switch (lhs.expr) {
- .declRef => |ref| self.decls.items[ref.Analyzed].value.typeRef,
- else => null,
- };
-
- return DocData.WalkResult{
- .typeRef = typeRef,
- .expr = .{ .sliceIndex = slice_index },
- };
- },
- .slice_end => {
- const pl_node = data[@intFromEnum(inst)].pl_node;
- const extra = file.zir.extraData(Zir.Inst.SliceEnd, pl_node.payload_index);
-
- const slice_index = self.exprs.items.len;
- try self.exprs.append(self.arena, .{ .slice = .{ .lhs = 0, .start = 0 } });
-
- const lhs: DocData.WalkResult = try self.walkRef(
- file,
- parent_scope,
- parent_src,
- extra.data.lhs,
- false,
- call_ctx,
- );
- const start: DocData.WalkResult = try self.walkRef(
- file,
- parent_scope,
- parent_src,
- extra.data.start,
- false,
- call_ctx,
- );
- const end: DocData.WalkResult = try self.walkRef(
- file,
- parent_scope,
- parent_src,
- extra.data.end,
- false,
- call_ctx,
- );
-
- const lhs_index = self.exprs.items.len;
- try self.exprs.append(self.arena, lhs.expr);
- const start_index = self.exprs.items.len;
- try self.exprs.append(self.arena, start.expr);
- const end_index = self.exprs.items.len;
- try self.exprs.append(self.arena, end.expr);
- self.exprs.items[slice_index] = .{ .slice = .{ .lhs = lhs_index, .start = start_index, .end = end_index } };
-
- const typeRef = switch (lhs.expr) {
- .declRef => |ref| self.decls.items[ref.Analyzed].value.typeRef,
- else => null,
- };
-
- return DocData.WalkResult{
- .typeRef = typeRef,
- .expr = .{ .sliceIndex = slice_index },
- };
- },
- .slice_sentinel => {
- const pl_node = data[@intFromEnum(inst)].pl_node;
- const extra = file.zir.extraData(Zir.Inst.SliceSentinel, pl_node.payload_index);
-
- const slice_index = self.exprs.items.len;
- try self.exprs.append(self.arena, .{ .slice = .{ .lhs = 0, .start = 0 } });
-
- const lhs: DocData.WalkResult = try self.walkRef(
- file,
- parent_scope,
- parent_src,
- extra.data.lhs,
- false,
- call_ctx,
- );
- const start: DocData.WalkResult = try self.walkRef(
- file,
- parent_scope,
- parent_src,
- extra.data.start,
- false,
- call_ctx,
- );
- const end: DocData.WalkResult = try self.walkRef(
- file,
- parent_scope,
- parent_src,
- extra.data.end,
- false,
- call_ctx,
- );
- const sentinel: DocData.WalkResult = try self.walkRef(
- file,
- parent_scope,
- parent_src,
- extra.data.sentinel,
- false,
- call_ctx,
- );
-
- const lhs_index = self.exprs.items.len;
- try self.exprs.append(self.arena, lhs.expr);
- const start_index = self.exprs.items.len;
- try self.exprs.append(self.arena, start.expr);
- const end_index = self.exprs.items.len;
- try self.exprs.append(self.arena, end.expr);
- const sentinel_index = self.exprs.items.len;
- try self.exprs.append(self.arena, sentinel.expr);
- self.exprs.items[slice_index] = .{ .slice = .{
- .lhs = lhs_index,
- .start = start_index,
- .end = end_index,
- .sentinel = sentinel_index,
- } };
-
- const typeRef = switch (lhs.expr) {
- .declRef => |ref| self.decls.items[ref.Analyzed].value.typeRef,
- else => null,
- };
-
- return DocData.WalkResult{
- .typeRef = typeRef,
- .expr = .{ .sliceIndex = slice_index },
- };
- },
- .slice_length => {
- const pl_node = data[@intFromEnum(inst)].pl_node;
- const extra = file.zir.extraData(Zir.Inst.SliceLength, pl_node.payload_index);
-
- const slice_index = self.exprs.items.len;
- try self.exprs.append(self.arena, .{ .slice = .{ .lhs = 0, .start = 0 } });
-
- const lhs: DocData.WalkResult = try self.walkRef(
- file,
- parent_scope,
- parent_src,
- extra.data.lhs,
- false,
- call_ctx,
- );
- const start: DocData.WalkResult = try self.walkRef(
- file,
- parent_scope,
- parent_src,
- extra.data.start,
- false,
- call_ctx,
- );
- const len: DocData.WalkResult = try self.walkRef(
- file,
- parent_scope,
- parent_src,
- extra.data.len,
- false,
- call_ctx,
- );
- const sentinel_opt: ?DocData.WalkResult = if (extra.data.sentinel != .none)
- try self.walkRef(
- file,
- parent_scope,
- parent_src,
- extra.data.sentinel,
- false,
- call_ctx,
- )
- else
- null;
-
- const lhs_index = self.exprs.items.len;
- try self.exprs.append(self.arena, lhs.expr);
- const start_index = self.exprs.items.len;
- try self.exprs.append(self.arena, start.expr);
- const len_index = self.exprs.items.len;
- try self.exprs.append(self.arena, len.expr);
- const sentinel_index = if (sentinel_opt) |sentinel| sentinel_index: {
- const index = self.exprs.items.len;
- try self.exprs.append(self.arena, sentinel.expr);
- break :sentinel_index index;
- } else null;
- self.exprs.items[slice_index] = .{ .sliceLength = .{
- .lhs = lhs_index,
- .start = start_index,
- .len = len_index,
- .sentinel = sentinel_index,
- } };
-
- const typeRef = switch (lhs.expr) {
- .declRef => |ref| self.decls.items[ref.Analyzed].value.typeRef,
- else => null,
- };
-
- return DocData.WalkResult{
- .typeRef = typeRef,
- .expr = .{ .sliceIndex = slice_index },
- };
- },
-
- .load => {
- const un_node = data[@intFromEnum(inst)].un_node;
- const operand = try self.walkRef(
- file,
- parent_scope,
- parent_src,
- un_node.operand,
- need_type,
- call_ctx,
- );
- const load_idx = self.exprs.items.len;
- try self.exprs.append(self.arena, operand.expr);
-
- var typeRef: ?DocData.Expr = null;
- if (operand.typeRef) |ref| {
- switch (ref) {
- .type => |t_index| {
- switch (self.types.items[t_index]) {
- .Pointer => |p| typeRef = p.child,
- else => {},
- }
- },
- else => {},
- }
- }
-
- return DocData.WalkResult{
- .typeRef = typeRef,
- .expr = .{ .load = load_idx },
- };
- },
- .ref => {
- const un_tok = data[@intFromEnum(inst)].un_tok;
- const operand = try self.walkRef(
- file,
- parent_scope,
- parent_src,
- un_tok.operand,
- need_type,
- call_ctx,
- );
- const ref_idx = self.exprs.items.len;
- try self.exprs.append(self.arena, operand.expr);
-
- return DocData.WalkResult{
- .expr = .{ .@"&" = ref_idx },
- };
- },
-
- .add,
- .addwrap,
- .add_sat,
- .sub,
- .subwrap,
- .sub_sat,
- .mul,
- .mulwrap,
- .mul_sat,
- .div,
- .shl,
- .shl_sat,
- .shr,
- .bit_or,
- .bit_and,
- .xor,
- .array_cat,
- => {
- const pl_node = data[@intFromEnum(inst)].pl_node;
- const extra = file.zir.extraData(Zir.Inst.Bin, pl_node.payload_index);
-
- const binop_index = self.exprs.items.len;
- try self.exprs.append(self.arena, .{ .binOp = .{ .lhs = 0, .rhs = 0 } });
-
- const lhs: DocData.WalkResult = try self.walkRef(
- file,
- parent_scope,
- parent_src,
- extra.data.lhs,
- false,
- call_ctx,
- );
- const rhs: DocData.WalkResult = try self.walkRef(
- file,
- parent_scope,
- parent_src,
- extra.data.rhs,
- false,
- call_ctx,
- );
-
- const lhs_index = self.exprs.items.len;
- try self.exprs.append(self.arena, lhs.expr);
- const rhs_index = self.exprs.items.len;
- try self.exprs.append(self.arena, rhs.expr);
- self.exprs.items[binop_index] = .{ .binOp = .{
- .name = @tagName(tags[@intFromEnum(inst)]),
- .lhs = lhs_index,
- .rhs = rhs_index,
- } };
-
- return DocData.WalkResult{
- .typeRef = .{ .type = @intFromEnum(Ref.type_type) },
- .expr = .{ .binOpIndex = binop_index },
- };
- },
- .array_mul => {
- const pl_node = data[@intFromEnum(inst)].pl_node;
- const extra = file.zir.extraData(Zir.Inst.ArrayMul, pl_node.payload_index);
-
- const binop_index = self.exprs.items.len;
- try self.exprs.append(self.arena, .{ .binOp = .{ .lhs = 0, .rhs = 0 } });
-
- const lhs: DocData.WalkResult = try self.walkRef(
- file,
- parent_scope,
- parent_src,
- extra.data.lhs,
- false,
- call_ctx,
- );
- const rhs: DocData.WalkResult = try self.walkRef(
- file,
- parent_scope,
- parent_src,
- extra.data.rhs,
- false,
- call_ctx,
- );
- const res_ty: ?DocData.WalkResult = if (extra.data.res_ty != .none)
- try self.walkRef(
- file,
- parent_scope,
- parent_src,
- extra.data.res_ty,
- false,
- call_ctx,
- )
- else
- null;
-
- const lhs_index = self.exprs.items.len;
- try self.exprs.append(self.arena, lhs.expr);
- const rhs_index = self.exprs.items.len;
- try self.exprs.append(self.arena, rhs.expr);
- self.exprs.items[binop_index] = .{ .binOp = .{
- .name = @tagName(tags[@intFromEnum(inst)]),
- .lhs = lhs_index,
- .rhs = rhs_index,
- } };
-
- return DocData.WalkResult{
- .typeRef = if (res_ty) |rt| rt.expr else null,
- .expr = .{ .binOpIndex = binop_index },
- };
- },
- // compare operators
- .cmp_eq,
- .cmp_neq,
- .cmp_gt,
- .cmp_gte,
- .cmp_lt,
- .cmp_lte,
- => {
- const pl_node = data[@intFromEnum(inst)].pl_node;
- const extra = file.zir.extraData(Zir.Inst.Bin, pl_node.payload_index);
-
- const binop_index = self.exprs.items.len;
- try self.exprs.append(self.arena, .{ .binOp = .{ .lhs = 0, .rhs = 0 } });
-
- const lhs: DocData.WalkResult = try self.walkRef(
- file,
- parent_scope,
- parent_src,
- extra.data.lhs,
- false,
- call_ctx,
- );
- const rhs: DocData.WalkResult = try self.walkRef(
- file,
- parent_scope,
- parent_src,
- extra.data.rhs,
- false,
- call_ctx,
- );
-
- const lhs_index = self.exprs.items.len;
- try self.exprs.append(self.arena, lhs.expr);
- const rhs_index = self.exprs.items.len;
- try self.exprs.append(self.arena, rhs.expr);
- self.exprs.items[binop_index] = .{ .binOp = .{
- .name = @tagName(tags[@intFromEnum(inst)]),
- .lhs = lhs_index,
- .rhs = rhs_index,
- } };
-
- return DocData.WalkResult{
- .typeRef = .{ .type = @intFromEnum(Ref.bool_type) },
- .expr = .{ .binOpIndex = binop_index },
- };
- },
-
- // builtin functions
- .align_of,
- .int_from_bool,
- .embed_file,
- .error_name,
- .panic,
- .set_runtime_safety, // @check
- .sqrt,
- .sin,
- .cos,
- .tan,
- .exp,
- .exp2,
- .log,
- .log2,
- .log10,
- .abs,
- .floor,
- .ceil,
- .trunc,
- .round,
- .tag_name,
- .type_name,
- .frame_type,
- .frame_size,
- .int_from_ptr,
- .type_info,
- // @check
- .clz,
- .ctz,
- .pop_count,
- .byte_swap,
- .bit_reverse,
- => {
- const un_node = data[@intFromEnum(inst)].un_node;
- const bin_index = self.exprs.items.len;
- try self.exprs.append(self.arena, .{ .builtin = .{ .param = 0 } });
- const param = try self.walkRef(
- file,
- parent_scope,
- parent_src,
- un_node.operand,
- false,
- call_ctx,
- );
-
- const param_index = self.exprs.items.len;
- try self.exprs.append(self.arena, param.expr);
-
- self.exprs.items[bin_index] = .{
- .builtin = .{
- .name = @tagName(tags[@intFromEnum(inst)]),
- .param = param_index,
- },
- };
-
- return DocData.WalkResult{
- .typeRef = param.typeRef orelse .{ .type = @intFromEnum(Ref.type_type) },
- .expr = .{ .builtinIndex = bin_index },
- };
- },
- .bit_not,
- .bool_not,
- .negate_wrap,
- => {
- const un_node = data[@intFromEnum(inst)].un_node;
- const un_index = self.exprs.items.len;
- try self.exprs.append(self.arena, .{ .unOp = .{ .param = 0 } });
- const param = try self.walkRef(
- file,
- parent_scope,
- parent_src,
- un_node.operand,
- false,
- call_ctx,
- );
-
- const param_index = self.exprs.items.len;
- try self.exprs.append(self.arena, param.expr);
-
- self.exprs.items[un_index] = .{
- .unOp = .{
- .name = @tagName(tags[@intFromEnum(inst)]),
- .param = param_index,
- },
- };
-
- return DocData.WalkResult{
- .typeRef = param.typeRef,
- .expr = .{ .unOpIndex = un_index },
- };
- },
- .bool_br_and, .bool_br_or => {
- const pl_node = data[@intFromEnum(inst)].pl_node;
- const extra = file.zir.extraData(Zir.Inst.BoolBr, pl_node.payload_index);
-
- const bin_index = self.exprs.items.len;
- try self.exprs.append(self.arena, .{ .binOp = .{ .lhs = 0, .rhs = 0 } });
-
- const lhs = try self.walkRef(
- file,
- parent_scope,
- parent_src,
- extra.data.lhs,
- false,
- call_ctx,
- );
- const lhs_index = self.exprs.items.len;
- try self.exprs.append(self.arena, lhs.expr);
-
- const rhs = try self.walkInstruction(
- file,
- parent_scope,
- parent_src,
- @enumFromInt(file.zir.extra[extra.end..][extra.data.body_len - 1]),
- false,
- call_ctx,
- );
- const rhs_index = self.exprs.items.len;
- try self.exprs.append(self.arena, rhs.expr);
-
- self.exprs.items[bin_index] = .{ .binOp = .{ .name = @tagName(tags[@intFromEnum(inst)]), .lhs = lhs_index, .rhs = rhs_index } };
-
- return DocData.WalkResult{
- .typeRef = .{ .type = @intFromEnum(Ref.bool_type) },
- .expr = .{ .binOpIndex = bin_index },
- };
- },
- .truncate => {
- // in the ZIR this node is a builtin `bin` but we want send it as a `un` builtin
- const pl_node = data[@intFromEnum(inst)].pl_node;
- const extra = file.zir.extraData(Zir.Inst.Bin, pl_node.payload_index);
-
- const rhs: DocData.WalkResult = try self.walkRef(
- file,
- parent_scope,
- parent_src,
- extra.data.rhs,
- false,
- call_ctx,
- );
-
- const bin_index = self.exprs.items.len;
- try self.exprs.append(self.arena, .{ .builtin = .{ .param = 0 } });
-
- const rhs_index = self.exprs.items.len;
- try self.exprs.append(self.arena, rhs.expr);
-
- const lhs: DocData.WalkResult = try self.walkRef(
- file,
- parent_scope,
- parent_src,
- extra.data.lhs,
- false,
- call_ctx,
- );
-
- self.exprs.items[bin_index] = .{ .builtin = .{ .name = @tagName(tags[@intFromEnum(inst)]), .param = rhs_index } };
-
- return DocData.WalkResult{
- .typeRef = lhs.expr,
- .expr = .{ .builtinIndex = bin_index },
- };
- },
- .int_from_float,
- .float_from_int,
- .ptr_from_int,
- .enum_from_int,
- .float_cast,
- .int_cast,
- .ptr_cast,
- .has_decl,
- .has_field,
- .div_exact,
- .div_floor,
- .div_trunc,
- .mod,
- .rem,
- .mod_rem,
- .shl_exact,
- .shr_exact,
- .bitcast,
- .vector_type,
- // @check
- .bit_offset_of,
- .offset_of,
- .splat,
- .reduce,
- .min,
- .max,
- => {
- const pl_node = data[@intFromEnum(inst)].pl_node;
- const extra = file.zir.extraData(Zir.Inst.Bin, pl_node.payload_index);
-
- const binop_index = self.exprs.items.len;
- try self.exprs.append(self.arena, .{ .builtinBin = .{ .lhs = 0, .rhs = 0 } });
-
- const lhs: DocData.WalkResult = try self.walkRef(
- file,
- parent_scope,
- parent_src,
- extra.data.lhs,
- false,
- call_ctx,
- );
- const rhs: DocData.WalkResult = try self.walkRef(
- file,
- parent_scope,
- parent_src,
- extra.data.rhs,
- false,
- call_ctx,
- );
-
- const lhs_index = self.exprs.items.len;
- try self.exprs.append(self.arena, lhs.expr);
- const rhs_index = self.exprs.items.len;
- try self.exprs.append(self.arena, rhs.expr);
- self.exprs.items[binop_index] = .{ .builtinBin = .{ .name = @tagName(tags[@intFromEnum(inst)]), .lhs = lhs_index, .rhs = rhs_index } };
-
- return DocData.WalkResult{
- .typeRef = .{ .type = @intFromEnum(Ref.type_type) },
- .expr = .{ .builtinBinIndex = binop_index },
- };
- },
- .mul_add => {
- const pl_node = data[@intFromEnum(inst)].pl_node;
- const extra = file.zir.extraData(Zir.Inst.MulAdd, pl_node.payload_index);
-
- const mul1: DocData.WalkResult = try self.walkRef(
- file,
- parent_scope,
- parent_src,
- extra.data.mulend1,
- false,
- call_ctx,
- );
- const mul2: DocData.WalkResult = try self.walkRef(
- file,
- parent_scope,
- parent_src,
- extra.data.mulend2,
- false,
- call_ctx,
- );
- const add: DocData.WalkResult = try self.walkRef(
- file,
- parent_scope,
- parent_src,
- extra.data.addend,
- false,
- call_ctx,
- );
-
- const mul1_index = self.exprs.items.len;
- try self.exprs.append(self.arena, mul1.expr);
- const mul2_index = self.exprs.items.len;
- try self.exprs.append(self.arena, mul2.expr);
- const add_index = self.exprs.items.len;
- try self.exprs.append(self.arena, add.expr);
-
- const type_index: usize = self.exprs.items.len;
- try self.exprs.append(self.arena, add.typeRef orelse .{ .type = @intFromEnum(Ref.type_type) });
-
- return DocData.WalkResult{
- .typeRef = add.typeRef,
- .expr = .{
- .mulAdd = .{
- .mulend1 = mul1_index,
- .mulend2 = mul2_index,
- .addend = add_index,
- .type = type_index,
- },
- },
- };
- },
- .union_init => {
- const pl_node = data[@intFromEnum(inst)].pl_node;
- const extra = file.zir.extraData(Zir.Inst.UnionInit, pl_node.payload_index);
-
- const union_type: DocData.WalkResult = try self.walkRef(
- file,
- parent_scope,
- parent_src,
- extra.data.union_type,
- false,
- call_ctx,
- );
- const field_name: DocData.WalkResult = try self.walkRef(
- file,
- parent_scope,
- parent_src,
- extra.data.field_name,
- false,
- call_ctx,
- );
- const init: DocData.WalkResult = try self.walkRef(
- file,
- parent_scope,
- parent_src,
- extra.data.init,
- false,
- call_ctx,
- );
-
- const union_type_index = self.exprs.items.len;
- try self.exprs.append(self.arena, union_type.expr);
- const field_name_index = self.exprs.items.len;
- try self.exprs.append(self.arena, field_name.expr);
- const init_index = self.exprs.items.len;
- try self.exprs.append(self.arena, init.expr);
-
- return DocData.WalkResult{
- .typeRef = union_type.expr,
- .expr = .{
- .unionInit = .{
- .type = union_type_index,
- .field = field_name_index,
- .init = init_index,
- },
- },
- };
- },
- .builtin_call => {
- const pl_node = data[@intFromEnum(inst)].pl_node;
- const extra = file.zir.extraData(Zir.Inst.BuiltinCall, pl_node.payload_index);
-
- const modifier: DocData.WalkResult = try self.walkRef(
- file,
- parent_scope,
- parent_src,
- extra.data.modifier,
- false,
- call_ctx,
- );
-
- const callee: DocData.WalkResult = try self.walkRef(
- file,
- parent_scope,
- parent_src,
- extra.data.callee,
- false,
- call_ctx,
- );
-
- const args: DocData.WalkResult = try self.walkRef(
- file,
- parent_scope,
- parent_src,
- extra.data.args,
- false,
- call_ctx,
- );
-
- const modifier_index = self.exprs.items.len;
- try self.exprs.append(self.arena, modifier.expr);
- const function_index = self.exprs.items.len;
- try self.exprs.append(self.arena, callee.expr);
- const args_index = self.exprs.items.len;
- try self.exprs.append(self.arena, args.expr);
-
- return DocData.WalkResult{
- .expr = .{
- .builtinCall = .{
- .modifier = modifier_index,
- .function = function_index,
- .args = args_index,
- },
- },
- };
- },
- .error_union_type => {
- const pl_node = data[@intFromEnum(inst)].pl_node;
- const extra = file.zir.extraData(Zir.Inst.Bin, pl_node.payload_index);
-
- const lhs: DocData.WalkResult = try self.walkRef(
- file,
- parent_scope,
- parent_src,
- extra.data.lhs,
- false,
- call_ctx,
- );
- const rhs: DocData.WalkResult = try self.walkRef(
- file,
- parent_scope,
- parent_src,
- extra.data.rhs,
- false,
- call_ctx,
- );
-
- const type_slot_index = self.types.items.len;
- try self.types.append(self.arena, .{ .ErrorUnion = .{
- .lhs = lhs.expr,
- .rhs = rhs.expr,
- } });
-
- return DocData.WalkResult{
- .typeRef = .{ .type = @intFromEnum(Ref.type_type) },
- .expr = .{ .errorUnion = type_slot_index },
- };
- },
- .merge_error_sets => {
- const pl_node = data[@intFromEnum(inst)].pl_node;
- const extra = file.zir.extraData(Zir.Inst.Bin, pl_node.payload_index);
-
- const lhs: DocData.WalkResult = try self.walkRef(
- file,
- parent_scope,
- parent_src,
- extra.data.lhs,
- false,
- call_ctx,
- );
- const rhs: DocData.WalkResult = try self.walkRef(
- file,
- parent_scope,
- parent_src,
- extra.data.rhs,
- false,
- call_ctx,
- );
- const type_slot_index = self.types.items.len;
- try self.types.append(self.arena, .{ .ErrorUnion = .{
- .lhs = lhs.expr,
- .rhs = rhs.expr,
- } });
-
- return DocData.WalkResult{
- .typeRef = .{ .type = @intFromEnum(Ref.type_type) },
- .expr = .{ .errorSets = type_slot_index },
- };
- },
- // .elem_type => {
- // const un_node = data[@intFromEnum(inst)].un_node;
-
- // const operand: DocData.WalkResult = try self.walkRef(
- // file,
- // parent_scope, parent_src,
- // un_node.operand,
- // false,
- // );
-
- // return operand;
- // },
- .ptr_type => {
- const ptr = data[@intFromEnum(inst)].ptr_type;
- const extra = file.zir.extraData(Zir.Inst.PtrType, ptr.payload_index);
- var extra_index = extra.end;
-
- const elem_type_ref = try self.walkRef(
- file,
- parent_scope,
- parent_src,
- extra.data.elem_type,
- false,
- call_ctx,
- );
-
- // @check if `addrspace`, `bit_start` and `host_size` really need to be
- // present in json
- var sentinel: ?DocData.Expr = null;
- if (ptr.flags.has_sentinel) {
- const ref: Zir.Inst.Ref = @enumFromInt(file.zir.extra[extra_index]);
- const ref_result = try self.walkRef(
- file,
- parent_scope,
- parent_src,
- ref,
- false,
- call_ctx,
- );
- sentinel = ref_result.expr;
- extra_index += 1;
- }
-
- var @"align": ?DocData.Expr = null;
- if (ptr.flags.has_align) {
- const ref: Zir.Inst.Ref = @enumFromInt(file.zir.extra[extra_index]);
- const ref_result = try self.walkRef(
- file,
- parent_scope,
- parent_src,
- ref,
- false,
- call_ctx,
- );
- @"align" = ref_result.expr;
- extra_index += 1;
- }
- var address_space: ?DocData.Expr = null;
- if (ptr.flags.has_addrspace) {
- const ref: Zir.Inst.Ref = @enumFromInt(file.zir.extra[extra_index]);
- const ref_result = try self.walkRef(
- file,
- parent_scope,
- parent_src,
- ref,
- false,
- call_ctx,
- );
- address_space = ref_result.expr;
- extra_index += 1;
- }
- const bit_start: ?DocData.Expr = null;
- if (ptr.flags.has_bit_range) {
- const ref: Zir.Inst.Ref = @enumFromInt(file.zir.extra[extra_index]);
- const ref_result = try self.walkRef(
- file,
- parent_scope,
- parent_src,
- ref,
- false,
- call_ctx,
- );
- address_space = ref_result.expr;
- extra_index += 1;
- }
-
- var host_size: ?DocData.Expr = null;
- if (ptr.flags.has_bit_range) {
- const ref: Zir.Inst.Ref = @enumFromInt(file.zir.extra[extra_index]);
- const ref_result = try self.walkRef(
- file,
- parent_scope,
- parent_src,
- ref,
- false,
- call_ctx,
- );
- host_size = ref_result.expr;
- }
-
- const type_slot_index = self.types.items.len;
- try self.types.append(self.arena, .{
- .Pointer = .{
- .size = ptr.size,
- .child = elem_type_ref.expr,
- .has_align = ptr.flags.has_align,
- .@"align" = @"align",
- .has_addrspace = ptr.flags.has_addrspace,
- .address_space = address_space,
- .has_sentinel = ptr.flags.has_sentinel,
- .sentinel = sentinel,
- .is_mutable = ptr.flags.is_mutable,
- .is_volatile = ptr.flags.is_volatile,
- .has_bit_range = ptr.flags.has_bit_range,
- .bit_start = bit_start,
- .host_size = host_size,
- },
- });
- return DocData.WalkResult{
- .typeRef = .{ .type = @intFromEnum(Ref.type_type) },
- .expr = .{ .type = type_slot_index },
- };
- },
- .array_type => {
- const pl_node = data[@intFromEnum(inst)].pl_node;
-
- const bin = file.zir.extraData(Zir.Inst.Bin, pl_node.payload_index).data;
- const len = try self.walkRef(
- file,
- parent_scope,
- parent_src,
- bin.lhs,
- false,
- call_ctx,
- );
- const child = try self.walkRef(
- file,
- parent_scope,
- parent_src,
- bin.rhs,
- false,
- call_ctx,
- );
-
- const type_slot_index = self.types.items.len;
- try self.types.append(self.arena, .{
- .Array = .{
- .len = len.expr,
- .child = child.expr,
- },
- });
-
- return DocData.WalkResult{
- .typeRef = .{ .type = @intFromEnum(Ref.type_type) },
- .expr = .{ .type = type_slot_index },
- };
- },
- .array_type_sentinel => {
- const pl_node = data[@intFromEnum(inst)].pl_node;
- const extra = file.zir.extraData(Zir.Inst.ArrayTypeSentinel, pl_node.payload_index);
- const len = try self.walkRef(
- file,
- parent_scope,
- parent_src,
- extra.data.len,
- false,
- call_ctx,
- );
- const sentinel = try self.walkRef(
- file,
- parent_scope,
- parent_src,
- extra.data.sentinel,
- false,
- call_ctx,
- );
- const elem_type = try self.walkRef(
- file,
- parent_scope,
- parent_src,
- extra.data.elem_type,
- false,
- call_ctx,
- );
-
- const type_slot_index = self.types.items.len;
- try self.types.append(self.arena, .{
- .Array = .{
- .len = len.expr,
- .child = elem_type.expr,
- .sentinel = sentinel.expr,
- },
- });
- return DocData.WalkResult{
- .typeRef = .{ .type = @intFromEnum(Ref.type_type) },
- .expr = .{ .type = type_slot_index },
- };
- },
- .array_init => {
- const pl_node = data[@intFromEnum(inst)].pl_node;
- const extra = file.zir.extraData(Zir.Inst.MultiOp, pl_node.payload_index);
- const operands = file.zir.refSlice(extra.end, extra.data.operands_len);
- const array_data = try self.arena.alloc(usize, operands.len - 1);
-
- std.debug.assert(operands.len > 0);
- const array_type = try self.walkRef(
- file,
- parent_scope,
- parent_src,
- operands[0],
- false,
- call_ctx,
- );
-
- for (operands[1..], 0..) |op, idx| {
- const wr = try self.walkRef(
- file,
- parent_scope,
- parent_src,
- op,
- false,
- call_ctx,
- );
- const expr_index = self.exprs.items.len;
- try self.exprs.append(self.arena, wr.expr);
- array_data[idx] = expr_index;
- }
-
- return DocData.WalkResult{
- .typeRef = array_type.expr,
- .expr = .{ .array = array_data },
- };
- },
- .array_init_anon => {
- const pl_node = data[@intFromEnum(inst)].pl_node;
- const extra = file.zir.extraData(Zir.Inst.MultiOp, pl_node.payload_index);
- const operands = file.zir.refSlice(extra.end, extra.data.operands_len);
- const array_data = try self.arena.alloc(usize, operands.len);
-
- for (operands, 0..) |op, idx| {
- const wr = try self.walkRef(
- file,
- parent_scope,
- parent_src,
- op,
- false,
- call_ctx,
- );
- const expr_index = self.exprs.items.len;
- try self.exprs.append(self.arena, wr.expr);
- array_data[idx] = expr_index;
- }
-
- return DocData.WalkResult{
- .typeRef = null,
- .expr = .{ .array = array_data },
- };
- },
- .array_init_ref => {
- const pl_node = data[@intFromEnum(inst)].pl_node;
- const extra = file.zir.extraData(Zir.Inst.MultiOp, pl_node.payload_index);
- const operands = file.zir.refSlice(extra.end, extra.data.operands_len);
- const array_data = try self.arena.alloc(usize, operands.len - 1);
-
- std.debug.assert(operands.len > 0);
- const array_type = try self.walkRef(
- file,
- parent_scope,
- parent_src,
- operands[0],
- false,
- call_ctx,
- );
-
- for (operands[1..], 0..) |op, idx| {
- const wr = try self.walkRef(
- file,
- parent_scope,
- parent_src,
- op,
- false,
- call_ctx,
- );
- const expr_index = self.exprs.items.len;
- try self.exprs.append(self.arena, wr.expr);
- array_data[idx] = expr_index;
- }
-
- const type_slot_index = self.types.items.len;
- try self.types.append(self.arena, .{
- .Pointer = .{
- .size = .One,
- .child = array_type.expr,
- },
- });
-
- const expr_index = self.exprs.items.len;
- try self.exprs.append(self.arena, .{ .array = array_data });
-
- return DocData.WalkResult{
- .typeRef = .{ .type = type_slot_index },
- .expr = .{ .@"&" = expr_index },
- };
- },
- .float => {
- const float = data[@intFromEnum(inst)].float;
- return DocData.WalkResult{
- .typeRef = .{ .type = @intFromEnum(Ref.comptime_float_type) },
- .expr = .{ .float = float },
- };
- },
- // @check: In frontend I'm handling float128 with `.toFixed(2)`
- .float128 => {
- const pl_node = data[@intFromEnum(inst)].pl_node;
- const extra = file.zir.extraData(Zir.Inst.Float128, pl_node.payload_index);
- return DocData.WalkResult{
- .typeRef = .{ .type = @intFromEnum(Ref.comptime_float_type) },
- .expr = .{ .float128 = extra.data.get() },
- };
- },
- .negate => {
- const un_node = data[@intFromEnum(inst)].un_node;
-
- var operand: DocData.WalkResult = try self.walkRef(
- file,
- parent_scope,
- parent_src,
- un_node.operand,
- need_type,
- call_ctx,
- );
- switch (operand.expr) {
- .int => |*int| int.negated = true,
- .int_big => |*int_big| int_big.negated = true,
- else => {
- const un_index = self.exprs.items.len;
- try self.exprs.append(self.arena, .{ .unOp = .{ .param = 0 } });
- const param_index = self.exprs.items.len;
- try self.exprs.append(self.arena, operand.expr);
- self.exprs.items[un_index] = .{
- .unOp = .{
- .name = @tagName(tags[@intFromEnum(inst)]),
- .param = param_index,
- },
- };
- return DocData.WalkResult{
- .typeRef = operand.typeRef,
- .expr = .{ .unOpIndex = un_index },
- };
- },
- }
- return operand;
- },
- .size_of => {
- const un_node = data[@intFromEnum(inst)].un_node;
-
- const operand = try self.walkRef(
- file,
- parent_scope,
- parent_src,
- un_node.operand,
- false,
- call_ctx,
- );
- const operand_index = self.exprs.items.len;
- try self.exprs.append(self.arena, operand.expr);
- return DocData.WalkResult{
- .typeRef = .{ .type = @intFromEnum(Ref.comptime_int_type) },
- .expr = .{ .sizeOf = operand_index },
- };
- },
- .bit_size_of => {
- // not working correctly with `align()`
- const un_node = data[@intFromEnum(inst)].un_node;
-
- const operand = try self.walkRef(
- file,
- parent_scope,
- parent_src,
- un_node.operand,
- need_type,
- call_ctx,
- );
- const operand_index = self.exprs.items.len;
- try self.exprs.append(self.arena, operand.expr);
-
- return DocData.WalkResult{
- .typeRef = operand.typeRef,
- .expr = .{ .bitSizeOf = operand_index },
- };
- },
- .int_from_enum => {
- // not working correctly with `align()`
- const un_node = data[@intFromEnum(inst)].un_node;
- const operand = try self.walkRef(
- file,
- parent_scope,
- parent_src,
- un_node.operand,
- false,
- call_ctx,
- );
- const builtin_index = self.exprs.items.len;
- try self.exprs.append(self.arena, .{ .builtin = .{ .param = 0 } });
- const operand_index = self.exprs.items.len;
- try self.exprs.append(self.arena, operand.expr);
- self.exprs.items[builtin_index] = .{
- .builtin = .{
- .name = @tagName(tags[@intFromEnum(inst)]),
- .param = operand_index,
- },
- };
-
- return DocData.WalkResult{
- .typeRef = .{ .type = @intFromEnum(Ref.comptime_int_type) },
- .expr = .{ .builtinIndex = builtin_index },
- };
- },
- .switch_block => {
- // WIP
- const pl_node = data[@intFromEnum(inst)].pl_node;
- const extra = file.zir.extraData(Zir.Inst.SwitchBlock, pl_node.payload_index);
-
- const switch_cond = try self.walkRef(
- file,
- parent_scope,
- parent_src,
- extra.data.operand,
- false,
- call_ctx,
- );
- const cond_index = self.exprs.items.len;
- try self.exprs.append(self.arena, switch_cond.expr);
- _ = cond_index;
-
- // const ast_index = self.ast_nodes.items.len;
- // const type_index = self.types.items.len - 1;
-
- // const ast_line = self.ast_nodes.items[ast_index - 1];
-
- // const sep = "=" ** 200;
- // log.debug("{s}", .{sep});
- // log.debug("SWITCH BLOCK", .{});
- // log.debug("extra = {any}", .{extra});
- // log.debug("outer_decl = {any}", .{self.types.items[type_index]});
- // log.debug("ast_lines = {}", .{ast_line});
- // log.debug("{s}", .{sep});
-
- const switch_index = self.exprs.items.len;
-
- // const src_loc = try self.srcLocInfo(file, pl_node.src_node, parent_src);
-
- const switch_expr = try self.getBlockSource(file, parent_src, pl_node.src_node);
- try self.exprs.append(self.arena, .{ .comptimeExpr = self.comptime_exprs.items.len });
- try self.comptime_exprs.append(self.arena, .{ .code = switch_expr });
- // try self.exprs.append(self.arena, .{ .switchOp = .{
- // .cond_index = cond_index,
- // .file_name = file.sub_file_path,
- // .src = ast_index,
- // .outer_decl = type_index,
- // } });
-
- return DocData.WalkResult{
- .typeRef = .{ .type = @intFromEnum(Ref.type_type) },
- .expr = .{ .switchIndex = switch_index },
- };
- },
-
- .typeof => {
- const un_node = data[@intFromEnum(inst)].un_node;
-
- const operand = try self.walkRef(
- file,
- parent_scope,
- parent_src,
- un_node.operand,
- need_type,
- call_ctx,
- );
- const operand_index = self.exprs.items.len;
- try self.exprs.append(self.arena, operand.expr);
-
- return DocData.WalkResult{
- .typeRef = operand.typeRef,
- .expr = .{ .typeOf = operand_index },
- };
- },
- .typeof_builtin => {
- const pl_node = data[@intFromEnum(inst)].pl_node;
- const extra = file.zir.extraData(Zir.Inst.Block, pl_node.payload_index);
- const body = file.zir.extra[extra.end..][extra.data.body_len - 1];
- const operand: DocData.WalkResult = try self.walkRef(
- file,
- parent_scope,
- parent_src,
- data[body].@"break".operand,
- false,
- call_ctx,
- );
-
- const operand_index = self.exprs.items.len;
- try self.exprs.append(self.arena, operand.expr);
-
- return DocData.WalkResult{
- .typeRef = operand.typeRef,
- .expr = .{ .typeOf = operand_index },
- };
- },
- .as_node, .as_shift_operand => {
- const pl_node = data[@intFromEnum(inst)].pl_node;
- const extra = file.zir.extraData(Zir.Inst.As, pl_node.payload_index);
-
- // Skip the as_node if the destination type is a call instruction
- if (extra.data.dest_type.toIndex()) |dti| {
- var maybe_cc = call_ctx;
- while (maybe_cc) |cc| : (maybe_cc = cc.prev) {
- if (cc.inst == dti) {
- return try self.walkRef(
- file,
- parent_scope,
- parent_src,
- extra.data.operand,
- false,
- call_ctx,
- );
- }
- }
- }
-
- const dest_type_walk = try self.walkRef(
- file,
- parent_scope,
- parent_src,
- extra.data.dest_type,
- false,
- call_ctx,
- );
-
- const operand = try self.walkRef(
- file,
- parent_scope,
- parent_src,
- extra.data.operand,
- false,
- call_ctx,
- );
-
- const operand_idx = self.exprs.items.len;
- try self.exprs.append(self.arena, operand.expr);
-
- const dest_type_idx = self.exprs.items.len;
- try self.exprs.append(self.arena, dest_type_walk.expr);
-
- // TODO: there's something wrong with how both `as` and `WalkResult`
- // try to store type information.
- return DocData.WalkResult{
- .typeRef = dest_type_walk.expr,
- .expr = .{
- .as = .{
- .typeRefArg = dest_type_idx,
- .exprArg = operand_idx,
- },
- },
- };
- },
- .optional_type => {
- const un_node = data[@intFromEnum(inst)].un_node;
-
- const operand: DocData.WalkResult = try self.walkRef(
- file,
- parent_scope,
- parent_src,
- un_node.operand,
- false,
- call_ctx,
- );
-
- const operand_idx = self.types.items.len;
- try self.types.append(self.arena, .{
- .Optional = .{ .name = "?TODO", .child = operand.expr },
- });
-
- return DocData.WalkResult{
- .typeRef = .{ .type = @intFromEnum(Ref.type_type) },
- .expr = .{ .type = operand_idx },
- };
- },
- .decl_val, .decl_ref => {
- const str_tok = data[@intFromEnum(inst)].str_tok;
- const decl_status = parent_scope.resolveDeclName(str_tok.start, file, inst.toOptional());
- return DocData.WalkResult{
- .expr = .{ .declRef = decl_status },
- };
- },
- .field_val, .field_ptr => {
- const pl_node = data[@intFromEnum(inst)].pl_node;
- const extra = file.zir.extraData(Zir.Inst.Field, pl_node.payload_index);
-
- var path: std.ArrayListUnmanaged(DocData.Expr) = .{};
- try path.append(self.arena, .{
- .declName = file.zir.nullTerminatedString(extra.data.field_name_start),
- });
-
- // Put inside path the starting index of each decl name that
- // we encounter as we navigate through all the field_*s
- const lhs_ref = blk: {
- var lhs_extra = extra;
- while (true) {
- const lhs = @intFromEnum(lhs_extra.data.lhs.toIndex() orelse {
- break :blk lhs_extra.data.lhs;
- });
-
- if (tags[lhs] != .field_val and
- tags[lhs] != .field_ptr)
- {
- break :blk lhs_extra.data.lhs;
- }
-
- lhs_extra = file.zir.extraData(
- Zir.Inst.Field,
- data[lhs].pl_node.payload_index,
- );
-
- try path.append(self.arena, .{
- .declName = file.zir.nullTerminatedString(lhs_extra.data.field_name_start),
- });
- }
- };
-
- // If the lhs is a `call` instruction, it means that we're inside
- // a function call and we're referring to one of its arguments.
- // We can't just blindly analyze the instruction or we will
- // start recursing forever.
- // TODO: add proper resolution of the container type for `calls`
- // TODO: we're testing lhs as an instruction twice
- // (above and below this todo); maybe a cleaner solution would
- // avoid that.
- // TODO: double check that we really don't need type info here
-
- const wr = blk: {
- if (lhs_ref.toIndex()) |lhs_inst| switch (tags[@intFromEnum(lhs_inst)]) {
- .call, .field_call => {
- break :blk DocData.WalkResult{
- .expr = .{
- .comptimeExpr = 0,
- },
- };
- },
- else => {},
- };
-
- break :blk try self.walkRef(
- file,
- parent_scope,
- parent_src,
- lhs_ref,
- false,
- call_ctx,
- );
- };
- try path.append(self.arena, wr.expr);
-
- // This way the data in `path` has the same ordering that the ref
- // path has in the text: most general component first.
- std.mem.reverse(DocData.Expr, path.items);
-
- // Right now, every element of `path` is a string except its first
- // element (at index 0). We're now going to attempt to resolve each
- // string. If one or more components in this path are not yet fully
- // analyzed, the path will only be solved partially, but we expect
- // to eventually solve it fully (or give up in case of a
- // comptimeExpr). This means that:
- // - (1) Paths can be not fully analyzed temporarily, so any code
- //   that requires knowing where a ref path leads to needs to
- //   implement support for laziness (see self.pending_ref_paths)
- // - (2) Paths can sometimes never resolve fully. This means that
- // any value that depends on that will have to become a
- // comptimeExpr.
- try self.tryResolveRefPath(file, inst, path.items);
- return DocData.WalkResult{ .expr = .{ .refPath = path.items } };
- },
- .int_type => {
- const int_type = data[@intFromEnum(inst)].int_type;
- const sign = if (int_type.signedness == .unsigned) "u" else "i";
- const bits = int_type.bit_count;
- const name = try std.fmt.allocPrint(self.arena, "{s}{}", .{ sign, bits });
-
- try self.types.append(self.arena, .{
- .Int = .{ .name = name },
- });
-
- return DocData.WalkResult{
- .typeRef = .{ .type = @intFromEnum(Ref.type_type) },
- .expr = .{ .type = self.types.items.len - 1 },
- };
- },
- .block => {
- const res = DocData.WalkResult{
- .typeRef = .{ .type = @intFromEnum(Ref.type_type) },
- .expr = .{ .comptimeExpr = self.comptime_exprs.items.len },
- };
- const pl_node = data[@intFromEnum(inst)].pl_node;
- const block_expr = try self.getBlockSource(file, parent_src, pl_node.src_node);
- try self.comptime_exprs.append(self.arena, .{
- .code = block_expr,
- });
- return res;
- },
- .block_inline => {
- const pl_node = data[@intFromEnum(inst)].pl_node;
- const extra = file.zir.extraData(Zir.Inst.Block, pl_node.payload_index);
- return self.walkInlineBody(
- file,
- parent_scope,
- try self.srcLocInfo(file, pl_node.src_node, parent_src),
- parent_src,
- file.zir.bodySlice(extra.end, extra.data.body_len),
- need_type,
- call_ctx,
- );
- },
- .break_inline => {
- const @"break" = data[@intFromEnum(inst)].@"break";
- return try self.walkRef(
- file,
- parent_scope,
- parent_src,
- @"break".operand,
- need_type,
- call_ctx,
- );
- },
- .struct_init => {
- const pl_node = data[@intFromEnum(inst)].pl_node;
- const extra = file.zir.extraData(Zir.Inst.StructInit, pl_node.payload_index);
- const field_vals = try self.arena.alloc(
- DocData.Expr.FieldVal,
- extra.data.fields_len,
- );
-
- var type_ref: DocData.Expr = undefined;
- var idx = extra.end;
- for (field_vals) |*fv| {
- const init_extra = file.zir.extraData(Zir.Inst.StructInit.Item, idx);
- defer idx = init_extra.end;
-
- const field_name = blk: {
- const field_inst_index = @intFromEnum(init_extra.data.field_type);
- if (tags[field_inst_index] != .struct_init_field_type) unreachable;
- const field_pl_node = data[field_inst_index].pl_node;
- const field_extra = file.zir.extraData(
- Zir.Inst.FieldType,
- field_pl_node.payload_index,
- );
- const field_src = try self.srcLocInfo(
- file,
- field_pl_node.src_node,
- parent_src,
- );
-
- // On first iteration use field info to find out the struct type
- if (idx == extra.end) {
- const wr = try self.walkRef(
- file,
- parent_scope,
- field_src,
- field_extra.data.container_type,
- false,
- call_ctx,
- );
- type_ref = wr.expr;
- }
- break :blk file.zir.nullTerminatedString(field_extra.data.name_start);
- };
- const value = try self.walkRef(
- file,
- parent_scope,
- parent_src,
- init_extra.data.init,
- need_type,
- call_ctx,
- );
- const exprIdx = self.exprs.items.len;
- try self.exprs.append(self.arena, value.expr);
- var typeRefIdx: ?usize = null;
- if (value.typeRef) |ref| {
- typeRefIdx = self.exprs.items.len;
- try self.exprs.append(self.arena, ref);
- }
- fv.* = .{
- .name = field_name,
- .val = .{
- .typeRef = typeRefIdx,
- .expr = exprIdx,
- },
- };
- }
-
- return DocData.WalkResult{
- .typeRef = type_ref,
- .expr = .{ .@"struct" = field_vals },
- };
- },
- .struct_init_empty,
- .struct_init_empty_result,
- => {
- const un_node = data[@intFromEnum(inst)].un_node;
-
- const operand: DocData.WalkResult = try self.walkRef(
- file,
- parent_scope,
- parent_src,
- un_node.operand,
- false,
- call_ctx,
- );
-
- return DocData.WalkResult{
- .typeRef = operand.expr,
- .expr = .{ .@"struct" = &.{} },
- };
- },
- .struct_init_empty_ref_result => {
- const un_node = data[@intFromEnum(inst)].un_node;
-
- const operand: DocData.WalkResult = try self.walkRef(
- file,
- parent_scope,
- parent_src,
- un_node.operand,
- false,
- call_ctx,
- );
-
- const struct_init_idx = self.exprs.items.len;
- try self.exprs.append(self.arena, .{ .@"struct" = &.{} });
-
- return DocData.WalkResult{
- .typeRef = operand.expr,
- .expr = .{ .@"&" = struct_init_idx },
- };
- },
- .struct_init_anon => {
- const pl_node = data[@intFromEnum(inst)].pl_node;
- const extra = file.zir.extraData(Zir.Inst.StructInitAnon, pl_node.payload_index);
-
- const field_vals = try self.arena.alloc(
- DocData.Expr.FieldVal,
- extra.data.fields_len,
- );
-
- var idx = extra.end;
- for (field_vals) |*fv| {
- const init_extra = file.zir.extraData(Zir.Inst.StructInitAnon.Item, idx);
- const field_name = file.zir.nullTerminatedString(init_extra.data.field_name);
- const value = try self.walkRef(
- file,
- parent_scope,
- parent_src,
- init_extra.data.init,
- need_type,
- call_ctx,
- );
-
- const exprIdx = self.exprs.items.len;
- try self.exprs.append(self.arena, value.expr);
- var typeRefIdx: ?usize = null;
- if (value.typeRef) |ref| {
- typeRefIdx = self.exprs.items.len;
- try self.exprs.append(self.arena, ref);
- }
-
- fv.* = .{
- .name = field_name,
- .val = .{
- .typeRef = typeRefIdx,
- .expr = exprIdx,
- },
- };
-
- idx = init_extra.end;
- }
-
- return DocData.WalkResult{
- .expr = .{ .@"struct" = field_vals },
- };
- },
- .error_set_decl => {
- const pl_node = data[@intFromEnum(inst)].pl_node;
- const extra = file.zir.extraData(Zir.Inst.ErrorSetDecl, pl_node.payload_index);
- const fields = try self.arena.alloc(
- DocData.Type.Field,
- extra.data.fields_len,
- );
- var idx = extra.end;
- for (fields) |*f| {
- const name = file.zir.nullTerminatedString(@enumFromInt(file.zir.extra[idx]));
- idx += 1;
-
- const docs = file.zir.nullTerminatedString(@enumFromInt(file.zir.extra[idx]));
- idx += 1;
-
- f.* = .{
- .name = name,
- .docs = docs,
- };
- }
-
- const type_slot_index = self.types.items.len;
- try self.types.append(self.arena, .{
- .ErrorSet = .{
- .name = "todo errset",
- .fields = fields,
- },
- });
-
- return DocData.WalkResult{
- .typeRef = .{ .type = @intFromEnum(Ref.type_type) },
- .expr = .{ .type = type_slot_index },
- };
- },
- .param_anytype, .param_anytype_comptime => {
- // @check if .param_anytype_comptime can be here
- // Analysis of anytype function params happens in `.func`.
- // This switch case handles the case where an expression depends
- // on an anytype field. E.g.: `fn foo(bar: anytype) @TypeOf(bar)`.
- // This means that we're looking at a generic expression.
- const str_tok = data[@intFromEnum(inst)].str_tok;
- const name = str_tok.get(file.zir);
- const cte_slot_index = self.comptime_exprs.items.len;
- try self.comptime_exprs.append(self.arena, .{
- .code = name,
- });
- return DocData.WalkResult{ .expr = .{ .comptimeExpr = cte_slot_index } };
- },
- .param, .param_comptime => {
- // See .param_anytype for more information.
- const pl_tok = data[@intFromEnum(inst)].pl_tok;
- const extra = file.zir.extraData(Zir.Inst.Param, pl_tok.payload_index);
- const name = file.zir.nullTerminatedString(extra.data.name);
-
- const cte_slot_index = self.comptime_exprs.items.len;
- try self.comptime_exprs.append(self.arena, .{
- .code = name,
- });
- return DocData.WalkResult{ .expr = .{ .comptimeExpr = cte_slot_index } };
- },
- .call => {
- const pl_node = data[@intFromEnum(inst)].pl_node;
- const extra = file.zir.extraData(Zir.Inst.Call, pl_node.payload_index);
-
- const callee = try self.walkRef(
- file,
- parent_scope,
- parent_src,
- extra.data.callee,
- need_type,
- call_ctx,
- );
-
- const args_len = extra.data.flags.args_len;
- var args = try self.arena.alloc(DocData.Expr, args_len);
- const body = file.zir.extra[extra.end..];
-
- try self.repurposed_insts.put(self.arena, inst, {});
- defer _ = self.repurposed_insts.remove(inst);
-
- var i: usize = 0;
- while (i < args_len) : (i += 1) {
- const arg_end = file.zir.extra[extra.end + i];
- const break_index = body[arg_end - 1];
- const ref = data[break_index].@"break".operand;
- // TODO: consider toggling need_type to true if we ever want
- // to show discrepancies between the types of provided
- // arguments and the types declared in the function
- // signature for its parameters.
- const wr = try self.walkRef(
- file,
- parent_scope,
- parent_src,
- ref,
- false,
- &.{
- .inst = inst,
- .prev = call_ctx,
- },
- );
- args[i] = wr.expr;
- }
-
- const cte_slot_index = self.comptime_exprs.items.len;
- try self.comptime_exprs.append(self.arena, .{
- .code = "func call",
- });
-
- const call_slot_index = self.calls.items.len;
- try self.calls.append(self.arena, .{
- .func = callee.expr,
- .args = args,
- .ret = .{ .comptimeExpr = cte_slot_index },
- });
-
- return DocData.WalkResult{
- .typeRef = if (callee.typeRef) |tr| switch (tr) {
- .type => |func_type_idx| switch (self.types.items[func_type_idx]) {
- .Fn => |func| func.ret,
- else => blk: {
- printWithContext(
- file,
- inst,
- "unexpected callee type in walkInstruction.call: `{s}`\n",
- .{@tagName(self.types.items[func_type_idx])},
- );
-
- break :blk null;
- },
- },
- else => null,
- } else null,
- .expr = .{ .call = call_slot_index },
- };
- },
- .field_call => {
- const pl_node = data[@intFromEnum(inst)].pl_node;
- const extra = file.zir.extraData(Zir.Inst.FieldCall, pl_node.payload_index);
-
- const obj_ptr = try self.walkRef(
- file,
- parent_scope,
- parent_src,
- extra.data.obj_ptr,
- need_type,
- call_ctx,
- );
-
- var field_call = try self.arena.alloc(DocData.Expr, 2);
-
- if (obj_ptr.typeRef) |ref| {
- field_call[0] = ref;
- } else {
- field_call[0] = obj_ptr.expr;
- }
- field_call[1] = .{ .declName = file.zir.nullTerminatedString(extra.data.field_name_start) };
- try self.tryResolveRefPath(file, inst, field_call);
-
- const args_len = extra.data.flags.args_len;
- var args = try self.arena.alloc(DocData.Expr, args_len);
- const body = file.zir.extra[extra.end..];
-
- try self.repurposed_insts.put(self.arena, inst, {});
- defer _ = self.repurposed_insts.remove(inst);
-
- var i: usize = 0;
- while (i < args_len) : (i += 1) {
- const arg_end = file.zir.extra[extra.end + i];
- const break_index = body[arg_end - 1];
- const ref = data[break_index].@"break".operand;
- // TODO: consider toggling need_type to true if we ever want
- // to show discrepancies between the types of provided
- // arguments and the types declared in the function
- // signature for its parameters.
- const wr = try self.walkRef(
- file,
- parent_scope,
- parent_src,
- ref,
- false,
- &.{
- .inst = inst,
- .prev = call_ctx,
- },
- );
- args[i] = wr.expr;
- }
-
- const cte_slot_index = self.comptime_exprs.items.len;
- try self.comptime_exprs.append(self.arena, .{
- .code = "field call",
- });
-
- const call_slot_index = self.calls.items.len;
- try self.calls.append(self.arena, .{
- .func = .{ .refPath = field_call },
- .args = args,
- .ret = .{ .comptimeExpr = cte_slot_index },
- });
-
- return DocData.WalkResult{
- .expr = .{ .call = call_slot_index },
- };
- },
- .func, .func_inferred => {
- const type_slot_index = self.types.items.len;
- try self.types.append(self.arena, .{ .Unanalyzed = .{} });
-
- const result = self.analyzeFunction(
- file,
- parent_scope,
- parent_src,
- inst,
- self_ast_node_index,
- type_slot_index,
- tags[@intFromEnum(inst)] == .func_inferred,
- call_ctx,
- );
-
- return result;
- },
- .func_fancy => {
- const type_slot_index = self.types.items.len;
- try self.types.append(self.arena, .{ .Unanalyzed = .{} });
-
- const result = self.analyzeFancyFunction(
- file,
- parent_scope,
- parent_src,
- inst,
- self_ast_node_index,
- type_slot_index,
- call_ctx,
- );
-
- return result;
- },
- .optional_payload_safe, .optional_payload_unsafe => {
- const un_node = data[@intFromEnum(inst)].un_node;
- const operand = try self.walkRef(
- file,
- parent_scope,
- parent_src,
- un_node.operand,
- need_type,
- call_ctx,
- );
- const optional_idx = self.exprs.items.len;
- try self.exprs.append(self.arena, operand.expr);
-
- var typeRef: ?DocData.Expr = null;
- if (operand.typeRef) |ref| {
- switch (ref) {
- .type => |t_index| {
- const t = self.types.items[t_index];
- switch (t) {
- .Optional => |opt| typeRef = opt.child,
- else => {
- printWithContext(file, inst, "Invalid type for optional_payload_*: {}\n", .{t});
- },
- }
- },
- else => {},
- }
- }
-
- return DocData.WalkResult{
- .typeRef = typeRef,
- .expr = .{ .optionalPayload = optional_idx },
- };
- },
- .elem_val_node => {
- const pl_node = data[@intFromEnum(inst)].pl_node;
- const extra = file.zir.extraData(Zir.Inst.Bin, pl_node.payload_index);
- const lhs = try self.walkRef(
- file,
- parent_scope,
- parent_src,
- extra.data.lhs,
- need_type,
- call_ctx,
- );
- const rhs = try self.walkRef(
- file,
- parent_scope,
- parent_src,
- extra.data.rhs,
- need_type,
- call_ctx,
- );
- const lhs_idx = self.exprs.items.len;
- try self.exprs.append(self.arena, lhs.expr);
- const rhs_idx = self.exprs.items.len;
- try self.exprs.append(self.arena, rhs.expr);
- return DocData.WalkResult{
- .expr = .{
- .elemVal = .{
- .lhs = lhs_idx,
- .rhs = rhs_idx,
- },
- },
- };
- },
- .extended => {
- const extended = data[@intFromEnum(inst)].extended;
- switch (extended.opcode) {
- else => {
- printWithContext(
- file,
- inst,
- "TODO: implement `walkInstruction.extended` for {s}",
- .{@tagName(extended.opcode)},
- );
- return self.cteTodo(@tagName(extended.opcode));
- },
- .typeof_peer => {
- // Zir says it's a NodeMultiOp but in this case it's TypeOfPeer
- const extra = file.zir.extraData(Zir.Inst.TypeOfPeer, extended.operand);
- const args = file.zir.refSlice(extra.end, extended.small);
- const array_data = try self.arena.alloc(usize, args.len);
-
- var array_type: ?DocData.Expr = null;
- for (args, 0..) |arg, idx| {
- const wr = try self.walkRef(
- file,
- parent_scope,
- parent_src,
- arg,
- idx == 0,
- call_ctx,
- );
- if (idx == 0) {
- array_type = wr.typeRef;
- }
-
- const expr_index = self.exprs.items.len;
- try self.exprs.append(self.arena, wr.expr);
- array_data[idx] = expr_index;
- }
-
- const type_slot_index = self.types.items.len;
- try self.types.append(self.arena, .{
- .Array = .{
- .len = .{
- .int = .{
- .value = args.len,
- .negated = false,
- },
- },
- .child = .{ .type = 0 },
- },
- });
- const result = DocData.WalkResult{
- .typeRef = .{ .type = type_slot_index },
- .expr = .{ .typeOf_peer = array_data },
- };
-
- return result;
- },
- .opaque_decl => {
- const type_slot_index = self.types.items.len;
- try self.types.append(self.arena, .{ .Unanalyzed = .{} });
-
- var scope: Scope = .{
- .parent = parent_scope,
- .enclosing_type = type_slot_index,
- };
-
- const small: Zir.Inst.OpaqueDecl.Small = @bitCast(extended.small);
- const extra = file.zir.extraData(Zir.Inst.OpaqueDecl, extended.operand);
- var extra_index: usize = extra.end;
-
- const src_info = try self.srcLocInfo(file, extra.data.src_node, parent_src);
-
- const captures_len = if (small.has_captures_len) blk: {
- const captures_len = file.zir.extra[extra_index];
- extra_index += 1;
- break :blk captures_len;
- } else 0;
-
- if (small.has_decls_len) extra_index += 1;
-
- scope.captures = @ptrCast(file.zir.extra[extra_index..][0..captures_len]);
- extra_index += captures_len;
-
- var decl_indexes: std.ArrayListUnmanaged(usize) = .{};
- var priv_decl_indexes: std.ArrayListUnmanaged(usize) = .{};
-
- extra_index = try self.analyzeAllDecls(
- file,
- &scope,
- inst,
- src_info,
- &decl_indexes,
- &priv_decl_indexes,
- call_ctx,
- );
-
- self.types.items[type_slot_index] = .{
- .Opaque = .{
- .name = "todo_name",
- .src = self_ast_node_index,
- .privDecls = priv_decl_indexes.items,
- .pubDecls = decl_indexes.items,
- .parent_container = parent_scope.enclosing_type,
- },
- };
- if (self.ref_paths_pending_on_types.get(type_slot_index)) |paths| {
- for (paths.items) |resume_info| {
- try self.tryResolveRefPath(
- resume_info.file,
- inst,
- resume_info.ref_path,
- );
- }
-
- _ = self.ref_paths_pending_on_types.remove(type_slot_index);
- // TODO: we should deallocate the arraylist that holds all the
- // decl paths. not doing it now since it's arena-allocated
- // anyway, but maybe we should put it elsewhere.
- }
- return DocData.WalkResult{
- .typeRef = .{ .type = @intFromEnum(Ref.type_type) },
- .expr = .{ .type = type_slot_index },
- };
- },
- .variable => {
- const extra = file.zir.extraData(Zir.Inst.ExtendedVar, extended.operand);
-
- const small = @as(Zir.Inst.ExtendedVar.Small, @bitCast(extended.small));
- var extra_index: usize = extra.end;
- if (small.has_lib_name) extra_index += 1;
- if (small.has_align) extra_index += 1;
-
- const var_type = try self.walkRef(
- file,
- parent_scope,
- parent_src,
- extra.data.var_type,
- need_type,
- call_ctx,
- );
-
- var value: DocData.WalkResult = .{
- .typeRef = var_type.expr,
- .expr = .{ .undefined = .{} },
- };
-
- if (small.has_init) {
- const var_init_ref = @as(Ref, @enumFromInt(file.zir.extra[extra_index]));
- const var_init = try self.walkRef(
- file,
- parent_scope,
- parent_src,
- var_init_ref,
- need_type,
- call_ctx,
- );
- value.expr = var_init.expr;
- value.typeRef = var_init.typeRef;
- }
-
- return value;
- },
- .union_decl => {
- const type_slot_index = self.types.items.len;
- try self.types.append(self.arena, .{ .Unanalyzed = .{} });
-
- var scope: Scope = .{
- .parent = parent_scope,
- .enclosing_type = type_slot_index,
- };
-
- const small = @as(Zir.Inst.UnionDecl.Small, @bitCast(extended.small));
- const extra = file.zir.extraData(Zir.Inst.UnionDecl, extended.operand);
- var extra_index: usize = extra.end;
-
- const src_info = try self.srcLocInfo(file, extra.data.src_node, parent_src);
-
- // We delay analysis because union tags can refer to
- // decls defined inside the union itself.
- const tag_type_ref: ?Ref = if (small.has_tag_type) blk: {
- const tag_type = file.zir.extra[extra_index];
- extra_index += 1;
- const tag_ref = @as(Ref, @enumFromInt(tag_type));
- break :blk tag_ref;
- } else null;
-
- const captures_len = if (small.has_captures_len) blk: {
- const captures_len = file.zir.extra[extra_index];
- extra_index += 1;
- break :blk captures_len;
- } else 0;
-
- const body_len = if (small.has_body_len) blk: {
- const body_len = file.zir.extra[extra_index];
- extra_index += 1;
- break :blk body_len;
- } else 0;
-
- const fields_len = if (small.has_fields_len) blk: {
- const fields_len = file.zir.extra[extra_index];
- extra_index += 1;
- break :blk fields_len;
- } else 0;
-
- const layout_expr: ?DocData.Expr = switch (small.layout) {
- .Auto => null,
- else => .{ .enumLiteral = @tagName(small.layout) },
- };
-
- if (small.has_decls_len) extra_index += 1;
-
- scope.captures = @ptrCast(file.zir.extra[extra_index..][0..captures_len]);
- extra_index += captures_len;
-
- var decl_indexes: std.ArrayListUnmanaged(usize) = .{};
- var priv_decl_indexes: std.ArrayListUnmanaged(usize) = .{};
-
- extra_index = try self.analyzeAllDecls(
- file,
- &scope,
- inst,
- src_info,
- &decl_indexes,
- &priv_decl_indexes,
- call_ctx,
- );
-
- // Analyze the tag once all decls have been analyzed
- const tag_type = if (tag_type_ref) |tt_ref| (try self.walkRef(
- file,
- &scope,
- parent_src,
- tt_ref,
- false,
- call_ctx,
- )).expr else null;
-
- // Fields
- extra_index += body_len;
-
- var field_type_refs = try std.ArrayListUnmanaged(DocData.Expr).initCapacity(
- self.arena,
- fields_len,
- );
- var field_name_indexes = try std.ArrayListUnmanaged(usize).initCapacity(
- self.arena,
- fields_len,
- );
- try self.collectUnionFieldInfo(
- file,
- &scope,
- src_info,
- fields_len,
- &field_type_refs,
- &field_name_indexes,
- extra_index,
- call_ctx,
- );
-
- self.ast_nodes.items[self_ast_node_index].fields = field_name_indexes.items;
-
- self.types.items[type_slot_index] = .{
- .Union = .{
- .name = "todo_name",
- .src = self_ast_node_index,
- .privDecls = priv_decl_indexes.items,
- .pubDecls = decl_indexes.items,
- .fields = field_type_refs.items,
- .tag = tag_type,
- .auto_enum = small.auto_enum_tag,
- .parent_container = parent_scope.enclosing_type,
- .layout = layout_expr,
- },
- };
-
- if (self.ref_paths_pending_on_types.get(type_slot_index)) |paths| {
- for (paths.items) |resume_info| {
- try self.tryResolveRefPath(
- resume_info.file,
- inst,
- resume_info.ref_path,
- );
- }
-
- _ = self.ref_paths_pending_on_types.remove(type_slot_index);
- // TODO: we should deallocate the arraylist that holds all the
- // decl paths. not doing it now since it's arena-allocated
- // anyway, but maybe we should put it elsewhere.
- }
-
- return DocData.WalkResult{
- .typeRef = .{ .type = @intFromEnum(Ref.type_type) },
- .expr = .{ .type = type_slot_index },
- };
- },
- .enum_decl => {
- const type_slot_index = self.types.items.len;
- try self.types.append(self.arena, .{ .Unanalyzed = .{} });
-
- var scope: Scope = .{
- .parent = parent_scope,
- .enclosing_type = type_slot_index,
- };
-
- const small = @as(Zir.Inst.EnumDecl.Small, @bitCast(extended.small));
- const extra = file.zir.extraData(Zir.Inst.EnumDecl, extended.operand);
- var extra_index: usize = extra.end;
-
- const src_info = try self.srcLocInfo(file, extra.data.src_node, parent_src);
-
- const tag_type: ?DocData.Expr = if (small.has_tag_type) blk: {
- const tag_type = file.zir.extra[extra_index];
- extra_index += 1;
- const tag_ref = @as(Ref, @enumFromInt(tag_type));
- const wr = try self.walkRef(
- file,
- parent_scope,
- parent_src,
- tag_ref,
- false,
- call_ctx,
- );
- break :blk wr.expr;
- } else null;
-
- const captures_len = if (small.has_captures_len) blk: {
- const captures_len = file.zir.extra[extra_index];
- extra_index += 1;
- break :blk captures_len;
- } else 0;
-
- const body_len = if (small.has_body_len) blk: {
- const body_len = file.zir.extra[extra_index];
- extra_index += 1;
- break :blk body_len;
- } else 0;
-
- const fields_len = if (small.has_fields_len) blk: {
- const fields_len = file.zir.extra[extra_index];
- extra_index += 1;
- break :blk fields_len;
- } else 0;
-
- if (small.has_decls_len) extra_index += 1;
-
- scope.captures = @ptrCast(file.zir.extra[extra_index..][0..captures_len]);
- extra_index += captures_len;
-
- var decl_indexes: std.ArrayListUnmanaged(usize) = .{};
- var priv_decl_indexes: std.ArrayListUnmanaged(usize) = .{};
-
- extra_index = try self.analyzeAllDecls(
- file,
- &scope,
- inst,
- src_info,
- &decl_indexes,
- &priv_decl_indexes,
- call_ctx,
- );
-
- // const body = file.zir.extra[extra_index..][0..body_len];
- extra_index += body_len;
-
- var field_name_indexes: std.ArrayListUnmanaged(usize) = .{};
- var field_values: std.ArrayListUnmanaged(?DocData.Expr) = .{};
- {
- var bit_bag_idx = extra_index;
- var cur_bit_bag: u32 = undefined;
- extra_index += std.math.divCeil(usize, fields_len, 32) catch unreachable;
-
- var idx: usize = 0;
- while (idx < fields_len) : (idx += 1) {
- if (idx % 32 == 0) {
- cur_bit_bag = file.zir.extra[bit_bag_idx];
- bit_bag_idx += 1;
- }
-
- const has_value = @as(u1, @truncate(cur_bit_bag)) != 0;
- cur_bit_bag >>= 1;
-
- const field_name_index: Zir.NullTerminatedString = @enumFromInt(file.zir.extra[extra_index]);
- extra_index += 1;
-
- const doc_comment_index: Zir.NullTerminatedString = @enumFromInt(file.zir.extra[extra_index]);
- extra_index += 1;
-
- const value_expr: ?DocData.Expr = if (has_value) blk: {
- const value_ref = file.zir.extra[extra_index];
- extra_index += 1;
- const value = try self.walkRef(
- file,
- &scope,
- src_info,
- @as(Ref, @enumFromInt(value_ref)),
- false,
- call_ctx,
- );
- break :blk value.expr;
- } else null;
- try field_values.append(self.arena, value_expr);
-
- const field_name = file.zir.nullTerminatedString(field_name_index);
-
- try field_name_indexes.append(self.arena, self.ast_nodes.items.len);
- const doc_comment: ?[]const u8 = if (doc_comment_index != .empty)
- file.zir.nullTerminatedString(doc_comment_index)
- else
- null;
- try self.ast_nodes.append(self.arena, .{
- .name = field_name,
- .docs = doc_comment,
- });
- }
- }
-
- self.ast_nodes.items[self_ast_node_index].fields = field_name_indexes.items;
-
- self.types.items[type_slot_index] = .{
- .Enum = .{
- .name = "todo_name",
- .src = self_ast_node_index,
- .privDecls = priv_decl_indexes.items,
- .pubDecls = decl_indexes.items,
- .tag = tag_type,
- .values = field_values.items,
- .nonexhaustive = small.nonexhaustive,
- .parent_container = parent_scope.enclosing_type,
- },
- };
- if (self.ref_paths_pending_on_types.get(type_slot_index)) |paths| {
- for (paths.items) |resume_info| {
- try self.tryResolveRefPath(
- resume_info.file,
- inst,
- resume_info.ref_path,
- );
- }
-
- _ = self.ref_paths_pending_on_types.remove(type_slot_index);
- // TODO: we should deallocate the arraylist that holds all the
- // decl paths. not doing it now since it's arena-allocated
- // anyway, but maybe we should put it elsewhere.
- }
- return DocData.WalkResult{
- .typeRef = .{ .type = @intFromEnum(Ref.type_type) },
- .expr = .{ .type = type_slot_index },
- };
- },
- .struct_decl => {
- const type_slot_index = self.types.items.len;
- try self.types.append(self.arena, .{ .Unanalyzed = .{} });
-
- var scope: Scope = .{
- .parent = parent_scope,
- .enclosing_type = type_slot_index,
- };
-
- const small = @as(Zir.Inst.StructDecl.Small, @bitCast(extended.small));
- const extra = file.zir.extraData(Zir.Inst.StructDecl, extended.operand);
- var extra_index: usize = extra.end;
-
- const src_info = try self.srcLocInfo(file, extra.data.src_node, parent_src);
-
- const captures_len = if (small.has_captures_len) blk: {
- const captures_len = file.zir.extra[extra_index];
- extra_index += 1;
- break :blk captures_len;
- } else 0;
-
- const fields_len = if (small.has_fields_len) blk: {
- const fields_len = file.zir.extra[extra_index];
- extra_index += 1;
- break :blk fields_len;
- } else 0;
-
- // We don't care about decls yet
- if (small.has_decls_len) extra_index += 1;
-
- scope.captures = @ptrCast(file.zir.extra[extra_index..][0..captures_len]);
- extra_index += captures_len;
-
- var backing_int: ?DocData.Expr = null;
- if (small.has_backing_int) {
- const backing_int_body_len = file.zir.extra[extra_index];
- extra_index += 1; // backing_int_body_len
- if (backing_int_body_len == 0) {
- const backing_int_ref = @as(Ref, @enumFromInt(file.zir.extra[extra_index]));
- const backing_int_res = try self.walkRef(
- file,
- &scope,
- src_info,
- backing_int_ref,
- true,
- call_ctx,
- );
- backing_int = backing_int_res.expr;
- extra_index += 1; // backing_int_ref
- } else {
- const backing_int_body = file.zir.bodySlice(extra_index, backing_int_body_len);
- const break_inst = backing_int_body[backing_int_body.len - 1];
- const operand = data[@intFromEnum(break_inst)].@"break".operand;
- const backing_int_res = try self.walkRef(
- file,
- &scope,
- src_info,
- operand,
- true,
- call_ctx,
- );
- backing_int = backing_int_res.expr;
- extra_index += backing_int_body_len; // backing_int_body_inst
- }
- }
-
- const layout_expr: ?DocData.Expr = switch (small.layout) {
- .Auto => null,
- else => .{ .enumLiteral = @tagName(small.layout) },
- };
-
- var decl_indexes: std.ArrayListUnmanaged(usize) = .{};
- var priv_decl_indexes: std.ArrayListUnmanaged(usize) = .{};
-
- extra_index = try self.analyzeAllDecls(
- file,
- &scope,
- inst,
- src_info,
- &decl_indexes,
- &priv_decl_indexes,
- call_ctx,
- );
-
- // Inside field init bodies, the struct decl instruction is used to refer to the
- // field type during the second pass of analysis.
- try self.repurposed_insts.put(self.arena, inst, {});
- defer _ = self.repurposed_insts.remove(inst);
-
- var field_type_refs: std.ArrayListUnmanaged(DocData.Expr) = .{};
- var field_default_refs: std.ArrayListUnmanaged(?DocData.Expr) = .{};
- var field_name_indexes: std.ArrayListUnmanaged(usize) = .{};
- try self.collectStructFieldInfo(
- file,
- &scope,
- src_info,
- fields_len,
- &field_type_refs,
- &field_default_refs,
- &field_name_indexes,
- extra_index,
- small.is_tuple,
- call_ctx,
- );
-
- self.ast_nodes.items[self_ast_node_index].fields = field_name_indexes.items;
-
- self.types.items[type_slot_index] = .{
- .Struct = .{
- .name = "todo_name",
- .src = self_ast_node_index,
- .privDecls = priv_decl_indexes.items,
- .pubDecls = decl_indexes.items,
- .field_types = field_type_refs.items,
- .field_defaults = field_default_refs.items,
- .is_tuple = small.is_tuple,
- .backing_int = backing_int,
- .line_number = self.ast_nodes.items[self_ast_node_index].line,
- .parent_container = parent_scope.enclosing_type,
- .layout = layout_expr,
- },
- };
- if (self.ref_paths_pending_on_types.get(type_slot_index)) |paths| {
- for (paths.items) |resume_info| {
- try self.tryResolveRefPath(
- resume_info.file,
- inst,
- resume_info.ref_path,
- );
- }
-
- _ = self.ref_paths_pending_on_types.remove(type_slot_index);
- // TODO: we should deallocate the arraylist that holds all the
- // decl paths. not doing it now since it's arena-allocated
- // anyway, but maybe we should put it elsewhere.
- }
- return DocData.WalkResult{
- .typeRef = .{ .type = @intFromEnum(Ref.type_type) },
- .expr = .{ .type = type_slot_index },
- };
- },
- .this => {
- return DocData.WalkResult{
- .typeRef = .{ .type = @intFromEnum(Ref.type_type) },
- .expr = .{
- .this = parent_scope.enclosing_type.?,
- // We know enclosing_type is always present
- // because it's only null for the top-level
- // struct instruction of a file.
- },
- };
- },
- .int_from_error,
- .error_from_int,
- .reify,
- => {
- const extra = file.zir.extraData(Zir.Inst.UnNode, extended.operand).data;
- const bin_index = self.exprs.items.len;
- try self.exprs.append(self.arena, .{ .builtin = .{ .param = 0 } });
- const param = try self.walkRef(
- file,
- parent_scope,
- parent_src,
- extra.operand,
- false,
- call_ctx,
- );
-
- const param_index = self.exprs.items.len;
- try self.exprs.append(self.arena, param.expr);
-
- self.exprs.items[bin_index] = .{ .builtin = .{ .name = @tagName(extended.opcode), .param = param_index } };
-
- return DocData.WalkResult{
- .typeRef = param.typeRef orelse .{ .type = @intFromEnum(Ref.type_type) },
- .expr = .{ .builtinIndex = bin_index },
- };
- },
- .work_item_id,
- .work_group_size,
- .work_group_id,
- => {
- const extra = file.zir.extraData(Zir.Inst.UnNode, extended.operand).data;
- const bin_index = self.exprs.items.len;
- try self.exprs.append(self.arena, .{ .builtin = .{ .param = 0 } });
- const param = try self.walkRef(
- file,
- parent_scope,
- parent_src,
- extra.operand,
- false,
- call_ctx,
- );
-
- const param_index = self.exprs.items.len;
- try self.exprs.append(self.arena, param.expr);
-
- self.exprs.items[bin_index] = .{ .builtin = .{ .name = @tagName(extended.opcode), .param = param_index } };
-
- return DocData.WalkResult{
- // from docs we know they return u32
- .typeRef = .{ .type = @intFromEnum(Ref.u32_type) },
- .expr = .{ .builtinIndex = bin_index },
- };
- },
- .cmpxchg => {
- const extra = file.zir.extraData(Zir.Inst.Cmpxchg, extended.operand).data;
-
- const last_type_index = self.exprs.items.len;
- const last_type = self.exprs.items[last_type_index - 1];
- const type_index = self.exprs.items.len;
- try self.exprs.append(self.arena, last_type);
-
- const ptr_index = self.exprs.items.len;
- const ptr: DocData.WalkResult = try self.walkRef(
- file,
- parent_scope,
- parent_src,
- extra.ptr,
- false,
- call_ctx,
- );
- try self.exprs.append(self.arena, ptr.expr);
-
- const expected_value_index = self.exprs.items.len;
- const expected_value: DocData.WalkResult = try self.walkRef(
- file,
- parent_scope,
- parent_src,
- extra.expected_value,
- false,
- call_ctx,
- );
- try self.exprs.append(self.arena, expected_value.expr);
-
- const new_value_index = self.exprs.items.len;
- const new_value: DocData.WalkResult = try self.walkRef(
- file,
- parent_scope,
- parent_src,
- extra.new_value,
- false,
- call_ctx,
- );
- try self.exprs.append(self.arena, new_value.expr);
-
- const success_order_index = self.exprs.items.len;
- const success_order: DocData.WalkResult = try self.walkRef(
- file,
- parent_scope,
- parent_src,
- extra.success_order,
- false,
- call_ctx,
- );
- try self.exprs.append(self.arena, success_order.expr);
-
- const failure_order_index = self.exprs.items.len;
- const failure_order: DocData.WalkResult = try self.walkRef(
- file,
- parent_scope,
- parent_src,
- extra.failure_order,
- false,
- call_ctx,
- );
- try self.exprs.append(self.arena, failure_order.expr);
-
- const cmpxchg_index = self.exprs.items.len;
- try self.exprs.append(self.arena, .{ .cmpxchg = .{
- .name = @tagName(tags[@intFromEnum(inst)]),
- .type = type_index,
- .ptr = ptr_index,
- .expected_value = expected_value_index,
- .new_value = new_value_index,
- .success_order = success_order_index,
- .failure_order = failure_order_index,
- } });
- return DocData.WalkResult{
- .typeRef = .{ .type = @intFromEnum(Ref.type_type) },
- .expr = .{ .cmpxchgIndex = cmpxchg_index },
- };
- },
- .closure_get => {
- const captured, const scope = parent_scope.getCapture(extended.small);
- switch (captured) {
- .inst => |cap_inst| return self.walkInstruction(file, scope, parent_src, cap_inst, need_type, call_ctx),
- .decl => |str| {
- const decl_status = parent_scope.resolveDeclName(str, file, inst.toOptional());
- return .{ .expr = .{ .declRef = decl_status } };
- },
- }
- },
- }
- },
- }
-}
-
-/// Called by `walkInstruction` when encountering a container type.
-/// Iterates over all decl definitions in its body and it also analyzes each
-/// decl's body recursively by calling into `walkInstruction`.
-///
-/// Does not append to `self.decls` directly because `walkInstruction`
-/// is expected to look-ahead scan all decls and reserve `body_len`
-/// slots in `self.decls`, which are then filled out by this function.
-fn analyzeAllDecls(
- self: *Autodoc,
- file: *File,
- scope: *Scope,
- parent_inst: Zir.Inst.Index,
- parent_src: SrcLocInfo,
- decl_indexes: *std.ArrayListUnmanaged(usize),
- priv_decl_indexes: *std.ArrayListUnmanaged(usize),
- call_ctx: ?*const CallContext,
-) AutodocErrors!usize {
- const first_decl_indexes_slot = decl_indexes.items.len;
- const original_it = file.zir.declIterator(parent_inst);
-
- // First loop to discover decl names
- {
- var it = original_it;
- while (it.next()) |zir_index| {
- const declaration, _ = file.zir.getDeclaration(zir_index);
- if (declaration.name.isNamedTest(file.zir)) continue;
- const decl_name = declaration.name.toString(file.zir) orelse continue;
- try scope.insertDeclRef(self.arena, decl_name, .Pending);
- }
- }
-
- // Second loop to analyze `usingnamespace` decls
- {
- var it = original_it;
- var decl_indexes_slot = first_decl_indexes_slot;
- while (it.next()) |zir_index| : (decl_indexes_slot += 1) {
- const pl_node = file.zir.instructions.items(.data)[@intFromEnum(zir_index)].pl_node;
- const extra = file.zir.extraData(Zir.Inst.Declaration, pl_node.payload_index);
- if (extra.data.name != .@"usingnamespace") continue;
- try self.analyzeUsingnamespaceDecl(
- file,
- scope,
- try self.srcLocInfo(file, pl_node.src_node, parent_src),
- decl_indexes,
- priv_decl_indexes,
- extra.data,
- @intCast(extra.end),
- call_ctx,
- );
- }
- }
-
- // Third loop to analyze all remaining decls
- {
- var it = original_it;
- while (it.next()) |zir_index| {
- const pl_node = file.zir.instructions.items(.data)[@intFromEnum(zir_index)].pl_node;
- const extra = file.zir.extraData(Zir.Inst.Declaration, pl_node.payload_index);
- switch (extra.data.name) {
- .@"comptime", .@"usingnamespace", .unnamed_test, .decltest => continue,
- _ => if (extra.data.name.isNamedTest(file.zir)) continue,
- }
- try self.analyzeDecl(
- file,
- scope,
- try self.srcLocInfo(file, pl_node.src_node, parent_src),
- decl_indexes,
- priv_decl_indexes,
- zir_index,
- extra.data,
- @intCast(extra.end),
- call_ctx,
- );
- }
- }
-
- // Fourth loop to analyze decltests
- var it = original_it;
- while (it.next()) |zir_index| {
- const pl_node = file.zir.instructions.items(.data)[@intFromEnum(zir_index)].pl_node;
- const extra = file.zir.extraData(Zir.Inst.Declaration, pl_node.payload_index);
- if (extra.data.name != .decltest) continue;
- try self.analyzeDecltest(
- file,
- scope,
- try self.srcLocInfo(file, pl_node.src_node, parent_src),
- extra.data,
- @intCast(extra.end),
- );
- }
-
- return it.extra_index;
-}
-
-fn walkInlineBody(
- autodoc: *Autodoc,
- file: *File,
- scope: *Scope,
- block_src: SrcLocInfo,
- parent_src: SrcLocInfo,
- body: []const Zir.Inst.Index,
- need_type: bool,
- call_ctx: ?*const CallContext,
-) AutodocErrors!DocData.WalkResult {
- const tags = file.zir.instructions.items(.tag);
- const break_inst = switch (tags[@intFromEnum(body[body.len - 1])]) {
- .condbr_inline => {
- // Unresolvable.
- const res: DocData.WalkResult = .{
- .typeRef = .{ .type = @intFromEnum(Ref.type_type) },
- .expr = .{ .comptimeExpr = autodoc.comptime_exprs.items.len },
- };
- const source = (try file.getTree(autodoc.zcu.gpa)).getNodeSource(block_src.src_node);
- try autodoc.comptime_exprs.append(autodoc.arena, .{
- .code = source,
- });
- return res;
- },
- .break_inline => body[body.len - 1],
- else => unreachable,
- };
- const break_data = file.zir.instructions.items(.data)[@intFromEnum(break_inst)].@"break";
- return autodoc.walkRef(file, scope, parent_src, break_data.operand, need_type, call_ctx);
-}
-
-// Asserts the given decl is public
-fn analyzeDecl(
- self: *Autodoc,
- file: *File,
- scope: *Scope,
- decl_src: SrcLocInfo,
- decl_indexes: *std.ArrayListUnmanaged(usize),
- priv_decl_indexes: *std.ArrayListUnmanaged(usize),
- decl_inst: Zir.Inst.Index,
- declaration: Zir.Inst.Declaration,
- extra_index: u32,
- call_ctx: ?*const CallContext,
-) AutodocErrors!void {
- const bodies = declaration.getBodies(extra_index, file.zir);
- const name = file.zir.nullTerminatedString(declaration.name.toString(file.zir).?);
-
- const doc_comment: ?[]const u8 = if (declaration.flags.has_doc_comment)
- file.zir.nullTerminatedString(@enumFromInt(file.zir.extra[extra_index]))
- else
- null;
-
- // astnode
- const ast_node_index = idx: {
- const idx = self.ast_nodes.items.len;
- try self.ast_nodes.append(self.arena, .{
- .file = self.files.getIndex(file).?,
- .line = decl_src.line,
- .col = 0,
- .docs = doc_comment,
- .fields = null, // walkInstruction will fill `fields` if necessary
- });
- break :idx idx;
- };
-
- const walk_result = try self.walkInlineBody(
- file,
- scope,
- decl_src,
- decl_src,
- bodies.value_body,
- true,
- call_ctx,
- );
-
- const tree = try file.getTree(self.zcu.gpa);
- const kind_token = tree.nodes.items(.main_token)[decl_src.src_node];
- const kind: []const u8 = switch (tree.tokens.items(.tag)[kind_token]) {
- .keyword_var => "var",
- else => "const",
- };
-
- const decls_slot_index = self.decls.items.len;
- try self.decls.append(self.arena, .{
- .name = name,
- .src = ast_node_index,
- .value = walk_result,
- .kind = kind,
- .parent_container = scope.enclosing_type,
- });
-
- if (declaration.flags.is_pub) {
- try decl_indexes.append(self.arena, decls_slot_index);
- } else {
- try priv_decl_indexes.append(self.arena, decls_slot_index);
- }
-
- const decl_status_ptr = scope.resolveDeclName(declaration.name.toString(file.zir).?, file, .none);
- std.debug.assert(decl_status_ptr.* == .Pending);
- decl_status_ptr.* = .{ .Analyzed = decls_slot_index };
-
- // Unblock any pending decl path that was waiting for this decl.
- if (self.ref_paths_pending_on_decls.get(decl_status_ptr)) |paths| {
- for (paths.items) |resume_info| {
- try self.tryResolveRefPath(
- resume_info.file,
- decl_inst,
- resume_info.ref_path,
- );
- }
-
- _ = self.ref_paths_pending_on_decls.remove(decl_status_ptr);
- // TODO: we should deallocate the arraylist that holds all the
- // ref paths. not doing it now since it's arena-allocated
- // anyway, but maybe we should put it elsewhere.
- }
-}
-
-fn analyzeUsingnamespaceDecl(
- self: *Autodoc,
- file: *File,
- scope: *Scope,
- decl_src: SrcLocInfo,
- decl_indexes: *std.ArrayListUnmanaged(usize),
- priv_decl_indexes: *std.ArrayListUnmanaged(usize),
- declaration: Zir.Inst.Declaration,
- extra_index: u32,
- call_ctx: ?*const CallContext,
-) AutodocErrors!void {
- const bodies = declaration.getBodies(extra_index, file.zir);
-
- const doc_comment: ?[]const u8 = if (declaration.flags.has_doc_comment)
- file.zir.nullTerminatedString(@enumFromInt(file.zir.extra[extra_index]))
- else
- null;
-
- // astnode
- const ast_node_index = idx: {
- const idx = self.ast_nodes.items.len;
- try self.ast_nodes.append(self.arena, .{
- .file = self.files.getIndex(file).?,
- .line = decl_src.line,
- .col = 0,
- .docs = doc_comment,
- .fields = null, // walkInstruction will fill `fields` if necessary
- });
- break :idx idx;
- };
-
- const walk_result = try self.walkInlineBody(
- file,
- scope,
- decl_src,
- decl_src,
- bodies.value_body,
- true,
- call_ctx,
- );
-
- const decl_slot_index = self.decls.items.len;
- try self.decls.append(self.arena, .{
- .name = "",
- .kind = "",
- .src = ast_node_index,
- .value = walk_result,
- .is_uns = true,
- .parent_container = scope.enclosing_type,
- });
-
- if (declaration.flags.is_pub) {
- try decl_indexes.append(self.arena, decl_slot_index);
- } else {
- try priv_decl_indexes.append(self.arena, decl_slot_index);
- }
-}
-
-fn analyzeDecltest(
- self: *Autodoc,
- file: *File,
- scope: *Scope,
- decl_src: SrcLocInfo,
- declaration: Zir.Inst.Declaration,
- extra_index: u32,
-) AutodocErrors!void {
- std.debug.assert(declaration.flags.has_doc_comment);
- const decl_name_index: Zir.NullTerminatedString = @enumFromInt(file.zir.extra[extra_index]);
-
- const test_source_code = (try file.getTree(self.zcu.gpa)).getNodeSource(decl_src.src_node);
-
- const decl_name: ?[]const u8 = if (decl_name_index != .empty)
- file.zir.nullTerminatedString(decl_name_index)
- else
- null;
-
- // astnode
- const ast_node_index = idx: {
- const idx = self.ast_nodes.items.len;
- try self.ast_nodes.append(self.arena, .{
- .file = self.files.getIndex(file).?,
- .line = decl_src.line,
- .col = 0,
- .name = decl_name,
- .code = test_source_code,
- });
- break :idx idx;
- };
-
- const decl_status = scope.resolveDeclName(decl_name_index, file, .none);
-
- switch (decl_status.*) {
- .Analyzed => |idx| {
- self.decls.items[idx].decltest = ast_node_index;
- },
- else => unreachable, // we assume analyzeAllDecls analyzed other decls by this point
- }
-}
-
-/// An unresolved path has a non-string WalkResult at its beginnig, while every
-/// other element is a string WalkResult. Resolving means iteratively map each
-/// string to a Decl / Type / Call / etc.
-///
-/// If we encounter an unanalyzed decl during the process, we append the
-/// unsolved sub-path to `self.ref_paths_pending_on_decls` and bail out.
-/// Same happens when a decl holds a type definition that hasn't been fully
-/// analyzed yet (except that we append to `self.ref_paths_pending_on_types`.
-///
-/// When analyzeAllDecls / walkInstruction finishes analyzing a decl / type, it will
-/// then check if there's any pending ref path blocked on it and, if any, it
-/// will progress their resolution by calling tryResolveRefPath again.
-///
-/// Ref paths can also depend on other ref paths. See
-/// `self.pending_ref_paths` for more info.
-///
-/// A ref path that has a component that resolves into a comptimeExpr will
-/// give up its resolution process entirely, leaving the remaining components
-/// as strings.
-fn tryResolveRefPath(
- self: *Autodoc,
- /// File from which the decl path originates.
- file: *File,
- inst: Zir.Inst.Index, // used only for panicWithContext
- path: []DocData.Expr,
-) AutodocErrors!void {
- var i: usize = 0;
- outer: while (i < path.len - 1) : (i += 1) {
- const parent = path[i];
- const child_string = path[i + 1].declName; // we expect to find an unsolved decl
-
- var resolved_parent = parent;
- var j: usize = 0;
- while (j < 10_000) : (j += 1) {
- switch (resolved_parent) {
- else => break,
- .this => |t| resolved_parent = .{ .type = t },
- .declIndex => |decl_index| {
- const decl = self.decls.items[decl_index];
- resolved_parent = decl.value.expr;
- continue;
- },
- .declRef => |decl_status_ptr| {
- // NOTE: must be kep in sync with `findNameInUnsDecls`
- switch (decl_status_ptr.*) {
- // The use of unreachable here is conservative.
- // It might be that it truly should be up to us to
- // request the analys of this decl, but it's not clear
- // at the moment of writing.
- .NotRequested => unreachable,
- .Analyzed => |decl_index| {
- const decl = self.decls.items[decl_index];
- resolved_parent = decl.value.expr;
- continue;
- },
- .Pending => {
- // This decl path is pending completion
- {
- const res = try self.pending_ref_paths.getOrPut(
- self.arena,
- &path[path.len - 1],
- );
- if (!res.found_existing) res.value_ptr.* = .{};
- }
-
- const res = try self.ref_paths_pending_on_decls.getOrPut(
- self.arena,
- decl_status_ptr,
- );
- if (!res.found_existing) res.value_ptr.* = .{};
- try res.value_ptr.*.append(self.arena, .{
- .file = file,
- .ref_path = path[i..path.len],
- });
-
- // We return instead doing `break :outer` to prevent the
- // code after the :outer while loop to run, as it assumes
- // that the path will have been fully analyzed (or we
- // have given up because of a comptimeExpr).
- return;
- },
- }
- },
- .refPath => |rp| {
- if (self.pending_ref_paths.getPtr(&rp[rp.len - 1])) |waiter_list| {
- try waiter_list.append(self.arena, .{
- .file = file,
- .ref_path = path[i..path.len],
- });
-
- // This decl path is pending completion
- {
- const res = try self.pending_ref_paths.getOrPut(
- self.arena,
- &path[path.len - 1],
- );
- if (!res.found_existing) res.value_ptr.* = .{};
- }
-
- return;
- }
-
- // If the last element is a declName or a CTE, then we give up,
- // otherwise we resovle the parent to it and loop again.
- // NOTE: we assume that if we find a string, it's because of
- // a CTE component somewhere in the path. We know that the path
- // is not pending futher evaluation because we just checked!
- const last = rp[rp.len - 1];
- switch (last) {
- .comptimeExpr, .declName => break :outer,
- else => {
- resolved_parent = last;
- continue;
- },
- }
- },
- .fieldVal => |fv| {
- resolved_parent = self.exprs.items[fv.val.expr];
- },
- }
- } else {
- panicWithContext(
- file,
- inst,
- "exhausted eval quota for `{}`in tryResolveRefPath\n",
- .{resolved_parent},
- );
- }
-
- switch (resolved_parent) {
- else => {
- // NOTE: indirect references to types / decls should be handled
- // in the switch above this one!
- printWithContext(
- file,
- inst,
- "TODO: handle `{s}`in tryResolveRefPath\nInfo: {}",
- .{ @tagName(resolved_parent), resolved_parent },
- );
- // path[i + 1] = (try self.cteTodo("
");
-}
-
-fn writeEscapedLines(out: anytype, text: []const u8) !void {
- for (text) |char| {
- if (char == '\n') {
- try out.writeAll(end_line);
- line_counter += 1;
- try out.print(start_line, .{line_counter});
- } else {
- try writeEscaped(out, &[_]u8{char});
- }
- }
-}
-
-fn writeEscaped(out: anytype, input: []const u8) !void {
- for (input) |c| {
- try switch (c) {
- '&' => out.writeAll("&"),
- '<' => out.writeAll("<"),
- '>' => out.writeAll(">"),
- '"' => out.writeAll("""),
- else => out.writeByte(c),
- };
- }
-}
-
-const builtin_types = [_][]const u8{
- "f16", "f32", "f64", "f80", "f128",
- "c_longdouble", "c_short", "c_ushort", "c_int", "c_uint",
- "c_long", "c_ulong", "c_longlong", "c_ulonglong", "c_char",
- "anyopaque", "void", "bool", "isize", "usize",
- "noreturn", "type", "anyerror", "comptime_int", "comptime_float",
-};
-
-fn isType(name: []const u8) bool {
- for (builtin_types) |t| {
- if (mem.eql(u8, t, name))
- return true;
- }
- return false;
-}
diff --git a/src/main.zig b/src/main.zig
index 05c9149b53..5a187c65e9 100644
--- a/src/main.zig
+++ b/src/main.zig
@@ -98,6 +98,7 @@ const normal_usage =
\\
\\ env Print lib path, std path, cache directory, and version
\\ help Print this help and exit
+ \\ std View standard library documentation in a browser
\\ libc Display native libc paths file or validate one
\\ targets List available compilation targets
\\ version Print version number and exit
@@ -309,6 +310,14 @@ fn mainArgs(gpa: Allocator, arena: Allocator, args: []const []const u8) !void {
.root_src_path = "libc.zig",
.prepend_zig_lib_dir_path = true,
});
+ } else if (mem.eql(u8, cmd, "std")) {
+ return jitCmd(gpa, arena, cmd_args, .{
+ .cmd_name = "std",
+ .root_src_path = "std-docs.zig",
+ .prepend_zig_lib_dir_path = true,
+ .prepend_zig_exe_path = true,
+ .prepend_global_cache_path = true,
+ });
} else if (mem.eql(u8, cmd, "init")) {
return cmdInit(gpa, arena, cmd_args);
} else if (mem.eql(u8, cmd, "targets")) {
@@ -5556,6 +5565,8 @@ const JitCmdOptions = struct {
cmd_name: []const u8,
root_src_path: []const u8,
prepend_zig_lib_dir_path: bool = false,
+ prepend_global_cache_path: bool = false,
+ prepend_zig_exe_path: bool = false,
depend_on_aro: bool = false,
capture: ?*[]u8 = null,
};
@@ -5714,6 +5725,10 @@ fn jitCmd(
if (options.prepend_zig_lib_dir_path)
child_argv.appendAssumeCapacity(zig_lib_directory.path.?);
+ if (options.prepend_zig_exe_path)
+ child_argv.appendAssumeCapacity(self_exe_path);
+ if (options.prepend_global_cache_path)
+ child_argv.appendAssumeCapacity(global_cache_directory.path.?);
child_argv.appendSliceAssumeCapacity(args);
" ++ start_line, .{line_counter});
- var tokenizer = std.zig.Tokenizer.init(src);
- var index: usize = 0;
- var next_tok_is_fn = false;
- while (true) {
- const prev_tok_was_fn = next_tok_is_fn;
- next_tok_is_fn = false;
-
- const token = tokenizer.next();
- if (mem.indexOf(u8, src[index..token.loc.start], "//")) |comment_start_off| {
- // render one comment
- const comment_start = index + comment_start_off;
- const comment_end_off = mem.indexOf(u8, src[comment_start..token.loc.start], "\n");
- const comment_end = if (comment_end_off) |o| comment_start + o else token.loc.start;
-
- try writeEscapedLines(out, src[index..comment_start]);
- try out.writeAll("");
- try writeEscaped(out, src[comment_start..comment_end]);
- try out.writeAll("\n");
- index = comment_end;
- tokenizer.index = index;
- continue;
- }
-
- try writeEscapedLines(out, src[index..token.loc.start]);
- switch (token.tag) {
- .eof => break,
-
- .keyword_addrspace,
- .keyword_align,
- .keyword_and,
- .keyword_asm,
- .keyword_async,
- .keyword_await,
- .keyword_break,
- .keyword_catch,
- .keyword_comptime,
- .keyword_const,
- .keyword_continue,
- .keyword_defer,
- .keyword_else,
- .keyword_enum,
- .keyword_errdefer,
- .keyword_error,
- .keyword_export,
- .keyword_extern,
- .keyword_for,
- .keyword_if,
- .keyword_inline,
- .keyword_noalias,
- .keyword_noinline,
- .keyword_nosuspend,
- .keyword_opaque,
- .keyword_or,
- .keyword_orelse,
- .keyword_packed,
- .keyword_anyframe,
- .keyword_pub,
- .keyword_resume,
- .keyword_return,
- .keyword_linksection,
- .keyword_callconv,
- .keyword_struct,
- .keyword_suspend,
- .keyword_switch,
- .keyword_test,
- .keyword_threadlocal,
- .keyword_try,
- .keyword_union,
- .keyword_unreachable,
- .keyword_usingnamespace,
- .keyword_var,
- .keyword_volatile,
- .keyword_allowzero,
- .keyword_while,
- .keyword_anytype,
- => {
- try out.writeAll("");
- try writeEscaped(out, src[token.loc.start..token.loc.end]);
- try out.writeAll("");
- },
-
- .keyword_fn => {
- try out.writeAll("");
- try writeEscaped(out, src[token.loc.start..token.loc.end]);
- try out.writeAll("");
- next_tok_is_fn = true;
- },
-
- .string_literal,
- .char_literal,
- => {
- try out.writeAll("");
- try writeEscaped(out, src[token.loc.start..token.loc.end]);
- try out.writeAll("");
- },
-
- .multiline_string_literal_line => {
- if (src[token.loc.end - 1] == '\n') {
- try out.writeAll("");
- try writeEscaped(out, src[token.loc.start .. token.loc.end - 1]);
- line_counter += 1;
- try out.print("" ++ end_line ++ "\n" ++ start_line, .{line_counter});
- } else {
- try out.writeAll("");
- try writeEscaped(out, src[token.loc.start..token.loc.end]);
- try out.writeAll("");
- }
- },
-
- .builtin => {
- try out.writeAll("");
- try writeEscaped(out, src[token.loc.start..token.loc.end]);
- try out.writeAll("");
- },
-
- .doc_comment,
- .container_doc_comment,
- => {
- try out.writeAll("");
- try writeEscaped(out, src[token.loc.start..token.loc.end]);
- try out.writeAll("");
- },
-
- .identifier => {
- const tok_bytes = src[token.loc.start..token.loc.end];
- if (mem.eql(u8, tok_bytes, "undefined") or
- mem.eql(u8, tok_bytes, "null") or
- mem.eql(u8, tok_bytes, "true") or
- mem.eql(u8, tok_bytes, "false"))
- {
- try out.writeAll("");
- try writeEscaped(out, tok_bytes);
- try out.writeAll("");
- } else if (prev_tok_was_fn) {
- try out.writeAll("");
- try writeEscaped(out, tok_bytes);
- try out.writeAll("");
- } else {
- const is_int = blk: {
- if (src[token.loc.start] != 'i' and src[token.loc.start] != 'u')
- break :blk false;
- var i = token.loc.start + 1;
- if (i == token.loc.end)
- break :blk false;
- while (i != token.loc.end) : (i += 1) {
- if (src[i] < '0' or src[i] > '9')
- break :blk false;
- }
- break :blk true;
- };
- if (is_int or isType(tok_bytes)) {
- try out.writeAll("");
- try writeEscaped(out, tok_bytes);
- try out.writeAll("");
- } else {
- try writeEscaped(out, tok_bytes);
- }
- }
- },
-
- .number_literal => {
- try out.writeAll("");
- try writeEscaped(out, src[token.loc.start..token.loc.end]);
- try out.writeAll("");
- },
-
- .bang,
- .pipe,
- .pipe_pipe,
- .pipe_equal,
- .equal,
- .equal_equal,
- .equal_angle_bracket_right,
- .bang_equal,
- .l_paren,
- .r_paren,
- .semicolon,
- .percent,
- .percent_equal,
- .l_brace,
- .r_brace,
- .l_bracket,
- .r_bracket,
- .period,
- .period_asterisk,
- .ellipsis2,
- .ellipsis3,
- .caret,
- .caret_equal,
- .plus,
- .plus_plus,
- .plus_equal,
- .plus_percent,
- .plus_percent_equal,
- .plus_pipe,
- .plus_pipe_equal,
- .minus,
- .minus_equal,
- .minus_percent,
- .minus_percent_equal,
- .minus_pipe,
- .minus_pipe_equal,
- .asterisk,
- .asterisk_equal,
- .asterisk_asterisk,
- .asterisk_percent,
- .asterisk_percent_equal,
- .asterisk_pipe,
- .asterisk_pipe_equal,
- .arrow,
- .colon,
- .slash,
- .slash_equal,
- .comma,
- .ampersand,
- .ampersand_equal,
- .question_mark,
- .angle_bracket_left,
- .angle_bracket_left_equal,
- .angle_bracket_angle_bracket_left,
- .angle_bracket_angle_bracket_left_equal,
- .angle_bracket_angle_bracket_left_pipe,
- .angle_bracket_angle_bracket_left_pipe_equal,
- .angle_bracket_right,
- .angle_bracket_right_equal,
- .angle_bracket_angle_bracket_right,
- .angle_bracket_angle_bracket_right_equal,
- .tilde,
- => try writeEscaped(out, src[token.loc.start..token.loc.end]),
-
- .invalid, .invalid_periodasterisks => return error.ParseError,
- }
- index = token.loc.end;
- }
- try out.writeAll(end_line ++ "