Commit 794c6921 authored by protolambda, committed by GitHub

Snapshots go scripts (#11338)

* contracts-bedrock: Go version of generate-invariant-docs

* contracts-bedrock: Go version of generate-snapshots
parent d616ae13
......@@ -17,7 +17,7 @@
"prebuild": "./scripts/checks/check-foundry-install.sh",
"build": "forge build",
"build:go-ffi": "(cd scripts/go-ffi && go build)",
"autogen:invariant-docs": "npx tsx scripts/autogen/generate-invariant-docs.ts",
"autogen:invariant-docs": "go run ./scripts/autogen/generate-invariant-docs .",
"test": "pnpm build:go-ffi && forge test",
"test:kontrol": "./test/kontrol/scripts/run-kontrol.sh script",
"genesis": "forge script scripts/L2Genesis.s.sol:L2Genesis --sig 'runWithStateDump()'",
......@@ -29,7 +29,7 @@
"gas-snapshot": "pnpm build:go-ffi && pnpm gas-snapshot:no-build",
"kontrol-summary": "./test/kontrol/scripts/make-summary-deployment.sh",
"kontrol-summary-fp": "KONTROL_FP_DEPLOYMENT=true pnpm kontrol-summary",
"snapshots": "forge build && npx tsx scripts/autogen/generate-snapshots.ts && pnpm kontrol-summary-fp && pnpm kontrol-summary",
"snapshots": "forge build && go run ./scripts/autogen/generate-snapshots . && pnpm kontrol-summary-fp && pnpm kontrol-summary",
"snapshots:check": "./scripts/checks/check-snapshots.sh",
"semver-lock": "forge script scripts/SemverLock.s.sol",
"validate-deploy-configs": "./scripts/checks/check-deploy-configs.sh",
......
import fs from 'fs'
import path from 'path'
// Contracts-bedrock root, resolved relative to this script's location.
const ROOT_DIR = path.join(__dirname, '..', '..')
// Directory containing the invariant test contracts (`*.t.sol`).
const BASE_INVARIANTS_DIR = path.join(ROOT_DIR, 'test', 'invariants')
// Output directory for the generated markdown docs.
const BASE_DOCS_DIR = path.join(ROOT_DIR, 'invariant-docs')
// Relative link prefix from the generated docs back to the test sources.
const BASE_INVARIANT_GH_URL = '../test/invariants/'
// Natspec tag that marks an invariant doc comment.
const NATSPEC_INV = '@custom:invariant'

// Represents an invariant test contract
type Contract = {
  name: string
  fileName: string
  docs: InvariantDoc[]
}

// Represents the documentation of an invariant
type InvariantDoc = {
  header?: string
  desc?: string
  lineNo?: number
}

// Markdown files already written during this run; used to decide between
// overwriting and appending. Typed explicitly — a bare `[]` is an implicit
// `any[]` under `strict`.
const writtenFiles: string[] = []
// Lazy-parses all test files in the `test/invariants` directory
// to generate documentation on all invariant tests.
//
// For each file in `dir`, scans for `/// @custom:invariant` natspec comments,
// collects them into per-contract `Contract` records, then renders one
// markdown file per contract into `BASE_DOCS_DIR` (appending when the file
// was already written during this run).
const docGen = (dir: string): void => {
  // Grab all files within the invariants test dir
  const files = fs.readdirSync(dir)

  // Array to store all found invariant documentation comments.
  const docs: Contract[] = []

  for (const fileName of files) {
    // Read the contents of the invariant test file.
    const fileContents = fs.readFileSync(path.join(dir, fileName)).toString()

    // Split the file into individual lines and trim whitespace.
    const lines = fileContents.split('\n').map((line: string) => line.trim())

    // Create an object to store all invariant test docs for the current contract
    const name = fileName.replace('.t.sol', '')
    const contract: Contract = { name, fileName, docs: [] }

    let currentDoc: InvariantDoc

    // Loop through all lines to find comments.
    for (let i = 0; i < lines.length; i++) {
      let line = lines[i]
      // We have an invariant doc
      if (line.startsWith(`/// ${NATSPEC_INV}`)) {
        // Assign the header of the invariant doc.
        // TODO: Handle ambiguous case for `INVARIANT: ` prefix.
        currentDoc = {
          header: line.replace(`/// ${NATSPEC_INV}`, '').trim(),
          desc: '',
        }

        // If the header is multi-line, continue appending to the `currentDoc`'s header.
        // The header ends at a bare `///` line or the first non-comment line.
        line = lines[++i]
        while (line.startsWith(`///`) && line.trim() !== '///') {
          currentDoc.header += ` ${line.replace(`///`, '').trim()}`
          line = lines[++i]
        }

        // Process the description: consume `///` lines until the comment block
        // ends. Non-empty lines join the description; bare `///` lines become
        // linebreaks.
        // NOTE(review): `lines[++i]` is not bounds-checked — a doc comment that
        // runs to EOF would read past the array; confirm inputs always end the
        // comment block before EOF (the Go port guards this explicitly).
        while ((line = lines[++i]).startsWith('///')) {
          line = line.replace('///', '').trim()
          // If the line has any contents, insert it into the desc.
          // Otherwise, consider it a linebreak.
          currentDoc.desc += line.length > 0 ? `${line} ` : '\n'
        }

        // Set the line number of the test: `i` is the 0-based index of the
        // line that ended the comment block, so `i + 1` is its 1-based number.
        currentDoc.lineNo = i + 1

        // Add the doc to the contract
        contract.docs.push(currentDoc)
      }
    }

    // Add the contract to the array of docs
    docs.push(contract)
  }

  for (const contract of docs) {
    const fileName = path.join(BASE_DOCS_DIR, `${contract.name}.md`)
    const alreadyWritten = writtenFiles.includes(fileName)

    // If the file has already been written, append the extra docs to the end.
    // Otherwise, write the file from scratch.
    fs.writeFileSync(
      fileName,
      alreadyWritten
        ? `${fs.readFileSync(fileName)}\n${renderContractDoc(contract, false)}`
        : renderContractDoc(contract, true)
    )

    // If the file was just written for the first time, add it to the list of written files.
    if (!alreadyWritten) {
      writtenFiles.push(fileName)
    }
  }

  console.log(
    `Generated invariant test documentation for:\n - ${
      docs.length
    } contracts\n - ${docs.reduce(
      (acc: number, contract: Contract) => acc + contract.docs.length,
      0
    )} invariant tests\nsuccessfully!`
  )
}
// Generate a table of contents for all invariant docs and place it in the README.
const tocGen = (): void => {
  const autoTOCPrefix = '<!-- START autoTOC -->\n'
  const autoTOCPostfix = '<!-- END autoTOC -->\n'

  // Every markdown doc in `BASE_DOCS_DIR` except the README itself gets an entry.
  const entries: string[] = []
  for (const fileName of fs.readdirSync(BASE_DOCS_DIR)) {
    if (fileName !== 'README.md') {
      entries.push(`- [${fileName.replace('.md', '')}](./${fileName})`)
    }
  }
  const toc = `${autoTOCPrefix}\n## Table of Contents\n${entries.join(
    '\n'
  )}\n${autoTOCPostfix}`

  // Splice the generated section between the autoTOC markers in the README.
  const readmePath = path.join(BASE_DOCS_DIR, 'README.md')
  const readmeContents = fs.readFileSync(readmePath).toString()
  const above = readmeContents.split(autoTOCPrefix)[0]
  const below = readmeContents.split(autoTOCPostfix)[1]
  fs.writeFileSync(readmePath, `${above}${toc}${below}`)
}
// Render a `Contract` object into valid markdown.
const renderContractDoc = (contract: Contract, header: boolean): string => {
  const title = header ? `# \`${contract.name}\` Invariants\n` : ''
  const sections: string[] = []
  for (const doc of contract.docs) {
    const anchor = `${contract.fileName}#L${doc.lineNo}`
    sections.push(
      `## ${doc.header}\n**Test:** [\`${anchor}\`](${BASE_INVARIANT_GH_URL}${anchor})\n\n${doc.desc}`
    )
  }
  return `${title}\n${sections.join('\n\n')}`
}
// Script entrypoint: generate per-contract invariant docs, then refresh the
// README table of contents.

// Forge
console.log('Generating docs for forge invariants...')
docGen(BASE_INVARIANTS_DIR)

// New line between the two phases' output.
console.log()

// Generate an updated table of contents
tocGen()
package main
import (
"flag"
"fmt"
"os"
"path/filepath"
"strings"
)
const (
	// NatspecInv is the natspec tag that marks an invariant doc comment.
	NatspecInv = "@custom:invariant"
	// BaseInvariantGhUrl is the relative link prefix from the generated docs
	// back to the invariant test sources.
	BaseInvariantGhUrl = "../test/invariants/"
)
// Contract represents an invariant test contract
type Contract struct {
	Name     string         // contract name, i.e. the file name without `.t.sol`
	FileName string         // source file name of the invariant test
	Docs     []InvariantDoc // all invariant docs found in the file
}

// InvariantDoc represents the documentation of an invariant
type InvariantDoc struct {
	Header string // header text from the `@custom:invariant` natspec line(s)
	Desc   string // free-form description following the header
	LineNo int    // 1-based line number the doc refers to
}

// writtenFiles tracks the markdown files already written during this run, so
// later contracts mapping to the same file append instead of overwriting.
var writtenFiles []string
// Generate the docs: takes the contracts-bedrock root as the single CLI
// argument, writes per-contract invariant docs, then refreshes the TOC.
func main() {
	flag.Parse()
	if flag.NArg() != 1 {
		fmt.Println("Expected path of contracts-bedrock as CLI argument")
		os.Exit(1)
	}
	root := flag.Arg(0)

	invariants := filepath.Join(root, "test/invariants")
	fmt.Printf("invariants dir: %s\n", invariants)
	docs := filepath.Join(root, "invariant-docs")
	fmt.Printf("invariant docs dir: %s\n", docs)

	// Forge
	fmt.Println("Generating docs for forge invariants...")
	if err := docGen(invariants, docs); err != nil {
		fmt.Printf("Failed to generate invariant docs: %v\n", err)
		os.Exit(1)
	}

	fmt.Println("Generating table-of-contents...")
	// Generate an updated table of contents
	if err := tocGen(docs); err != nil {
		fmt.Printf("Failed to generate TOC of docs: %v\n", err)
		os.Exit(1)
	}
	fmt.Println("Done!")
}
// Lazy-parses all test files in the `test/invariants` directory
// to generate documentation on all invariant tests.
//
// For every file in invariantsDir, scans for `/// @custom:invariant` natspec
// comments, gathers them into per-contract records, and writes one markdown
// file per contract into docsDir (appending when a file was already written
// during this run).
func docGen(invariantsDir, docsDir string) error {
	// Grab all files within the invariants test dir
	files, err := os.ReadDir(invariantsDir)
	if err != nil {
		return fmt.Errorf("error reading directory: %w", err)
	}

	// Array to store all found invariant documentation comments.
	var docs []Contract
	for _, file := range files {
		// Read the contents of the invariant test file.
		fileName := file.Name()
		filePath := filepath.Join(invariantsDir, fileName)
		fileContents, err := os.ReadFile(filePath)
		if err != nil {
			return fmt.Errorf("error reading file %q: %w", filePath, err)
		}

		// Split the file into individual lines and trim whitespace.
		lines := strings.Split(string(fileContents), "\n")
		for i, line := range lines {
			lines[i] = strings.TrimSpace(line)
		}

		// Create an object to store all invariant test docs for the current contract
		name := strings.Replace(fileName, ".t.sol", "", 1)
		contract := Contract{Name: name, FileName: fileName}

		var currentDoc InvariantDoc

		// Loop through all lines to find comments.
		for i := 0; i < len(lines); i++ {
			line := lines[i]
			// We have an invariant doc
			if strings.HasPrefix(line, "/// "+NatspecInv) {
				// Assign the header of the invariant doc.
				currentDoc = InvariantDoc{
					Header: strings.TrimSpace(strings.Replace(line, "/// "+NatspecInv, "", 1)),
					Desc:   "",
				}
				i++
				// If the header is multi-line, continue appending to the `currentDoc`'s header.
				// The header ends at a bare `///` line, a non-comment line, or EOF.
				for {
					if i >= len(lines) {
						break
					}
					line = lines[i]
					i++
					if !(strings.HasPrefix(line, "///") && strings.TrimSpace(line) != "///") {
						break
					}
					currentDoc.Header += " " + strings.TrimSpace(strings.Replace(line, "///", "", 1))
				}
				// Process the description: consume `///` lines until the comment
				// block ends. Non-empty lines join the description; bare `///`
				// lines become linebreaks.
				for {
					if i >= len(lines) {
						break
					}
					line = lines[i]
					i++
					if !strings.HasPrefix(line, "///") {
						break
					}
					line = strings.TrimSpace(strings.Replace(line, "///", "", 1))
					// If the line has any contents, insert it into the desc.
					// Otherwise, consider it a linebreak.
					if len(line) > 0 {
						currentDoc.Desc += line + " "
					} else {
						currentDoc.Desc += "\n"
					}
				}
				// Set the line number of the test.
				// NOTE(review): `i` has already been advanced past the line that
				// ended the comment block, so this records that line's 1-based
				// number — presumably the test definition; confirm against the
				// links the rendered docs produce.
				currentDoc.LineNo = i
				// Add the doc to the contract
				contract.Docs = append(contract.Docs, currentDoc)
			}
		}

		// Add the contract to the array of docs
		docs = append(docs, contract)
	}

	for _, contract := range docs {
		filePath := filepath.Join(docsDir, contract.Name+".md")
		alreadyWritten := contains(writtenFiles, filePath)

		// If the file has already been written, append the extra docs to the end.
		// Otherwise, write the file from scratch.
		var fileContent string
		if alreadyWritten {
			existingContent, err := os.ReadFile(filePath)
			if err != nil {
				return fmt.Errorf("error reading existing file %q: %w", filePath, err)
			}
			fileContent = string(existingContent) + "\n" + renderContractDoc(contract, false)
		} else {
			fileContent = renderContractDoc(contract, true)
		}
		err = os.WriteFile(filePath, []byte(fileContent), 0644)
		if err != nil {
			return fmt.Errorf("error writing file %q: %w", filePath, err)
		}
		if !alreadyWritten {
			writtenFiles = append(writtenFiles, filePath)
		}
	}

	// Summary is written to stderr (the progress logs above go to stdout).
	_, _ = fmt.Fprintf(os.Stderr,
		"Generated invariant test documentation for:\n"+
			" - %d contracts\n"+
			" - %d invariant tests\n"+
			"successfully!\n",
		len(docs),
		func() int {
			total := 0
			for _, contract := range docs {
				total += len(contract.Docs)
			}
			return total
		}(),
	)
	return nil
}
// Generate a table of contents for all invariant docs and place it in the README.
func tocGen(docsDir string) error {
autoTOCPrefix := "<!-- START autoTOC -->\n"
autoTOCPostfix := "<!-- END autoTOC -->\n"
files, err := os.ReadDir(docsDir)
if err != nil {
return fmt.Errorf("error reading directory %q: %w", docsDir, err)
}
// Generate a table of contents section.
var tocList []string
for _, file := range files {
fileName := file.Name()
if fileName != "README.md" {
tocList = append(tocList, fmt.Sprintf("- [%s](./%s)", strings.Replace(fileName, ".md", "", 1), fileName))
}
}
toc := fmt.Sprintf("%s\n## Table of Contents\n%s\n%s",
autoTOCPrefix, strings.Join(tocList, "\n"), autoTOCPostfix)
// Write the table of contents to the README.
readmePath := filepath.Join(docsDir, "README.md")
readmeContents, err := os.ReadFile(readmePath)
if err != nil {
return fmt.Errorf("error reading README file %q: %w", readmePath, err)
}
readmeParts := strings.Split(string(readmeContents), autoTOCPrefix)
above := readmeParts[0]
readmeParts = strings.Split(readmeParts[1], autoTOCPostfix)
below := readmeParts[1]
err = os.WriteFile(readmePath, []byte(above+toc+below), 0644)
if err != nil {
return fmt.Errorf("error writing README file %q: %w", readmePath, err)
}
return nil
}
// Render a `Contract` object into valid markdown.
func renderContractDoc(contract Contract, header bool) string {
	var out strings.Builder
	if header {
		fmt.Fprintf(&out, "# `%s` Invariants\n", contract.Name)
	}
	out.WriteString("\n")
	last := len(contract.Docs) - 1
	for i, doc := range contract.Docs {
		anchor := fmt.Sprintf("%s#L%d", contract.FileName, doc.LineNo)
		fmt.Fprintf(&out, "## %s\n**Test:** [`%s`](%s%s)\n\n%s", doc.Header, anchor, BaseInvariantGhUrl, anchor, doc.Desc)
		if i != last {
			out.WriteString("\n\n")
		}
	}
	return out.String()
}
// contains reports whether item occurs in slice.
func contains(slice []string, item string) bool {
	for i := range slice {
		if slice[i] == item {
			return true
		}
	}
	return false
}
import fs from 'fs'
import path from 'path'
// Contracts-bedrock root, resolved relative to this script's location.
const root = path.join(__dirname, '..', '..')
// Output directory for the snapshots; overridable via the first CLI argument.
const outdir = process.argv[2] || path.join(root, 'snapshots')
// Directory that forge writes its build artifacts to.
const forgeArtifactsDir = path.join(root, 'forge-artifacts')
// Collect the base names of all `.sol` sources under `src/`, sorted.
const getAllContractsSources = (): Array<string> => {
  // Explicitly typed — a bare `[]` is an implicitly-typed `any[]` under `strict`.
  const paths: string[] = []

  // Walk the directory tree, accumulating every file path found.
  const readFilesRecursively = (dir: string) => {
    for (const file of fs.readdirSync(dir)) {
      const filePath = path.join(dir, file)
      if (fs.statSync(filePath).isDirectory()) {
        readFilesRecursively(filePath)
      } else {
        paths.push(filePath)
      }
    }
  }
  readFilesRecursively(path.join(root, 'src'))

  // Only Solidity sources matter; forge artifact directories are keyed by
  // the source file's base name.
  return paths
    .filter((x) => x.endsWith('.sol'))
    .map((p: string) => path.basename(p))
    .sort()
}
// Shape of a forge build artifact JSON file (only the fields this script reads).
type ForgeArtifact = {
  abi: object
  ast: {
    nodeType: string
    nodes: any[]
  }
  storageLayout: {
    storage: [{ type: string; label: string; offset: number; slot: number }]
    types: { [key: string]: { label: string; numberOfBytes: number } }
  }
  bytecode: {
    object: string
  }
}

// One entry of the storage-layout snapshot emitted per contract.
type AbiSpecStorageLayoutEntry = {
  label: string
  slot: number
  offset: number
  bytes: number
  type: string
}
// Recursively return a copy of `obj` with all object keys in sorted order.
// Arrays keep their element order; primitives and null are returned as-is.
const sortKeys = (obj: any) => {
  if (typeof obj !== 'object' || obj === null) {
    return obj
  }
  const sorted = Array.isArray(obj) ? [] : {}
  for (const key of Object.keys(obj).sort()) {
    sorted[key] = sortKeys(obj[key])
  }
  return sorted
}
// Derive the contract name from an artifact file name:
//   ContractName.0.9.8.json -> ContractName
//   ContractName.json       -> ContractName
const parseArtifactName = (artifactVersionFile: string): string => {
  const versionPattern = /(.*?)\.([0-9]+\.[0-9]+\.[0-9]+)?/
  const match = versionPattern.exec(artifactVersionFile)
  if (match === null) {
    throw new Error(`Invalid artifact file name: ${artifactVersionFile}`)
  }
  return match[1]
}
// Build ABI and storage-layout snapshots for every non-abstract contract
// found in the forge artifacts, writing one sorted JSON file per contract
// under `outdir`.
const main = async () => {
  console.log(`writing abi and storage layout snapshots to ${outdir}`)

  const storageLayoutDir = path.join(outdir, 'storageLayout')
  const abiDir = path.join(outdir, 'abi')
  // Start from a clean slate so removed contracts don't leave stale snapshots.
  // NOTE(review): without `force: true`, `rmSync` throws if the directory does
  // not exist yet (e.g. a fresh checkout) — confirm that's intended.
  fs.rmSync(storageLayoutDir, { recursive: true })
  fs.rmSync(abiDir, { recursive: true })
  fs.mkdirSync(storageLayoutDir, { recursive: true })
  fs.mkdirSync(abiDir, { recursive: true })

  const contractSources = getAllContractsSources()
  // Maps contract name -> ABI of the first artifact seen, used to detect
  // conflicting ABIs across multiple artifact versions of the same contract.
  const knownAbis = {}

  for (const contractFile of contractSources) {
    // Forge emits one artifact directory per source file.
    const contractArtifacts = path.join(forgeArtifactsDir, contractFile)
    for (const name of fs.readdirSync(contractArtifacts)) {
      const data = fs.readFileSync(path.join(contractArtifacts, name))
      const artifact: ForgeArtifact = JSON.parse(data.toString())
      const contractName = parseArtifactName(name)

      // HACK: This is a hack to ignore libraries and abstract contracts. Not robust against changes to solc's internal ast repr
      if (artifact.ast === undefined) {
        throw new Error(
          "ast isn't present in forge-artifacts. Did you run forge build with `--ast`?"
        )
      }
      // Snapshot only concrete contracts: the AST must contain a non-abstract
      // ContractDefinition whose name matches the artifact name.
      const isContract = artifact.ast.nodes.some((node: any) => {
        return (
          node.nodeType === 'ContractDefinition' &&
          node.name === contractName &&
          node.contractKind === 'contract' &&
          (node.abstract === undefined || // solc < 0.6 doesn't have explicit abstract contracts
            node.abstract === false)
        )
      })
      if (!isContract) {
        console.log(`ignoring library/interface ${contractName}`)
        continue
      }

      // Flatten the forge storage layout into the snapshot format, resolving
      // each slot's AST type reference to its solidity label and byte size.
      const storageLayout: AbiSpecStorageLayoutEntry[] = []
      for (const storageEntry of artifact.storageLayout.storage) {
        // convert ast-based type to solidity type
        const typ = artifact.storageLayout.types[storageEntry.type]
        if (typ === undefined) {
          throw new Error(
            `undefined type for ${contractName}:${storageEntry.label}`
          )
        }
        storageLayout.push({
          label: storageEntry.label,
          bytes: typ.numberOfBytes,
          offset: storageEntry.offset,
          slot: storageEntry.slot,
          type: typ.label,
        })
      }

      // Multiple artifact versions of the same contract must agree on the ABI.
      if (knownAbis[contractName] === undefined) {
        knownAbis[contractName] = artifact.abi
      } else if (
        JSON.stringify(knownAbis[contractName]) !== JSON.stringify(artifact.abi)
      ) {
        throw Error(
          `detected multiple artifact versions with different ABIs for ${contractFile}`
        )
      } else {
        console.log(`detected multiple artifacts for ${contractName}`)
      }

      // Sort snapshots for easier manual inspection
      fs.writeFileSync(
        `${abiDir}/${contractName}.json`,
        JSON.stringify(sortKeys(artifact.abi), null, 2)
      )
      fs.writeFileSync(
        `${storageLayoutDir}/${contractName}.json`,
        JSON.stringify(sortKeys(storageLayout), null, 2)
      )
    }
  }
}
main()
package main
import (
"bytes"
"encoding/json"
"flag"
"fmt"
"os"
"path/filepath"
"regexp"
"sort"
)
// ForgeArtifact models a forge build artifact JSON file
// (only the fields this generator reads).
type ForgeArtifact struct {
	// ABI is a nested JSON data structure, including some objects/maps.
	// We declare it as interface, and not raw-message, such that Go decodes into map[string]interface{}
	// where possible. The JSON-encoder will then sort the keys (default Go JSON behavior on maps),
	// to reproduce the sortKeys(abi) result of the legacy Typescript version of the snapshort-generator.
	ABI interface{} `json:"abi"`
	// Ast is a pointer so a missing "ast" key decodes to nil and can be detected.
	Ast *struct {
		NodeType string `json:"nodeType"`
		Nodes    []struct {
			NodeType     string `json:"nodeType"`
			Name         string `json:"name"`
			ContractKind string `json:"contractKind"`
			Abstract     bool   `json:"abstract"`
		} `json:"nodes"`
	} `json:"ast"`
	// StorageLayout keeps label/offset/slot as raw JSON so they are re-emitted
	// byte-for-byte in the snapshot output.
	StorageLayout struct {
		Storage []struct {
			Type   string          `json:"type"`
			Label  json.RawMessage `json:"label"`
			Offset json.RawMessage `json:"offset"`
			Slot   json.RawMessage `json:"slot"`
		} `json:"storage"`
		Types map[string]struct {
			Label         string          `json:"label"`
			NumberOfBytes json.RawMessage `json:"numberOfBytes"`
		} `json:"types"`
	} `json:"storageLayout"`
	Bytecode struct {
		Object string `json:"object"`
	} `json:"bytecode"`
}

// AbiSpecStorageLayoutEntry is one entry of the storage-layout snapshot
// emitted per contract; fields are serialized in alphabetical key order.
type AbiSpecStorageLayoutEntry struct {
	Bytes  json.RawMessage `json:"bytes"`
	Label  json.RawMessage `json:"label"`
	Offset json.RawMessage `json:"offset"`
	Slot   json.RawMessage `json:"slot"`
	Type   string          `json:"type"`
}
// Program entrypoint: takes the contracts-bedrock root directory as the single
// CLI argument and generates ABI + storage-layout snapshots for it.
func main() {
	flag.Parse()
	if flag.NArg() != 1 {
		fmt.Println("Expected path of contracts-bedrock as CLI argument")
		os.Exit(1)
	}
	if err := generateSnapshots(flag.Arg(0)); err != nil {
		fmt.Printf("Failed to generate snapshots: %v\n", err)
		os.Exit(1)
	}
}
// generateSnapshots builds ABI and storage-layout snapshots for every
// non-abstract contract in the forge artifacts under rootDir, writing one
// JSON file per contract into `snapshots/abi` and `snapshots/storageLayout`.
func generateSnapshots(rootDir string) error {
	forgeArtifactsDir := filepath.Join(rootDir, "forge-artifacts")
	srcDir := filepath.Join(rootDir, "src")
	outDir := filepath.Join(rootDir, "snapshots")
	storageLayoutDir := filepath.Join(outDir, "storageLayout")
	abiDir := filepath.Join(outDir, "abi")

	fmt.Printf("writing abi and storage layout snapshots to %s\n", outDir)

	// Clean and recreate directories
	if err := os.RemoveAll(storageLayoutDir); err != nil {
		return fmt.Errorf("failed to remove storage layout dir: %w", err)
	}
	if err := os.RemoveAll(abiDir); err != nil {
		return fmt.Errorf("failed to remove ABI dir: %w", err)
	}
	if err := os.MkdirAll(storageLayoutDir, os.ModePerm); err != nil {
		return fmt.Errorf("failed to create storage layout dir: %w", err)
	}
	if err := os.MkdirAll(abiDir, os.ModePerm); err != nil {
		return fmt.Errorf("failed to create ABI dir: %w", err)
	}

	contractSources, err := getAllContractsSources(srcDir)
	if err != nil {
		return fmt.Errorf("failed to retrieve contract sources: %w", err)
	}

	// Maps contract name -> ABI of the first artifact seen, used to detect
	// conflicting ABIs across multiple artifact versions of the same contract.
	knownAbis := make(map[string]interface{})

	for _, contractFile := range contractSources {
		// Forge emits one artifact directory per source file.
		contractArtifacts := filepath.Join(forgeArtifactsDir, contractFile)
		files, err := os.ReadDir(contractArtifacts)
		if err != nil {
			return fmt.Errorf("failed to scan contract artifacts of %q: %w", contractFile, err)
		}
		for _, file := range files {
			artifactPath := filepath.Join(contractArtifacts, file.Name())
			data, err := os.ReadFile(artifactPath)
			if err != nil {
				return fmt.Errorf("failed to read artifact %q: %w", artifactPath, err)
			}
			var artifact ForgeArtifact
			if err := json.Unmarshal(data, &artifact); err != nil {
				return fmt.Errorf("failed to decode artifact %q: %w", artifactPath, err)
			}
			contractName, err := parseArtifactName(file.Name())
			if err != nil {
				return fmt.Errorf("failed to parse artifact name %q: %w", file.Name(), err)
			}
			// HACK: This is a hack to ignore libraries and abstract contracts. Not robust against changes to solc's internal ast repr
			if artifact.Ast == nil {
				return fmt.Errorf("ast isn't present in forge-artifacts. Did you run forge build with `--ast`? Artifact: %s", artifactPath)
			}
			// Check if the artifact is a contract: the AST must contain a
			// non-abstract ContractDefinition matching the artifact name.
			isContract := false
			for _, node := range artifact.Ast.Nodes {
				if node.NodeType == "ContractDefinition" &&
					node.Name == contractName &&
					node.ContractKind == "contract" &&
					!node.Abstract {
					isContract = true
					break
				}
			}
			if !isContract {
				fmt.Printf("ignoring library/interface %s\n", contractName)
				continue
			}

			// Flatten the forge storage layout into the snapshot format,
			// resolving each slot's AST type reference to its solidity
			// label and byte size.
			storageLayout := make([]AbiSpecStorageLayoutEntry, 0, len(artifact.StorageLayout.Storage))
			for _, storageEntry := range artifact.StorageLayout.Storage {
				// convert ast-based type to solidity type
				typ, ok := artifact.StorageLayout.Types[storageEntry.Type]
				if !ok {
					return fmt.Errorf("undefined type for %s:%s", contractName, storageEntry.Label)
				}
				storageLayout = append(storageLayout, AbiSpecStorageLayoutEntry{
					Label:  storageEntry.Label,
					Bytes:  typ.NumberOfBytes,
					Offset: storageEntry.Offset,
					Slot:   storageEntry.Slot,
					Type:   typ.Label,
				})
			}

			// Multiple artifact versions of the same contract must agree on the ABI.
			if existingAbi, exists := knownAbis[contractName]; exists {
				if !jsonEqual(existingAbi, artifact.ABI) {
					return fmt.Errorf("detected multiple artifact versions with different ABIs for %s", contractFile)
				} else {
					fmt.Printf("detected multiple artifacts for %s\n", contractName)
				}
			} else {
				knownAbis[contractName] = artifact.ABI
			}

			// Sort and write snapshots
			if err := writeJSON(filepath.Join(abiDir, contractName+".json"), artifact.ABI); err != nil {
				return fmt.Errorf("failed to write ABI snapshot JSON of %q: %w", contractName, err)
			}
			if err := writeJSON(filepath.Join(storageLayoutDir, contractName+".json"), storageLayout); err != nil {
				return fmt.Errorf("failed to write storage layout snapshot JSON of %q: %w", contractName, err)
			}
		}
	}
	return nil
}
// getAllContractsSources returns the sorted base names of all `.sol` files
// found (recursively) under srcDir.
func getAllContractsSources(srcDir string) ([]string, error) {
	var allFiles []string
	if err := readFilesRecursively(srcDir, &allFiles); err != nil {
		return nil, fmt.Errorf("failed to retrieve files: %w", err)
	}
	var solFiles []string
	for _, file := range allFiles {
		if filepath.Ext(file) != ".sol" {
			continue
		}
		solFiles = append(solFiles, filepath.Base(file))
	}
	sort.Strings(solFiles)
	return solFiles, nil
}
func readFilesRecursively(dir string, paths *[]string) error {
files, err := os.ReadDir(dir)
if err != nil {
return err
}
for _, file := range files {
filePath := filepath.Join(dir, file.Name())
if file.IsDir() {
if err := readFilesRecursively(filePath, paths); err != nil {
return fmt.Errorf("failed to recurse into %q: %w", filePath, err)
}
} else {
*paths = append(*paths, filePath)
}
}
return nil
}
// artifactNameRe extracts the contract name (first capture group) from an
// artifact file name, tolerating an optional trailing semver. Compiled once
// at package init instead of on every call.
var artifactNameRe = regexp.MustCompile(`(.*?)\.([0-9]+\.[0-9]+\.[0-9]+)?`)

// parseArtifactName derives the contract name from an artifact file name:
//
//	ContractName.0.9.8.json -> ContractName
//	ContractName.json       -> ContractName
func parseArtifactName(artifactVersionFile string) (string, error) {
	match := artifactNameRe.FindStringSubmatch(artifactVersionFile)
	if len(match) < 2 {
		return "", fmt.Errorf("invalid artifact file name: %q", artifactVersionFile)
	}
	return match[1], nil
}
func writeJSON(filename string, data interface{}) error {
var out bytes.Buffer
enc := json.NewEncoder(&out)
enc.SetEscapeHTML(false)
enc.SetIndent("", " ")
err := enc.Encode(data)
if err != nil {
return fmt.Errorf("failed to encode data: %w", err)
}
jsonData := out.Bytes()
if len(jsonData) > 0 && jsonData[len(jsonData)-1] == '\n' { // strip newline
jsonData = jsonData[:len(jsonData)-1]
}
if err := os.WriteFile(filename, jsonData, 0644); err != nil {
return fmt.Errorf("failed to write file: %w", err)
}
return nil
}
// jsonEqual reports whether a and b have identical JSON encodings.
// A marshal failure on either side counts as "not equal".
func jsonEqual(a, b interface{}) bool {
	jsonA, errA := json.Marshal(a)
	if errA != nil {
		return false
	}
	jsonB, errB := json.Marshal(b)
	if errB != nil {
		return false
	}
	return bytes.Equal(jsonA, jsonB)
}
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment